├── .github ├── CODEOWNERS └── workflows │ └── webpack.yml ├── .gitignore ├── .nvmrc ├── .prettierrc ├── README.md ├── data ├── entity_resolution_data │ ├── entra_id_user_generated.jsonl │ ├── generated_logs.jsonl │ ├── mini_entra_id_user_generated.jsonl │ ├── mini_generated_logs.jsonl │ ├── mini_okta_system_generated.jsonl │ ├── mini_okta_user_generated.jsonl │ ├── okta_system_generated.jsonl │ └── okta_user_generated.jsonl └── entity_store_perf_data │ ├── medium.jsonl │ └── small.jsonl ├── entity_resolution_data ├── entra_id_user_generated.jsonl ├── generated_logs.jsonl ├── mini_entra_id_user_generated.jsonl ├── mini_generated_logs.jsonl ├── mini_okta_system_generated.jsonl ├── mini_okta_user_generated.jsonl ├── okta_system_generated.jsonl └── okta_user_generated.jsonl ├── eslint.config.mjs ├── logs └── .gitkeep ├── package.json ├── renovate.json ├── src ├── commands │ ├── asset_criticality.ts │ ├── documents.ts │ ├── entity_resolution.ts │ ├── entity_store.ts │ ├── entity_store_perf.ts │ ├── insights.ts │ ├── legacy_risk_score.ts │ ├── privileged_access_detection_ml │ │ ├── event_generator.ts │ │ ├── index_management.ts │ │ └── privileged_access_detection_ml.ts │ ├── privileged_user_monitoring │ │ ├── generate_csv_file.ts │ │ ├── privileged_user_monitoring.ts │ │ └── sample_documents.ts │ ├── rules.ts │ └── utils │ │ ├── cli_utils.ts │ │ ├── create_agent_document.ts │ │ ├── indices.ts │ │ ├── integrations_sync_utils.ts │ │ ├── sample_data_helpers.ts │ │ └── time_windows.ts ├── constants.ts ├── create_alerts.ts ├── create_events.ts ├── create_misconfigurations.ts ├── create_vulnerability.ts ├── get_config.ts ├── index.ts ├── mappings │ ├── alertMappings.json │ ├── auditbeat.json │ └── eventMappings.json └── utils │ ├── create_config_on_first_run.ts │ ├── get_alert_index.ts │ ├── index.ts │ ├── initialize_space.ts │ └── kibana_api.ts ├── test_log_data └── proof_point_tap.json ├── tsconfig.json └── yarn.lock /.github/CODEOWNERS: 
-------------------------------------------------------------------------------- 1 | ./** @elastic/security-entity-analytics 2 | ./** @hop-dev 3 | 4 | -------------------------------------------------------------------------------- /.github/workflows/webpack.yml: -------------------------------------------------------------------------------- 1 | name: NodeJS with Webpack 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | name: Lint and Type Check 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 16 | 17 | - name: Use Node.js v23.11.1 18 | uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5 19 | with: 20 | node-version: v23.11.1 21 | 22 | - name: Build 23 | run: | 24 | yarn 25 | yarn checks -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | output 2 | node_modules 3 | config.json 4 | logs/*.log 5 | *.DS_Store 6 | .vscode -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 23.11.1 2 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "es5", 4 | "printWidth": 100 5 | } 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Security Documents Generator 2 | > **Note:** For compatibility with Elasticsearch 8.18 and below, checkout the tag `8.18-compatibility`. 3 | 4 | Generate fake data for testing and development. 
Configure your Elasticsearch environment via basic auth or API key, and use the various commands to generate, manipulate, and clean data. 5 | 6 | ## Getting started 7 | 8 | 1. Install dependencies: `yarn` 9 | 10 | 2. Choose a command to run or simply run `yarn start`, you will be guided to generate a config file. 11 | 12 | 3. *Optional* you can change `config.json` and provide different credentials for elasticsearch at any time. 13 | 14 | You can provide apiKey for Cloud/Serverless, or just username/password. 15 | 16 | Examples of config: 17 | 18 | ``` 19 | { 20 | "elastic": { 21 | "node": "https://test.es.us-west2.gcp.elastic-cloud.com", 22 | "apiKey": "ASdlkk==" 23 | 24 | }, 25 | "kibana": { 26 | "node": "https://test.kb.us-west2.gcp.elastic-cloud.com:9243", 27 | "apiKey": "asdasdasd==" 28 | } 29 | } 30 | ``` 31 | 32 | 33 | ``` 34 | { 35 | "elastic": { 36 | "node": "http://localhost:9200", 37 | "username": "elastic", 38 | "password": "changeme" 39 | }, 40 | "kibana": { 41 | "node": "http://127.0.0.1:5601", 42 | "username": "elastic", 43 | "password": "changeme" 44 | }, 45 | "eventIndex": "" 46 | } 47 | ``` 48 | 49 | ## Commands 50 | 51 | ### Privileged User Monitoring 52 | 53 | `yarn start privileged-user-monitoring` - Generate source events and anomalous source data for privileged user monitoring and the privileged access detection ML jobs. 54 | 55 | ### Entity store 56 | 57 | `yarn start entity-store` - Generate data for entity store 58 | 59 | `yarn start clean-entity-store` - Clean data for entity store 60 | 61 | ### Alerts 62 | `yarn start help` - To see the commands list 63 | 64 | `yarn start generate-alerts -n -h -u -s ` 65 | 66 | `yarn start delete-alerts` - Delete all alerts 67 | 68 | ### API tests 69 | 70 | `yarn start test-risk-score` - Test risk score API time response 71 | 72 | 73 | ### Alert document 74 | 75 | To modify alert document, you can change `createAlert.ts` file. 
76 | 77 | 78 | ### How to test Risk Score API 79 | 80 | Example list of command for testing Risk Score API worth 10.000 alerts. 81 | ``` 82 | yarn start delete-alerts 83 | yarn start generate-alerts -n 10000 -h 100 -u 100 84 | yarn start test-risk-score 85 | ``` 86 | 87 | ## How to generate data for serverless project 88 | 89 | 1. Get your Elasticsearch url. 90 | 91 | Go to Cloud -> Projects -> Your serverless project. 92 | 93 | Then click Endpoints -> View and copy paste your ES URL to `config.json` into `elastic.node` field. 94 | 95 | 2. Generate API key 96 | 97 | Go to Cloud -> Projects -> Api Keys -> Manage project API keys 98 | 99 | Create a new API key and past it to `config.json` into `elastic.apiKey` field. 100 | 101 | 3. (Optional) Change if you want index name in `config.json` in `eventIndex` field. 102 | 103 | By default - `logs-testlogs-default` 104 | 105 | 4. (Optional) Change mappings in `eventMappings.json` file. 106 | 107 | 5. (Optional) Change event structure in `create_events.ts` file 108 | 109 | 6. Run `yarn start generate-events n`. Where `n` is the amount of documents that will be generated. 110 | 111 | 7. `yarn start delete-events` to remove all documents from event index after your test. 112 | 113 | ## Entity Store Performance Testing 114 | 115 | ### Sending one of the pre-built files 116 | 117 | #### One time send 118 | 119 | To upload a perf file once, use the `upload-perf-data` command, e.g: 120 | 121 | ``` 122 | # upload the small file, delete all logs and entities beforehand 123 | yarn start upload-perf-data-interval small --delete 124 | ``` 125 | 126 | If you omit the file name you will be presented with a picker. 127 | 128 | #### Send at an interval 129 | A better test is to send data at an interval to put the system under continued load. 130 | 131 | To do this use the `upload-perf-data-interval` command. 
This will upload a file 10 times with 30 seconds between each send by default, e.g: 132 | 133 | ``` 134 | # upload the small data file 10 times with 30 seconds between sends 135 | yarn start upload-perf-data-interval small --deleteEntities 136 | ``` 137 | 138 | The count and interval can be customized: 139 | 140 | ``` 141 | # upload the small data file 100 times with 60 seconds between sends 142 | yarn start upload-perf-data-interval small --deleteEntities --interval 60 --count 100 143 | ``` 144 | 145 | The entity IDs are modified before sending so that each upload creates new entities, this means there will be count * entityCount entities by the end of the test. 146 | 147 | While the files are uploaded, we poll elasticsearch for the cluster health and the transform health, these files can be found in `./logs`. Where one file contains the cluster health every 5 seconds, and the other contains the transform health every 5 seconds: 148 | 149 | ``` 150 | > ll logs 151 | total 464 152 | -rw-r--r--@ 1 mark staff 33K Oct 28 11:20 small-2024-10-28T11:14:06.828Z-cluster-health.log 153 | -rw-r--r--@ 1 mark staff 145K Oct 28 11:20 small-2024-10-28T11:14:06.828Z-transform-stats.log 154 | ``` 155 | 156 | ### Generating a data file 157 | 158 | To generate a data file for performance testing, use the `create-perf-data` command. 159 | 160 | E.g this is how 'large' was created: 161 | 162 | ``` 163 | # create a file with 100k entities each with 5 logs. 164 | yarn start create-perf-data large 100000 5 165 | ``` 166 | 167 | Entities are split 50/50 host/user. 168 | The log messages created contain incremental data, e.g the first log message for a host would contain IP 192.168.1.0 and 192.168.1.1, the second log would contain 192.168.1.2 and 192.168.1.3. This way when 5 log messages are sent, an entity should have 10 IP addresses ranging from 0 - 10. 169 | 170 | 171 | ### Generate rules and gaps 172 | 173 | Will generate 100 rules with 10000 gaps per rule. 
174 | 175 | `yarn start rules --rules 100 -g 10000 -c -i"48h"` 176 | -------------------------------------------------------------------------------- /data/entity_resolution_data/mini_entra_id_user_generated.jsonl: -------------------------------------------------------------------------------- 1 | { "@timestamp": "2023-10-10T12:00:00.000000-03:00", "event": { "action": "user-discovered" }, "azure_ad": { "userPrincipalName": "mark.hopkin@testcompany.com", "mail": "mark.hopkin@testcompany.com", "displayName": "Mark Hopkin", "givenName": "Mark", "surname": "Hopkin", "jobTitle": "Clinical molecular geneticist", "mobilePhone": "123-555-3658", "businessPhones": [ "123-555-3658" ] }, "user": { "id": "unique-user-id-mark-hopkin", "group": [ { "id": "78d5b9c0-9801-4c66-bc63-8062d41bdc20", "name": "group4" } ] }, "labels": { "identity_source": "azure-1" } } 2 | -------------------------------------------------------------------------------- /data/entity_resolution_data/mini_generated_logs.jsonl: -------------------------------------------------------------------------------- 1 | { "@timestamp": "2024-07-01T00:00:00.000Z", "agent": { "ephemeral_id": "1234abcd-56ef-78gh-90ij-klmnopqrst", "id": "5678wxyz-90ab-12cd-34ef-5678ghijklmn", "name": "markhopkin-ubuntu-server", "type": "filebeat", "version": "8.14.3" }, "cloud": { "account": { "id": "testcompany-dev" }, "availability_zone": "southamerica-east1-b", "instance": { "id": "398273918273918273", "name": "markhopkin-ubuntu-server" }, "machine": { "type": "e2-medium" }, "project": { "id": "testcompany-dev" }, "provider": "gcp", "region": "southamerica-east1", "service": { "name": "GCE" } }, "data_stream": { "dataset": "system.auth", "namespace": "default", "type": "logs" }, "ecs": { "version": "8.0.0" }, "elastic_agent": { "id": "5678wxyz-90ab-12cd-34ef-5678ghijklmn", "snapshot": false, "version": "8.14.3" }, "event": { "action": "ssh_login", "agent_id_status": "verified", "category": [ "authentication", "session" ], "dataset": 
"system.auth", "ingested": "2024-07-01T00:05:00Z", "kind": "event", "outcome": "success", "timezone": "+00:00", "type": [ "info" ] }, "host": { "architecture": "x86_64", "containerized": false, "hostname": "markhopkin-ubuntu-server", "id": "c123d4e56f78901g234h567i8j901k23", "ip": [ "10.142.3.222", "1050:0:0:0:6:700:400d:427c" ], "mac": [ "00-1b-63-84-45-e6" ], "name": "markhopkin-ubuntu-server", "os": { "codename": "focal", "family": "debian", "kernel": "5.15.0-1062-gcp", "name": "Ubuntu", "platform": "ubuntu", "type": "linux", "version": "20.04.6 LTS (Focal Fossa)" } }, "input": { "type": "log" }, "log": { "file": { "path": "/var/log/auth.log" }, "offset": 55678, "syslog": { "appname": "sshd", "hostname": "markhopkin-ubuntu-server", "procid": "4732" } }, "related": { "hosts": [ "markhopkin-ubuntu-server" ], "ip": [ "3.231.221.243" ], "user": [ "mark" ] }, "source": { "address": "3.231.221.243", "as": { "number": 396982, "organization": { "name": "GOOGLE-CLOUD-PLATFORM" } }, "geo": { "continent_name": "South America", "country_iso_code": "AR", "country_name": "Argentina", "location": { "lat": -34.61315, "lon": -58.37723 } }, "ip": "3.231.221.243", "port": 38760 }, "system": { "auth": { "ssh": { "event": "Accepted", "method": "publickey", "signature": "ECDSA SHA256:Bi35p556x+zBggHHJGK4YZIy40y+04tccPF4g156MmR" } } }, "tags": [ "system-auth" ], "user": { "name": "markhopkin" } } 2 | -------------------------------------------------------------------------------- /data/entity_resolution_data/mini_okta_system_generated.jsonl: -------------------------------------------------------------------------------- 1 | { "actor": { "id": "mark.hopkin@testcompany.com", "display_name": "Mark Hopkin", "type": "User", "alternate_id": "mark.hopkin@testcompany.com" }, "request": { "ip_chain": [ { "geographical_context": { "country": "Argentina", "city": "Buenos Aires", "state": "CABA", "postal_code": "C1000", "geolocation": { "lon": -58.3816, "lat": -34.6037 } }, "ip": "190.174.1.10", 
"version": "V4" } ] }, "debug_context": { "debug_data": { "flattened": { "authnRequestId": "4b8f98abc9def0123ab4c5678e901234", "deviceFingerprint": "1a2b3c4d5e6f7890abcdef1234567890", "dtHash": "7a8b9c0d1e2f34567890abcdef123456", "oktaUserAgentExtended": "okta-auth-js/8.0.0 okta-signin-widget-8.0.0", "requestId": "5c6d7e8f9a0b1c2d3e4f567890abcdef", "requestUri": "/api/v1/authn", "threatSuspected": "false", "url": "/api/v1/authn?" }, "device_fingerprint": "1a2b3c4d5e6f7890abcdef1234567890", "dt_hash": "7a8b9c0d1e2f34567890abcdef123456", "threat_suspected": "false", "request_id": "5c6d7e8f9a0b1c2d3e4f567890abcdef", "request_uri": "/api/v1/authn", "url": "/api/v1/authn?" } }, "event_type": "policy.evaluate_sign_on", "authentication_context": { "authentication_step": 0, "external_session_id": "1234abcd5678efgh9012ijkl_TUA" }, "display_message": "Evaluation of sign-on policy", "client": { "zone": "null", "ip": "190.174.1.10", "device": "Computer", "user_agent": { "raw_user_agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:92.0) Gecko/20100101 Firefox/92.0", "os": "Ubuntu 20.04", "browser": "FIREFOX" } }, "uuid": "b123c4d5-e678-90ab-cdef-1234567890ab", "outcome": { "result": "CHALLENGE", "reason": "Sign-on policy evaluation resulted in CHALLENGE" }, "transaction": { "id": "8a9b0c1d2e3f4567890abcdef1234567", "type": "WEB" }, "security_context": { "as": { "number": 22927, "organization": { "name": "Telecom Argentina" } }, "domain": "telecom.com.ar", "isp": "Telecom Argentina", "is_proxy": false }, "target": [ { "id": "default_policy_id", "type": "PolicyEntity", "display_name": "Default Policy", "alternate_id": "unknown" }, { "id": "verify_app_mfa_rule_id", "type": "PolicyRule", "display_name": "Verify App MFA", "alternate_id": "00pf1r6hd3yXuNl3A5d6" } ] } 2 | -------------------------------------------------------------------------------- /data/entity_resolution_data/mini_okta_user_generated.jsonl: 
-------------------------------------------------------------------------------- 1 | { "account": { "activated_date": "2024-07-01T00:00:00.000Z", "change_date": "2024-07-01T00:00:00.000Z", "create_date": "2024-07-01T00:00:00.000Z", "password_change_date": "2024-07-01T00:00:00.000Z", "status": { "deprovisioned": false, "locked_out": false, "password_expired": false, "recovery": false, "suspended": false } }, "email": "mark.hopkin@testcompany.com", "id": "239238293Sdjip029h2b", "name": "mark.hopkin@testcompany.com", "profile": { "first_name": "Mark", "last_name": "Hopkin", "status": "ACTIVE" } } 2 | -------------------------------------------------------------------------------- /entity_resolution_data/mini_entra_id_user_generated.jsonl: -------------------------------------------------------------------------------- 1 | { "@timestamp": "2023-10-10T12:00:00.000000-03:00", "event": { "action": "user-discovered" }, "azure_ad": { "userPrincipalName": "mark.hopkin@testcompany.com", "mail": "mark.hopkin@testcompany.com", "displayName": "Mark Hopkin", "givenName": "Mark", "surname": "Hopkin", "jobTitle": "Clinical molecular geneticist", "mobilePhone": "123-555-3658", "businessPhones": [ "123-555-3658" ] }, "user": { "id": "unique-user-id-mark-hopkin", "group": [ { "id": "78d5b9c0-9801-4c66-bc63-8062d41bdc20", "name": "group4" } ] }, "labels": { "identity_source": "azure-1" } } 2 | -------------------------------------------------------------------------------- /entity_resolution_data/mini_generated_logs.jsonl: -------------------------------------------------------------------------------- 1 | { "@timestamp": "2024-07-01T00:00:00.000Z", "agent": { "ephemeral_id": "1234abcd-56ef-78gh-90ij-klmnopqrst", "id": "5678wxyz-90ab-12cd-34ef-5678ghijklmn", "name": "markhopkin-ubuntu-server", "type": "filebeat", "version": "8.14.3" }, "cloud": { "account": { "id": "testcompany-dev" }, "availability_zone": "southamerica-east1-b", "instance": { "id": "398273918273918273", "name": 
"markhopkin-ubuntu-server" }, "machine": { "type": "e2-medium" }, "project": { "id": "testcompany-dev" }, "provider": "gcp", "region": "southamerica-east1", "service": { "name": "GCE" } }, "data_stream": { "dataset": "system.auth", "namespace": "default", "type": "logs" }, "ecs": { "version": "8.0.0" }, "elastic_agent": { "id": "5678wxyz-90ab-12cd-34ef-5678ghijklmn", "snapshot": false, "version": "8.14.3" }, "event": { "action": "ssh_login", "agent_id_status": "verified", "category": [ "authentication", "session" ], "dataset": "system.auth", "ingested": "2024-07-01T00:05:00Z", "kind": "event", "outcome": "success", "timezone": "+00:00", "type": [ "info" ] }, "host": { "architecture": "x86_64", "containerized": false, "hostname": "markhopkin-ubuntu-server", "id": "c123d4e56f78901g234h567i8j901k23", "ip": [ "10.142.3.222", "1050:0:0:0:6:700:400d:427c" ], "mac": [ "00-1b-63-84-45-e6" ], "name": "markhopkin-ubuntu-server", "os": { "codename": "focal", "family": "debian", "kernel": "5.15.0-1062-gcp", "name": "Ubuntu", "platform": "ubuntu", "type": "linux", "version": "20.04.6 LTS (Focal Fossa)" } }, "input": { "type": "log" }, "log": { "file": { "path": "/var/log/auth.log" }, "offset": 55678, "syslog": { "appname": "sshd", "hostname": "markhopkin-ubuntu-server", "procid": "4732" } }, "related": { "hosts": [ "markhopkin-ubuntu-server" ], "ip": [ "3.231.221.243" ], "user": [ "mark" ] }, "source": { "address": "3.231.221.243", "as": { "number": 396982, "organization": { "name": "GOOGLE-CLOUD-PLATFORM" } }, "geo": { "continent_name": "South America", "country_iso_code": "AR", "country_name": "Argentina", "location": { "lat": -34.61315, "lon": -58.37723 } }, "ip": "3.231.221.243", "port": 38760 }, "system": { "auth": { "ssh": { "event": "Accepted", "method": "publickey", "signature": "ECDSA SHA256:Bi35p556x+zBggHHJGK4YZIy40y+04tccPF4g156MmR" } } }, "tags": [ "system-auth" ], "user": { "name": "markhopkin" } } 2 | 
-------------------------------------------------------------------------------- /entity_resolution_data/mini_okta_system_generated.jsonl: -------------------------------------------------------------------------------- 1 | { "actor": { "id": "mark.hopkin@testcompany.com", "display_name": "Mark Hopkin", "type": "User", "alternate_id": "mark.hopkin@testcompany.com" }, "request": { "ip_chain": [ { "geographical_context": { "country": "Argentina", "city": "Buenos Aires", "state": "CABA", "postal_code": "C1000", "geolocation": { "lon": -58.3816, "lat": -34.6037 } }, "ip": "190.174.1.10", "version": "V4" } ] }, "debug_context": { "debug_data": { "flattened": { "authnRequestId": "4b8f98abc9def0123ab4c5678e901234", "deviceFingerprint": "1a2b3c4d5e6f7890abcdef1234567890", "dtHash": "7a8b9c0d1e2f34567890abcdef123456", "oktaUserAgentExtended": "okta-auth-js/8.0.0 okta-signin-widget-8.0.0", "requestId": "5c6d7e8f9a0b1c2d3e4f567890abcdef", "requestUri": "/api/v1/authn", "threatSuspected": "false", "url": "/api/v1/authn?" }, "device_fingerprint": "1a2b3c4d5e6f7890abcdef1234567890", "dt_hash": "7a8b9c0d1e2f34567890abcdef123456", "threat_suspected": "false", "request_id": "5c6d7e8f9a0b1c2d3e4f567890abcdef", "request_uri": "/api/v1/authn", "url": "/api/v1/authn?" 
} }, "event_type": "policy.evaluate_sign_on", "authentication_context": { "authentication_step": 0, "external_session_id": "1234abcd5678efgh9012ijkl_TUA" }, "display_message": "Evaluation of sign-on policy", "client": { "zone": "null", "ip": "190.174.1.10", "device": "Computer", "user_agent": { "raw_user_agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:92.0) Gecko/20100101 Firefox/92.0", "os": "Ubuntu 20.04", "browser": "FIREFOX" } }, "uuid": "b123c4d5-e678-90ab-cdef-1234567890ab", "outcome": { "result": "CHALLENGE", "reason": "Sign-on policy evaluation resulted in CHALLENGE" }, "transaction": { "id": "8a9b0c1d2e3f4567890abcdef1234567", "type": "WEB" }, "security_context": { "as": { "number": 22927, "organization": { "name": "Telecom Argentina" } }, "domain": "telecom.com.ar", "isp": "Telecom Argentina", "is_proxy": false }, "target": [ { "id": "default_policy_id", "type": "PolicyEntity", "display_name": "Default Policy", "alternate_id": "unknown" }, { "id": "verify_app_mfa_rule_id", "type": "PolicyRule", "display_name": "Verify App MFA", "alternate_id": "00pf1r6hd3yXuNl3A5d6" } ] } 2 | -------------------------------------------------------------------------------- /entity_resolution_data/mini_okta_user_generated.jsonl: -------------------------------------------------------------------------------- 1 | { "account": { "activated_date": "2024-07-01T00:00:00.000Z", "change_date": "2024-07-01T00:00:00.000Z", "create_date": "2024-07-01T00:00:00.000Z", "password_change_date": "2024-07-01T00:00:00.000Z", "status": { "deprovisioned": false, "locked_out": false, "password_expired": false, "recovery": false, "suspended": false } }, "email": "mark.hopkin@testcompany.com", "id": "239238293Sdjip029h2b", "name": "mark.hopkin@testcompany.com", "profile": { "first_name": "Mark", "last_name": "Hopkin", "status": "ACTIVE" } } 2 | -------------------------------------------------------------------------------- /eslint.config.mjs: 
-------------------------------------------------------------------------------- 1 | import tseslint from '@typescript-eslint/eslint-plugin'; 2 | import tsparser from '@typescript-eslint/parser'; 3 | import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended'; 4 | import checkFile from 'eslint-plugin-check-file'; 5 | 6 | import js from '@eslint/js'; 7 | import globals from 'globals'; 8 | export default [ 9 | { 10 | languageOptions: { 11 | parser: tsparser, 12 | globals: { 13 | ...globals.node, 14 | }, 15 | }, 16 | plugins: { 17 | '@typescript-eslint': tseslint, 18 | ...eslintPluginPrettierRecommended.plugins, 19 | 'check-file': checkFile, 20 | }, 21 | rules: { 22 | ...js.configs.recommended.rules, 23 | ...tseslint.configs['eslint-recommended'].rules, 24 | ...tseslint.configs.recommended.rules, 25 | ...eslintPluginPrettierRecommended.rules, 26 | 'prettier/prettier': ['error', { singleQuote: true }], 27 | 'check-file/filename-naming-convention': [ 28 | 'error', 29 | { 30 | '**/*.ts': 'SNAKE_CASE', 31 | }, 32 | ], 33 | }, 34 | files: ['src/**/*.ts'], 35 | }, 36 | ]; 37 | -------------------------------------------------------------------------------- /logs/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elastic/security-documents-generator/140705742edea3f53ce31ae91f004ed30a2d60cd/logs/.gitkeep -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "security-documents-generator", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.ts", 6 | "type": "module", 7 | "scripts": { 8 | "test": "echo \"Error: no test specified\" && exit 1", 9 | "typecheck": "tsc --noEmit", 10 | "start": "tsx src/index", 11 | "lint": "eslint", 12 | "format": "prettier --write \"src/**/*.ts\" \"*.json\" \"*.mjs\" \"src/**/*.json\"", 13 | "checks": 
"yarn typecheck && yarn lint && yarn prettier --check \"src/**/*.ts\" \"*.json\" \"*.mjs\" \"src/**/*.json\" || { echo \"‼️ run yarn fix to automatically fix issues ‼️\"; exit 1; }", 14 | "fix": "yarn lint --fix && yarn prettier --write \"src/**/*.ts\" \"*.json\" \"*.mjs\" \"src/**/*.json\"" 15 | }, 16 | "engines": { 17 | "node": "23.11.1", 18 | "yarn": "^1.22.22" 19 | }, 20 | "author": "", 21 | "license": "ISC", 22 | "dependencies": { 23 | "@elastic/elasticsearch": "^9.0.0", 24 | "@faker-js/faker": "^10.0.0", 25 | "@inquirer/prompts": "^7.3.1", 26 | "chalk": "^5.2.0", 27 | "cli-progress": "^3.12.0", 28 | "commander": "^14.0.0", 29 | "conf": "^14.0.0", 30 | "form-data": "^4.0.3", 31 | "fp-ts": "^2.16.5", 32 | "globals": "^16.0.0", 33 | "io-ts": "^2.2.21", 34 | "lodash-es": "^4.17.21", 35 | "moment": "^2.29.4", 36 | "node-fetch": "^3.3.1", 37 | "p-map": "^7.0.2", 38 | "readline": "^1.3.0", 39 | "tsx": "^4.7.1", 40 | "url-join": "^5.0.0", 41 | "uuid": "^13.0.0" 42 | }, 43 | "devDependencies": { 44 | "@types/cli-progress": "3.11.6", 45 | "@types/lodash-es": "4.17.12", 46 | "@types/uuid": "11.0.0", 47 | "@typescript-eslint/eslint-plugin": "8.44.1", 48 | "@typescript-eslint/parser": "8.44.1", 49 | "esbuild": "0.25.10", 50 | "eslint": "9.36.0", 51 | "eslint-config-airbnb": "19.0.4", 52 | "eslint-config-airbnb-typescript": "18.0.0", 53 | "eslint-config-prettier": "10.1.8", 54 | "eslint-plugin-check-file": "3.3.0", 55 | "eslint-plugin-import": "2.31.0", 56 | "eslint-plugin-jsx-a11y": "6.10.2", 57 | "eslint-plugin-prettier": "5.5.4", 58 | "eslint-plugin-react": "7.37.5", 59 | "eslint-plugin-react-hooks": "5.2.0", 60 | "prettier": "3.6.2", 61 | "typescript": "5.9.2" 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": 
["local>elastic/renovate-config"] 4 | } 5 | -------------------------------------------------------------------------------- /src/commands/asset_criticality.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { generateNewSeed } from '../constants'; 3 | import { 4 | assignAssetCriticalityToEntities, 5 | createRandomHost, 6 | createRandomUser, 7 | } from './entity_store'; 8 | 9 | /** 10 | * Generate asset criticality 11 | */ 12 | export const generateAssetCriticality = async ({ 13 | users, 14 | hosts, 15 | seed = generateNewSeed(), 16 | space = 'default', 17 | }: { 18 | users: number; 19 | hosts: number; 20 | seed?: number; 21 | space: string; 22 | }) => { 23 | faker.seed(seed); 24 | 25 | try { 26 | const generatedUsers = faker.helpers.multiple(createRandomUser, { 27 | count: users, 28 | }); 29 | 30 | const generatedHosts = faker.helpers.multiple(createRandomHost, { 31 | count: hosts, 32 | }); 33 | 34 | await assignAssetCriticalityToEntities({ 35 | entities: generatedUsers, 36 | field: 'user.name', 37 | space, 38 | }); 39 | console.log(`Assigned asset criticality to ${generatedUsers.length} users`); 40 | await assignAssetCriticalityToEntities({ 41 | entities: generatedHosts, 42 | field: 'host.name', 43 | space, 44 | }); 45 | console.log(`Assigned asset criticality to ${generatedHosts.length} hosts`); 46 | 47 | console.log('Finished generating asset criticality'); 48 | } catch (error) { 49 | console.log('Error: ', error); 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /src/commands/documents.ts: -------------------------------------------------------------------------------- 1 | import createAlerts, { BaseCreateAlertsReturnType } from '../create_alerts'; 2 | import createEvents from '../create_events'; 3 | import eventMappings from '../mappings/eventMappings.json' assert { type: 'json' }; 4 | import { getEsClient, indexCheck } 
from './utils/indices'; 5 | import { getConfig } from '../get_config'; 6 | import { MappingTypeMapping, BulkOperationContainer } from '@elastic/elasticsearch/lib/api/types'; 7 | import pMap from 'p-map'; 8 | import { chunk } from 'lodash-es'; 9 | import cliProgress from 'cli-progress'; 10 | import { faker } from '@faker-js/faker'; 11 | import { getAlertIndex } from '../utils'; 12 | 13 | const generateDocs = async ({ 14 | createDocs, 15 | amount, 16 | index, 17 | }: { 18 | createDocs: DocumentCreator; 19 | amount: number; 20 | index: string; 21 | }) => { 22 | const limit = 30000; 23 | let generated = 0; 24 | 25 | while (generated < amount) { 26 | const docs = createDocuments(Math.min(limit, amount), generated, createDocs, index); 27 | try { 28 | const result = await bulkUpsert(docs); 29 | generated += result.items.length / 2; 30 | } catch (err) { 31 | console.log('Error: ', err); 32 | process.exit(1); 33 | } 34 | } 35 | }; 36 | 37 | const bulkUpsert = async (docs: unknown[]) => { 38 | const client = getEsClient(); 39 | 40 | try { 41 | return client.bulk({ body: docs, refresh: true }); 42 | } catch (err) { 43 | console.log('Error: ', err); 44 | process.exit(1); 45 | } 46 | }; 47 | 48 | interface DocumentCreator { 49 | (descriptor: { id_field: string; id_value: string }): object; 50 | } 51 | 52 | const alertToBatchOps = (alert: BaseCreateAlertsReturnType, index: string): unknown[] => { 53 | return [{ index: { _index: index, _id: alert['kibana.alert.uuid'] } }, { ...alert }]; 54 | }; 55 | 56 | const createDocuments = ( 57 | n: number, 58 | generated: number, 59 | createDoc: DocumentCreator, 60 | index: string 61 | ): unknown[] => { 62 | return Array(n) 63 | .fill(null) 64 | .reduce((acc, _, i) => { 65 | let alert = createDoc({ 66 | id_field: 'host.name', 67 | id_value: `Host ${generated + i}`, 68 | }); 69 | acc.push({ index: { _index: index } }); 70 | acc.push({ ...alert }); 71 | alert = createDoc({ 72 | id_field: 'user.name', 73 | id_value: `User ${generated + i}`, 74 
| }); 75 | acc.push({ index: { _index: index } }); 76 | acc.push({ ...alert }); 77 | return acc; 78 | }, []); 79 | }; 80 | 81 | export const generateAlerts = async ( 82 | alertCount: number, 83 | hostCount: number, 84 | userCount: number, 85 | space: string 86 | ) => { 87 | if (userCount > alertCount) { 88 | console.log('User count should be less than alert count'); 89 | process.exit(1); 90 | } 91 | 92 | if (hostCount > alertCount) { 93 | console.log('Host count should be less than alert count'); 94 | process.exit(1); 95 | } 96 | 97 | console.log( 98 | `Generating ${alertCount} alerts containing ${hostCount} hosts and ${userCount} users in space ${space}` 99 | ); 100 | const concurrency = 10; // how many batches to send in parallel 101 | const batchSize = 2500; // number of alerts in a batch 102 | const no_overrides = {}; 103 | 104 | const batchOpForIndex = ({ userName, hostName }: { userName: string; hostName: string }) => 105 | alertToBatchOps( 106 | createAlerts(no_overrides, { userName, hostName, space }), 107 | getAlertIndex(space) 108 | ); 109 | 110 | console.log('Generating entity names...'); 111 | const userNames = Array.from({ length: userCount }, () => faker.internet.username()); 112 | const hostNames = Array.from({ length: hostCount }, () => faker.internet.domainName()); 113 | 114 | console.log('Assigning entity names...'); 115 | const alertEntityNames = Array.from({ length: alertCount }, (_, i) => ({ 116 | userName: userNames[i % userCount], 117 | hostName: hostNames[i % hostCount], 118 | })); 119 | 120 | console.log('Entity names assigned. Batching...'); 121 | const operationBatches = chunk(alertEntityNames, batchSize).map((batch) => 122 | batch.flatMap(batchOpForIndex) 123 | ); 124 | 125 | console.log('Batching complete. 
Sending to ES...'); 126 | 127 | console.log( 128 | `Sending in ${operationBatches.length} batches of ${batchSize} alerts, with up to ${concurrency} batches in parallel\n\n` 129 | ); 130 | const progress = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic); 131 | 132 | progress.start(operationBatches.length, 0); 133 | 134 | await pMap( 135 | operationBatches, 136 | async (operations) => { 137 | await bulkUpsert(operations); 138 | progress.increment(); 139 | }, 140 | { concurrency } 141 | ); 142 | 143 | progress.stop(); 144 | }; 145 | 146 | // this creates asset criticality not events? 147 | export const generateEvents = async (n: number) => { 148 | const config = getConfig(); 149 | 150 | if (!config.eventIndex) { 151 | throw new Error('eventIndex not defined in config'); 152 | } 153 | await indexCheck(config.eventIndex, { 154 | mappings: eventMappings as MappingTypeMapping, 155 | }); 156 | 157 | console.log('Generating events...'); 158 | 159 | await generateDocs({ 160 | createDocs: createEvents, 161 | amount: n, 162 | index: config.eventIndex, 163 | }); 164 | 165 | console.log('Finished generating events'); 166 | }; 167 | 168 | export const generateGraph = async ({ users = 100, maxHosts = 3 }) => { 169 | console.log('Generating alerts graph...'); 170 | 171 | type AlertOverride = { host: { name: string }; user: { name: string } }; 172 | 173 | const clusters: (ReturnType & AlertOverride)[][] = []; 174 | 175 | /** 176 | * The type you can pass to the bulk API, if you're working with Fake Alerts. 
177 | * This accepts partial docs, full docs, and other docs that indicate _index, _id, and such 178 | */ 179 | type FakeAlertBulkOperations = BulkOperationContainer | Partial; 180 | 181 | const alerts: FakeAlertBulkOperations[] = []; 182 | for (let i = 0; i < users; i++) { 183 | const userCluster = []; 184 | for (let j = 0; j < maxHosts; j++) { 185 | const alert = createAlerts({ 186 | host: { 187 | name: 'Host mark', 188 | }, 189 | user: { 190 | name: 'User pablo', 191 | }, 192 | }); 193 | userCluster.push(alert); 194 | } 195 | clusters.push(userCluster); 196 | } 197 | 198 | let lastAlertFromCluster: (ReturnType & AlertOverride) | null = null; 199 | clusters.forEach((cluster) => { 200 | if (lastAlertFromCluster) { 201 | const alert = createAlerts({ 202 | host: { 203 | name: cluster[0].host.name, 204 | }, 205 | user: { 206 | name: lastAlertFromCluster.user.name, 207 | }, 208 | }); 209 | alerts.push({ 210 | index: { 211 | _index: getAlertIndex('default'), 212 | _id: alert['kibana.alert.uuid'], 213 | }, 214 | }); 215 | alerts.push(alert); 216 | } 217 | cluster.forEach((alert) => { 218 | alerts.push({ 219 | index: { 220 | _index: getAlertIndex('default'), 221 | _id: alert['kibana.alert.uuid'], 222 | }, 223 | }); 224 | alerts.push(alert); 225 | lastAlertFromCluster = alert; 226 | }); 227 | }); 228 | 229 | try { 230 | const client = getEsClient(); 231 | 232 | const result = await client.bulk({ body: alerts, refresh: true }); 233 | console.log(`${result.items.length} alerts created`); 234 | } catch (err) { 235 | console.log('Error: ', err); 236 | } 237 | }; 238 | 239 | export const deleteAllAlerts = async () => { 240 | console.log('Deleting all alerts...'); 241 | try { 242 | console.log('Deleted all alerts'); 243 | const client = getEsClient(); 244 | 245 | await client.deleteByQuery({ 246 | index: '.alerts-security.alerts-*', 247 | refresh: true, 248 | query: { 249 | match_all: {}, 250 | }, 251 | }); 252 | } catch (error) { 253 | console.log('Failed to delete alerts'); 
254 | console.log(error); 255 | } 256 | }; 257 | 258 | export const deleteAllEvents = async () => { 259 | const config = getConfig(); 260 | 261 | console.log('Deleting all events...'); 262 | if (!config.eventIndex) { 263 | throw new Error('eventIndex not defined in config'); 264 | } 265 | try { 266 | console.log('Deleted all events'); 267 | const client = getEsClient(); 268 | 269 | await client.deleteByQuery({ 270 | index: config.eventIndex, 271 | refresh: true, 272 | query: { 273 | match_all: {}, 274 | }, 275 | }); 276 | } catch (error) { 277 | console.log('Failed to delete events'); 278 | console.log(error); 279 | } 280 | }; 281 | -------------------------------------------------------------------------------- /src/commands/entity_resolution.ts: -------------------------------------------------------------------------------- 1 | import { getEsClient, getFileLineCount } from './utils/indices'; 2 | import { 3 | installPackage, 4 | createRule, 5 | getRule, 6 | createComponentTemplate, 7 | buildKibanaUrl, 8 | } from '../utils/kibana_api'; 9 | import pMap from 'p-map'; 10 | import cliProgress from 'cli-progress'; 11 | import readline from 'readline'; 12 | import fs from 'fs'; 13 | import { dirname } from 'path'; 14 | import { fileURLToPath } from 'url'; 15 | 16 | const directoryName = dirname(fileURLToPath(import.meta.url)); 17 | const BATCH_SIZE = 1000; 18 | const CONCURRENCY = 10; 19 | const RULE_ID = 'er-demo-match-all'; 20 | const ECS_USER_MAPPINGS = { 21 | properties: { 22 | 'user.name': { 23 | fields: { 24 | text: { 25 | type: 'match_only_text', 26 | }, 27 | }, 28 | type: 'keyword', 29 | }, 30 | 'user.email': { 31 | fields: { 32 | text: { 33 | type: 'match_only_text', 34 | }, 35 | }, 36 | type: 'keyword', 37 | }, 38 | }, 39 | }; 40 | 41 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 42 | const addMetaToLine = (line: any) => { 43 | line._meta = { 44 | is_demo_data: true, 45 | }; 46 | return line; 47 | }; 48 | 49 | const clearData = async () => { 
50 | const client = getEsClient(); 51 | try { 52 | const res = await client.deleteByQuery({ 53 | index: '*', 54 | query: { 55 | match: { 56 | '_meta.is_demo_data': true, 57 | }, 58 | }, 59 | ignore_unavailable: true, 60 | refresh: true, 61 | }); 62 | 63 | console.log('Deleted log documents: ', res.deleted, '❌'); 64 | } catch (err) { 65 | console.log('Error: ', err); 66 | process.exit(1); 67 | } 68 | 69 | try { 70 | const res1 = await client.deleteByQuery({ 71 | index: '.entities.v1.latest.secsol-ea-entity-store', 72 | query: { 73 | match_all: {}, 74 | }, 75 | ignore_unavailable: true, 76 | refresh: true, 77 | }); 78 | 79 | console.log('Deleted entity store documents: ', res1.deleted, '❌'); 80 | } catch (err) { 81 | console.log('Error: ', err); 82 | process.exit(1); 83 | } 84 | 85 | try { 86 | const res2 = await client.deleteByQuery({ 87 | index: '.entities.v1.history.secsol-ea-entity-store*', 88 | query: { 89 | match_all: {}, 90 | }, 91 | ignore_unavailable: true, 92 | refresh: true, 93 | }); 94 | 95 | console.log('Deleted entity store history documents: ', res2.deleted, '❌'); 96 | } catch (err) { 97 | console.log('Error: ', err); 98 | process.exit(1); 99 | } 100 | 101 | // delete alerts where the rule_id is the one we created 102 | try { 103 | const res3 = await client.deleteByQuery({ 104 | index: '.alerts-security.alerts-*', 105 | refresh: true, 106 | query: { 107 | match: { 108 | 'kibana.alert.rule.parameters.rule_id': RULE_ID, 109 | }, 110 | }, 111 | }); 112 | 113 | console.log('Deleted alerts: ', res3.deleted, '❌'); 114 | } catch (err) { 115 | console.log('Error: ', err); 116 | process.exit(1); 117 | } 118 | }; 119 | 120 | const VARIANT_TYPES = { 121 | DO_NOTHING: 'DO_NOTHING', 122 | INITIAL_FIRSTNAME: 'INITIAL_FIRSTNAME', 123 | INITIAL_LASTNAME: 'INITIAL_LASTNAME', 124 | REMOVE_LASTNAME: 'REMOVE_LASTNAME', 125 | }; 126 | 127 | const VARIANT_TYPE_ORDER = [ 128 | VARIANT_TYPES.DO_NOTHING, 129 | VARIANT_TYPES.DO_NOTHING, 130 | VARIANT_TYPES.INITIAL_FIRSTNAME, 
131 | VARIANT_TYPES.INITIAL_LASTNAME, 132 | VARIANT_TYPES.REMOVE_LASTNAME, 133 | ]; 134 | 135 | const getVariantType = (index: number) => { 136 | return VARIANT_TYPE_ORDER[index % VARIANT_TYPE_ORDER.length]; 137 | }; 138 | type MaybeStringArray = string | string[]; 139 | 140 | const getEmailVariant = (email: string | string[], index: number): MaybeStringArray => { 141 | try { 142 | if (Array.isArray(email)) { 143 | // this means there are already variants 144 | return email; 145 | } 146 | const [name, domain] = email.split('@'); 147 | const [first, last] = name.split('.'); 148 | 149 | if (!first || !last || !domain) { 150 | console.log('Unexpected email format: ', email); 151 | return email; 152 | } 153 | switch (getVariantType(index)) { 154 | case VARIANT_TYPES.DO_NOTHING: 155 | return email; 156 | case VARIANT_TYPES.INITIAL_FIRSTNAME: 157 | return `${first[0]}.${last}@${domain}`; 158 | case VARIANT_TYPES.INITIAL_LASTNAME: 159 | return `${first}.${last[0]}@${domain}`; 160 | case VARIANT_TYPES.REMOVE_LASTNAME: 161 | return `${first}@${domain}`; 162 | } 163 | console.log('Unexpected variant type: ', getVariantType(index)); 164 | return email; 165 | } catch (err) { 166 | console.log(`Error creating email variant ${email}: `, err); 167 | process.exit(1); 168 | } 169 | }; 170 | 171 | const dataStreamFieldsToIndexName = (dataStreamFields: { 172 | dataset: string; 173 | namespace: string; 174 | type: string; 175 | }) => { 176 | return `${dataStreamFields.type}-${dataStreamFields.dataset}-${dataStreamFields.namespace}`; 177 | }; 178 | 179 | const getTimeStamp = () => { 180 | // last minute 181 | // const now = new Date(); 182 | // const randomOffset = Math.floor(Math.random() * 60); 183 | // return new Date(now.getTime() - randomOffset * 60 * 1000).toISOString(); 184 | 185 | return new Date().toISOString(); 186 | }; 187 | 188 | const bulkUpsert = async (docs: unknown[]) => { 189 | const client = getEsClient(); 190 | 191 | try { 192 | return client.bulk({ body: docs, 
refresh: true }); 193 | } catch (err) { 194 | console.log('Error: ', err); 195 | process.exit(1); 196 | } 197 | }; 198 | 199 | const PACKAGES_TO_INSTALL = ['entityanalytics_okta', 'okta', 'system', 'entityanalytics_entra_id']; 200 | 201 | const installPackages = async (space: string) => { 202 | console.log('Installing packages...'); 203 | const progress = new cliProgress.SingleBar( 204 | { 205 | clearOnComplete: true, 206 | }, 207 | cliProgress.Presets.shades_classic 208 | ); 209 | progress.start(PACKAGES_TO_INSTALL.length, 0); 210 | await pMap( 211 | PACKAGES_TO_INSTALL, 212 | async (packageName) => { 213 | await installPackage({ packageName, space }); 214 | progress.increment(); 215 | }, 216 | { concurrency: 1 } 217 | ); 218 | progress.stop(); 219 | }; 220 | 221 | // take a jsonl file and return a generator which yields batches of operations 222 | const jsonlFileToBatchGenerator = ( 223 | filePath: string, 224 | batchSize: number, 225 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 226 | lineToOperation: (line: any, index: number) => [any, any] 227 | ): AsyncGenerator => { 228 | const rl = readline.createInterface({ 229 | input: fs.createReadStream(filePath), 230 | }); 231 | 232 | const generator = async function* () { 233 | let batch: unknown[] = []; 234 | let i = 0; 235 | for await (const line of rl) { 236 | const lineJson = JSON.parse(line); 237 | const lineWithMeta = addMetaToLine(lineJson); 238 | const [index, doc] = lineToOperation(lineWithMeta, i); 239 | batch.push(index); 240 | batch.push(doc); 241 | if (batch.length / 2 >= batchSize) { 242 | yield batch; 243 | batch = []; 244 | } 245 | i++; 246 | } 247 | if (batch.length > 0) { 248 | yield batch; 249 | } 250 | }; 251 | 252 | return generator(); 253 | }; 254 | 255 | const getFilePath = (fileName: string, mini: boolean) => { 256 | return directoryName + `/../../data/entity_resolution_data/${mini ? 
'mini_' : ''}${fileName}`; 257 | }; 258 | 259 | const importLogData = async ({ 260 | mini = false, 261 | keepEmails = false, 262 | }: { 263 | mini: boolean; 264 | keepEmails: boolean; 265 | }) => { 266 | const filePath = getFilePath('generated_logs.jsonl', mini); 267 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 268 | const lineToOperation = (line: any, i: number): [any, any] => { 269 | if ( 270 | line.data_stream && 271 | line.data_stream.dataset && 272 | line.data_stream.namespace && 273 | line.data_stream.type 274 | ) { 275 | const index = dataStreamFieldsToIndexName(line.data_stream); 276 | line['@timestamp'] = getTimeStamp(); 277 | if (line.user && line.user.email) { 278 | line.user.email = keepEmails ? line.user.email : getEmailVariant(line.user.email, i); 279 | } 280 | return [{ create: { _index: index } }, line]; 281 | } else { 282 | throw new Error(`Invalid log data line ${JSON.stringify(line)}`); 283 | } 284 | }; 285 | 286 | console.log('Importing log data...'); 287 | await importFile(filePath, lineToOperation); 288 | }; 289 | 290 | const createOktaSystemComponentTemplate = async () => { 291 | console.log('Creating okta system custom component template...'); 292 | await createComponentTemplate({ 293 | name: 'logs-okta.system@custom', 294 | mappings: ECS_USER_MAPPINGS, 295 | }); 296 | }; 297 | 298 | const importOktaSystemData = async ({ 299 | mini = false, 300 | keepEmails = false, 301 | }: { 302 | mini: boolean; 303 | keepEmails: boolean; 304 | }) => { 305 | const filePath = getFilePath('okta_system_generated.jsonl', mini); 306 | const index = 'logs-okta.system-default'; 307 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 308 | const lineToOperation = (line: any, i: number): [any, any] => { 309 | line['@timestamp'] = getTimeStamp(); 310 | line.user = { 311 | name: line.actor.display_name, 312 | email: keepEmails ? 
line.actor.alternate_id : getEmailVariant(line.actor.alternate_id, i), 313 | }; 314 | return [{ create: { _index: index } }, line]; 315 | }; 316 | console.log('Importing Okta system data...'); 317 | await importFile(filePath, lineToOperation); 318 | }; 319 | 320 | const createOktaUserComponentTemplate = async () => { 321 | console.log('Creating okta user custom component template...'); 322 | await createComponentTemplate({ 323 | name: 'logs-entityanalytics_okta.user@custom', 324 | mappings: ECS_USER_MAPPINGS, 325 | }); 326 | }; 327 | 328 | const createEntraIdUserComponentTemplate = async () => { 329 | console.log('Creating entra id user custom component template...'); 330 | await createComponentTemplate({ 331 | name: 'logs-entityanalytics_entra_id.user@custom', 332 | mappings: ECS_USER_MAPPINGS, 333 | }); 334 | }; 335 | 336 | const importOktaUserData = async ({ 337 | mini = false, 338 | keepEmails = false, 339 | }: { 340 | mini: boolean; 341 | keepEmails: boolean; 342 | }) => { 343 | const filePath = getFilePath('okta_user_generated.jsonl', mini); 344 | const index = 'logs-entityanalytics_okta.user-default'; 345 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 346 | const lineToOperation = (line: any, i: number): [any, any] => { 347 | line['@timestamp'] = getTimeStamp(); 348 | line.user = { 349 | name: line.profile.first_name + ' ' + line.profile.last_name, 350 | email: keepEmails ? 
line.email : getEmailVariant(line.email, i), 351 | }; 352 | return [{ create: { _index: index } }, line]; 353 | }; 354 | console.log('Importing Okta user data...'); 355 | await importFile(filePath, lineToOperation); 356 | }; 357 | 358 | const importEntraIdUserData = async ({ 359 | mini = false, 360 | keepEmails = false, 361 | }: { 362 | mini: boolean; 363 | keepEmails: boolean; 364 | }) => { 365 | const filePath = getFilePath('entra_id_user_generated.jsonl', mini); 366 | const index = 'logs-entityanalytics_entra_id.user-default'; 367 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 368 | const lineToOperation = (line: any, i: number): [any, any] => { 369 | line['@timestamp'] = getTimeStamp(); 370 | line.user = { 371 | name: line.azure_ad.displayName, 372 | email: keepEmails ? line.azure_ad.mail : getEmailVariant(line.azure_ad.mail, i), 373 | }; 374 | return [{ create: { _index: index } }, line]; 375 | }; 376 | console.log('Importing Entra ID user data...'); 377 | await importFile(filePath, lineToOperation); 378 | }; 379 | 380 | const importFile = async ( 381 | filePath: string, 382 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 383 | lineToOperation: (line: any, index: number) => [any, any] 384 | ) => { 385 | const lineCountInFile = await getFileLineCount(filePath); 386 | const batchGenerator = jsonlFileToBatchGenerator(filePath, BATCH_SIZE, lineToOperation); 387 | await batchIndexDocsWithProgress(batchGenerator, lineCountInFile); 388 | }; 389 | 390 | const createMatchAllRule = async (space: string) => { 391 | const rule = await getRule(RULE_ID, space); 392 | 393 | if (rule) { 394 | console.log('Match all rule already exists.'); 395 | return; 396 | } 397 | 398 | await createRule({ 399 | id: RULE_ID, 400 | space, 401 | }); 402 | console.log('Match all rule created.'); 403 | }; 404 | 405 | const batchIndexDocsWithProgress = async ( 406 | generator: AsyncGenerator, 407 | docCount: number 408 | ) => { 409 | const progress = new 
cliProgress.SingleBar( 410 | { 411 | clearOnComplete: true, 412 | }, 413 | cliProgress.Presets.shades_classic 414 | ); 415 | progress.start(docCount, 0); 416 | await pMap( 417 | generator, 418 | async (operations) => { 419 | const res = await bulkUpsert(operations); 420 | if (res.errors) { 421 | progress.stop(); 422 | console.log('Failed to index documents' + JSON.stringify(res)); 423 | process.exit(1); 424 | } 425 | progress.increment(operations.length / 2); 426 | }, 427 | { concurrency: CONCURRENCY } 428 | ); 429 | 430 | progress.stop(); 431 | console.log('Indexed ', docCount, '✅'); 432 | }; 433 | 434 | export const setupEntityResolutionDemo = async ({ 435 | mini = false, 436 | deleteData = false, 437 | keepEmails = false, 438 | space, 439 | }: { 440 | mini: boolean; 441 | deleteData: boolean; 442 | keepEmails: boolean; 443 | space: string; 444 | }) => { 445 | if (deleteData) { 446 | console.log('Deleting existing demo data first...'); 447 | await clearData(); 448 | } 449 | 450 | console.log(`Setting up${mini ? 
' mini' : ''} entity resolution demo...`); 451 | // create a rule which matches everything, handy for exploring all the different entity views 452 | await createMatchAllRule(space); 453 | // install the packages to get the mappings in place 454 | await installPackages(space); 455 | // create @custom component templates to get user.name and user.email field mappings 456 | // which the inttegrations don't provide 457 | // we will eventually have to release a new version of the integrations to include these mappings 458 | await createOktaSystemComponentTemplate(); 459 | await createOktaUserComponentTemplate(); 460 | await createEntraIdUserComponentTemplate(); 461 | // now load all the data 462 | await importLogData({ mini, keepEmails }); 463 | await importOktaSystemData({ mini, keepEmails }); 464 | await importOktaUserData({ mini, keepEmails }); 465 | await importEntraIdUserData({ mini, keepEmails }); 466 | console.log(` 467 | Entity resolution demo setup complete. 468 | 469 | Now go and install the model! 470 | 471 | CLICK HERE ---->> ${buildKibanaUrl({ path: '/app/security/entity_analytics_management', space })} <<---- CLICK HERE 472 | 473 | Once installed, ${mini ? 
'Mark Hopkin should have matches' : 'See here:\n\n https://github.com/elastic/security-ml/blob/gus/entity_resoluton_data_generation/projects/entity_resolution_poc_2024/test_data_generation/seed_data_with_name_variations_and_user_agent_gen_and_groups.json \n\nfor all the seed data names'} 474 | `); 475 | }; 476 | -------------------------------------------------------------------------------- /src/commands/entity_store.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { getEsClient, indexCheck, createAgentDocument } from './utils/indices'; 3 | import { chunk, once } from 'lodash-es'; 4 | import moment from 'moment'; 5 | import auditbeatMappings from '../mappings/auditbeat.json' assert { type: 'json' }; 6 | import { assignAssetCriticality, enableRiskScore, createRule } from '../utils/kibana_api'; 7 | import { 8 | ASSET_CRITICALITY, 9 | AssetCriticality, 10 | ENTITY_STORE_OPTIONS, 11 | generateNewSeed, 12 | } from '../constants'; 13 | import { 14 | BulkOperationContainer, 15 | BulkUpdateAction, 16 | MappingTypeMapping, 17 | } from '@elastic/elasticsearch/lib/api/types'; 18 | import { getConfig } from '../get_config'; 19 | import { initializeSpace } from '../utils'; 20 | 21 | const EVENT_INDEX_NAME = 'auditbeat-8.12.0-2024.01.18-000001'; 22 | const AGENT_INDEX_NAME = '.fleet-agents-7'; 23 | 24 | const getClient = () => { 25 | const client = getEsClient(); 26 | 27 | if (!client) { 28 | throw new Error('failed to create ES client'); 29 | } 30 | return client; 31 | }; 32 | 33 | const getOffset = (offsetHours?: number) => { 34 | const config = getConfig(); 35 | 36 | if (config.eventDateOffsetHours !== undefined) { 37 | once(() => console.log(`Using event date offset: ${config.eventDateOffsetHours} hours`)); 38 | 39 | return config.eventDateOffsetHours; 40 | } 41 | 42 | if (offsetHours !== undefined) { 43 | return offsetHours; 44 | } 45 | 46 | return faker.number.int({ max: 10 }); 47 
| }; 48 | 49 | type Agent = ReturnType; 50 | 51 | enum EntityTypes { 52 | User = 'user', 53 | Host = 'host', 54 | Service = 'service', 55 | Generic = 'generic', 56 | } 57 | 58 | interface BaseEntity { 59 | name: string; 60 | assetCriticality: AssetCriticality; 61 | entity?: { 62 | EngineMetadata: { 63 | Type: string; 64 | }; 65 | source: string; 66 | type: string; 67 | sub_type: string; 68 | name: string; 69 | id: string; 70 | }; 71 | } 72 | interface User extends BaseEntity { 73 | type: EntityTypes.User; 74 | } 75 | 76 | interface Host extends BaseEntity { 77 | type: EntityTypes.Host; 78 | } 79 | 80 | interface Service extends BaseEntity { 81 | type: EntityTypes.Service; 82 | } 83 | 84 | interface GenericEntity extends BaseEntity { 85 | id: string; 86 | type: string; 87 | } 88 | 89 | interface BaseEvent { 90 | '@timestamp': string; 91 | message: string; 92 | service?: { 93 | type: string; 94 | }; 95 | } 96 | 97 | interface EventUser { 98 | name: string; 99 | id: number; 100 | } 101 | 102 | interface EventHost { 103 | name: string; 104 | id: number; 105 | ip: string; 106 | mac: string; 107 | os: { 108 | name: string; 109 | }; 110 | } 111 | 112 | interface ServiceEvent extends BaseEvent { 113 | service: { 114 | node: { 115 | roles: string; 116 | name: string; 117 | }; 118 | environment: string; 119 | address: string; 120 | name: string; 121 | id: string; 122 | state: string; 123 | ephemeral_id: string; 124 | type: string; 125 | version: string; 126 | }; 127 | } 128 | 129 | interface GenericEntityEvent extends BaseEvent { 130 | event: { 131 | ingested: string; 132 | dataset: string; 133 | module: string; 134 | }; 135 | cloud: { 136 | provider: string; 137 | region: string; 138 | account: { 139 | name: string; 140 | id: string; 141 | }; 142 | }; 143 | entity?: { 144 | type: string; 145 | sub_type?: string; 146 | name: string; 147 | id: string; 148 | }; 149 | } 150 | 151 | interface UserEvent extends BaseEvent { 152 | user: EventUser; 153 | host?: EventHost; 154 | } 
155 | 156 | interface HostEvent extends BaseEvent { 157 | host: EventHost; 158 | user?: EventUser; 159 | } 160 | 161 | type Event = UserEvent | HostEvent | ServiceEvent | GenericEntityEvent; 162 | 163 | export const createRandomUser = (): User => { 164 | return { 165 | name: `User-${faker.internet.username()}`, 166 | assetCriticality: faker.helpers.arrayElement(ASSET_CRITICALITY), 167 | type: EntityTypes.User, 168 | }; 169 | }; 170 | 171 | export const createRandomHost = (): Host => { 172 | return { 173 | name: `Host-${faker.internet.domainName()}`, 174 | assetCriticality: faker.helpers.arrayElement(ASSET_CRITICALITY), 175 | type: EntityTypes.Host, 176 | }; 177 | }; 178 | 179 | export const createRandomService = (): Service => { 180 | return { 181 | name: `Service-${faker.hacker.noun()}`, 182 | assetCriticality: faker.helpers.arrayElement(ASSET_CRITICALITY), 183 | type: EntityTypes.Service, 184 | }; 185 | }; 186 | 187 | const genericTypes = [ 188 | { type: 'Messaging Service', subType: 'AWS SNS Topic' }, 189 | { type: 'Storage Service', subType: 'AWS S3 Bucket' }, 190 | { type: 'Compute Service', subType: 'AWS EC2 Instance' }, 191 | { type: 'Database Service', subType: 'AWS RDS Instance' }, 192 | { type: 'Compute Service', subType: 'AWS Lambda Function' }, 193 | { type: 'Network Service', subType: 'AWS VPC' }, 194 | { type: 'Storage Service', subType: 'AWS EBS Volume' }, 195 | { type: 'Database Service', subType: 'AWS DynamoDB Table' }, 196 | { type: 'Compute Service', subType: 'AWS ECS Service' }, 197 | { type: 'Network Service', subType: 'AWS Load Balancer' }, 198 | ]; 199 | 200 | export const createRandomGenericEntity = (): GenericEntity => { 201 | const taxonomy = genericTypes[Math.floor(Math.random() * genericTypes.length)]; 202 | 203 | const resourceName = `${taxonomy.subType.toLowerCase().replace(/\s+/g, '-')}-${faker.string.alphanumeric(8)}`; 204 | const regions = ['us-east-1', 'us-west-2', 'eu-west-1', 'eu-central-1', 'ap-southeast-1']; 205 | const region 
= faker.helpers.arrayElement(regions); 206 | const accountId = faker.string.numeric(12); // Generate AWS ARN-style ID based on service type 207 | let resourceId: string; 208 | if (taxonomy.subType.includes('SNS')) { 209 | resourceId = `arn:aws:sns:${region}:${accountId}:${resourceName}`; 210 | } else if (taxonomy.subType.includes('S3')) { 211 | resourceId = `arn:aws:s3:::${resourceName}`; 212 | } else if (taxonomy.subType.includes('EC2')) { 213 | resourceId = `arn:aws:ec2:${region}:${accountId}:instance/${faker.string.alphanumeric(17)}`; 214 | } else if (taxonomy.subType.includes('RDS')) { 215 | resourceId = `arn:aws:rds:${region}:${accountId}:db:${resourceName}`; 216 | } else if (taxonomy.subType.includes('Lambda')) { 217 | resourceId = `arn:aws:lambda:${region}:${accountId}:function:${resourceName}`; 218 | } else if (taxonomy.subType.includes('VPC')) { 219 | resourceId = `arn:aws:ec2:${region}:${accountId}:vpc/${faker.string.alphanumeric(17)}`; 220 | } else if (taxonomy.subType.includes('EBS')) { 221 | resourceId = `arn:aws:ec2:${region}:${accountId}:volume/${faker.string.alphanumeric(17)}`; 222 | } else if (taxonomy.subType.includes('DynamoDB')) { 223 | resourceId = `arn:aws:dynamodb:${region}:${accountId}:table/${resourceName}`; 224 | } else if (taxonomy.subType.includes('ECS')) { 225 | resourceId = `arn:aws:ecs:${region}:${accountId}:service/${resourceName}`; 226 | } else if (taxonomy.subType.includes('Load Balancer')) { 227 | resourceId = `arn:aws:elasticloadbalancing:${region}:${accountId}:loadbalancer/${resourceName}`; 228 | } else { 229 | resourceId = `arn:aws:${taxonomy.subType.toLowerCase().replace(/\s+/g, '-')}:${region}:${accountId}:${resourceName}`; 230 | } 231 | 232 | return { 233 | name: resourceName, 234 | assetCriticality: faker.helpers.arrayElement(ASSET_CRITICALITY), 235 | id: resourceId, 236 | type: taxonomy.type, 237 | entity: { 238 | EngineMetadata: { 239 | Type: EntityTypes.Generic, 240 | }, 241 | source: resourceId, 242 | type: 
taxonomy.type, 243 | sub_type: taxonomy.subType, 244 | name: resourceName, 245 | id: resourceId, 246 | }, 247 | }; 248 | }; 249 | 250 | export const createRandomEventForHost = (host: Host, offsetHours?: number): HostEvent => ({ 251 | '@timestamp': moment() 252 | .subtract(getOffset(offsetHours), 'h') 253 | .format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'), 254 | message: `Host ${faker.hacker.phrase()}`, 255 | service: { 256 | type: 'system', 257 | }, 258 | host: { 259 | name: host.name, 260 | id: faker.number.int({ max: 10000 }), 261 | ip: faker.internet.ip(), 262 | mac: faker.internet.mac(), 263 | os: { 264 | name: faker.helpers.arrayElement(['Windows', 'Linux', 'MacOS']), 265 | }, 266 | }, 267 | }); 268 | 269 | export const createRandomEventForUser = (user: User, offsetHours?: number): UserEvent => ({ 270 | '@timestamp': moment() 271 | .subtract(getOffset(offsetHours), 'h') 272 | .format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'), 273 | message: `User ${faker.hacker.phrase()}`, 274 | service: { 275 | type: 'system', 276 | }, 277 | user: { 278 | name: user.name, 279 | id: faker.number.int({ max: 10000 }), 280 | }, 281 | }); 282 | 283 | export const createRandomEventForService = ( 284 | service: Service, 285 | offsetHours?: number 286 | ): ServiceEvent => ({ 287 | '@timestamp': moment() 288 | .subtract(getOffset(offsetHours), 'h') 289 | .format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'), 290 | message: `Service ${faker.hacker.phrase()}`, 291 | service: { 292 | node: { 293 | roles: faker.helpers.arrayElement(['master', 'data', 'ingest']), 294 | name: faker.internet.domainWord(), 295 | }, 296 | environment: faker.helpers.arrayElement(['production', 'staging', 'development']), 297 | address: faker.internet.ip(), 298 | name: service.name, 299 | id: faker.string.nanoid(), 300 | state: faker.helpers.arrayElement(['running', 'stopped', 'starting']), 301 | ephemeral_id: faker.string.nanoid(), 302 | type: 'system', 303 | version: faker.system.semver(), 304 | }, 305 | }); 306 | 307 | const 
/**
 * Build a cloud-asset-inventory "discovered" event for a generic entity.
 * Cloud metadata is always AWS, matching the entity generator which only
 * produces AWS resources.
 */
const createRandomEventForGenericEntity = (
  entity: GenericEntity,
  offsetHours?: number
): GenericEntityEvent => {
  // Always use AWS since we're generating AWS resources
  const cloudProvider = 'aws';

  const service = {
    type: entity.type,
    subType: entity.entity?.sub_type,
  };

  const regions = ['us-east-1', 'us-west-2', 'eu-west-1', 'eu-central-1', 'ap-southeast-1'];
  // NOTE(review): region is re-randomized here and may not match the region
  // embedded in the entity's ARN id — confirm whether that matters downstream.
  const region = faker.helpers.arrayElement(regions);

  return {
    '@timestamp': moment()
      .subtract(getOffset(offsetHours), 'h')
      .format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'),
    message: `${service.subType} entity discovered`,
    event: {
      ingested: moment().format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'),
      dataset: 'cloud_asset_inventory.asset_inventory',
      module: 'cloud_asset_inventory',
    },
    cloud: {
      provider: cloudProvider,
      region: region,
      // A fresh fake account per event; not correlated with the entity.
      account: {
        name: faker.company.name().toLowerCase().replace(/\s+/g, '-'),
        id: faker.string.numeric(12),
      },
    },
    entity: {
      type: service.type,
      sub_type: service.subType,
      name: entity.name,
      id: entity.id,
    },
  };
};

// Bulk-ingest generated events into the shared event index using the
// auditbeat mappings.
const ingestEvents = async (events: Event[]) =>
  ingest(EVENT_INDEX_NAME, events, auditbeatMappings as MappingTypeMapping);

type TDocument = object;
type TPartialDocument = Partial<TDocument>;

// Bulk-ingest fleet agent documents (no explicit mapping).
const ingestAgents = async (agents: Agent[]) => ingest(AGENT_INDEX_NAME, agents);
BulkOperationContainer 376 | | BulkUpdateAction 377 | | TDocument 378 | )[], 379 | event 380 | ) => { 381 | acc.push({ index: { _index: index } }); 382 | acc.push(event); 383 | return acc; 384 | }, 385 | [] 386 | ); 387 | 388 | const client = getClient(); 389 | await client.bulk({ operations: ingestRequest, refresh: true }); 390 | } catch (err) { 391 | console.log('Error: ', err); 392 | } 393 | } 394 | }; 395 | 396 | // E = Entity, EV = Event 397 | export const generateEvents = ( 398 | entities: E[], 399 | createEvent: (entity: E, offsetHours?: number) => EV, 400 | offsetHours?: number 401 | ): EV[] => { 402 | const eventsPerEntity = 10; 403 | const acc: EV[] = []; 404 | return entities.reduce((acc, entity) => { 405 | const events = faker.helpers.multiple(() => createEvent(entity, offsetHours), { 406 | count: eventsPerEntity, 407 | }); 408 | acc.push(...events); 409 | return acc; 410 | }, acc); 411 | }; 412 | 413 | export const assignAssetCriticalityToEntities = async (opts: { 414 | entities: BaseEntity[]; 415 | field: string; 416 | space?: string; 417 | }) => { 418 | const { entities, field, space } = opts; 419 | const chunks = chunk(entities, 10000); 420 | for (const chunk of chunks) { 421 | const records = chunk 422 | .filter(({ assetCriticality }) => assetCriticality !== 'unknown') 423 | .map(({ name, assetCriticality }) => ({ 424 | id_field: field, 425 | id_value: name, 426 | criticality_level: assetCriticality, 427 | })); 428 | 429 | if (records.length > 0) { 430 | await assignAssetCriticality(records, space); 431 | } 432 | } 433 | }; 434 | 435 | /** 436 | * Generate entities first 437 | * Then Generate events, assign asset criticality, create rule and enable risk engine 438 | */ 439 | export const generateEntityStore = async ({ 440 | users = 10, 441 | hosts = 10, 442 | services = 10, 443 | genericEntities = 10, 444 | seed = generateNewSeed(), 445 | space, 446 | options, 447 | offsetHours = 10, 448 | }: { 449 | users: number; 450 | hosts: number; 451 | 
/**
 * Generate entities first
 * Then Generate events, assign asset criticality, create rule and enable risk engine
 *
 * Behavior is gated by `options` (see ENTITY_STORE_OPTIONS): seeding faker,
 * assigning asset criticality, enabling the risk engine, creating a rule,
 * and ingesting fleet agent docs for generated hosts.
 */
export const generateEntityStore = async ({
  users = 10,
  hosts = 10,
  services = 10,
  genericEntities = 10,
  seed = generateNewSeed(),
  space,
  options,
  offsetHours = 10,
}: {
  users: number;
  hosts: number;
  services: number;
  genericEntities: number;
  seed: number;
  space?: string;
  options: string[];
  offsetHours?: number;
}) => {
  // Seeding makes the faker output reproducible across runs.
  if (options.includes(ENTITY_STORE_OPTIONS.seed)) {
    faker.seed(seed);
  }
  try {
    const generatedUsers = faker.helpers.multiple(createRandomUser, {
      count: users,
    });

    const generatedHosts = faker.helpers.multiple(createRandomHost, {
      count: hosts,
    });

    const generatedGenericEntities = faker.helpers.multiple(createRandomGenericEntity, {
      count: genericEntities,
    });

    const eventsForUsers: UserEvent[] = generateEvents(
      generatedUsers,
      createRandomEventForUser,
      offsetHours
    );
    const eventsForHosts: HostEvent[] = generateEvents(
      generatedHosts,
      createRandomEventForHost,
      offsetHours
    );

    const generatedServices: Service[] = faker.helpers.multiple(createRandomService, {
      count: services,
    });

    const eventsForServices: ServiceEvent[] = generateEvents(
      generatedServices,
      createRandomEventForService,
      offsetHours
    );

    const eventsForGenericEntities: GenericEntityEvent[] = generateEvents(
      generatedGenericEntities,
      createRandomEventForGenericEntity,
      offsetHours
    );

    // Cross-link a random prefix of user events with hosts and vice versa so
    // some events correlate users and hosts.
    const relational = matchUsersAndHosts(eventsForUsers, eventsForHosts);

    await ingestEvents(relational.users);
    console.log('Users events ingested');
    await ingestEvents(relational.hosts);
    console.log('Hosts events ingested');
    await ingestEvents(eventsForServices);
    console.log('Services events ingested');
    await ingestEvents(eventsForGenericEntities);
    console.log('Generic Entities events ingested');

    // The Kibana space must exist before the space-scoped API calls below.
    if (space && space !== 'default') {
      await initializeSpace(space);
    }

    if (options.includes(ENTITY_STORE_OPTIONS.criticality)) {
      await assignAssetCriticalityToEntities({
        entities: generatedUsers,
        field: 'user.name',
        space,
      });
      console.log('Assigned asset criticality to users');
      await assignAssetCriticalityToEntities({
        entities: generatedHosts,
        field: 'host.name',
        space,
      });
      console.log('Assigned asset criticality to hosts');
      // NOTE(review): services and generic entities never get criticality —
      // confirm this is intentional.
    }

    if (options.includes(ENTITY_STORE_OPTIONS.riskEngine)) {
      await enableRiskScore(space);
      console.log('Risk score enabled');
    }

    if (options.includes(ENTITY_STORE_OPTIONS.rule)) {
      await createRule({ space });
      console.log('Rule created');
    }

    if (options.includes(ENTITY_STORE_OPTIONS.agent)) {
      const agents = generatedHosts.map((host) => createAgentDocument({ hostname: host.name }));
      await ingestAgents(agents);
      console.log('Agents ingested');
    }

    console.log('Finished generating entity store');
  } catch (error) {
    console.log('Error: ', error);
  }
};

/**
 * Delete all generated entity-store data: every event in the shared event
 * index and all asset criticality records (default space only).
 */
export const cleanEntityStore = async () => {
  console.log('Deleting all entity-store data...');
  try {
    // NOTE(review): this logs before the delete actually completes.
    console.log('Deleted all events');
    const client = getClient();
    await client.deleteByQuery({
      index: EVENT_INDEX_NAME,
      refresh: true,
      query: {
        match_all: {},
      },
    });

    console.log('Deleted asset criticality');
    await client.deleteByQuery({
      index: '.asset-criticality.asset-criticality-default',
      refresh: true,
      query: {
        match_all: {},
      },
    });
  } catch (error) {
    console.log('Failed to clean data');
    console.log(error);
  }
};
591 | .slice(0, splitIndex) 592 | .map((user) => { 593 | const index = faker.number.int({ max: hosts.length - 1 }); 594 | return { ...user, host: hosts[index].host } as UserEvent; 595 | }) 596 | .concat(users.slice(splitIndex)) as UserEvent[], 597 | 598 | hosts: hosts 599 | .slice(0, splitIndex) 600 | .map((host) => { 601 | const index = faker.number.int({ max: users.length - 1 }); 602 | return { ...host, user: users[index].user } as HostEvent; 603 | }) 604 | .concat(hosts.slice(splitIndex)), 605 | }; 606 | }; 607 | -------------------------------------------------------------------------------- /src/commands/entity_store_perf.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import fs from 'fs'; 3 | import cliProgress from 'cli-progress'; 4 | import { getEsClient, getFileLineCount } from './utils/indices'; 5 | import readline from 'readline'; 6 | import { deleteEngines, initEntityEngineForEntityTypes } from '../utils/kibana_api'; 7 | import { get } from 'lodash-es'; 8 | import { dirname } from 'path'; 9 | import { fileURLToPath } from 'url'; 10 | import { getConfig } from '../get_config'; 11 | import * as path from 'path'; 12 | 13 | const config = getConfig(); 14 | 15 | interface EntityFields { 16 | id: string; 17 | name: string; 18 | type: string; 19 | sub_type: string; 20 | address: string; 21 | } 22 | 23 | interface HostFields { 24 | entity: EntityFields; 25 | host: { 26 | hostname?: string; 27 | domain?: string; 28 | ip?: string[]; 29 | name: string; 30 | id?: string; 31 | type?: string; 32 | mac?: string[]; 33 | architecture?: string[]; 34 | }; 35 | } 36 | 37 | interface UserFields { 38 | entity: EntityFields; 39 | user: { 40 | full_name?: string[]; 41 | domain?: string; 42 | roles?: string[]; 43 | name: string; 44 | id?: string; 45 | email?: string[]; 46 | hash?: string[]; 47 | }; 48 | } 49 | 50 | let stop = false; 51 | 52 | process.on('SIGINT', () => { 53 | 
console.log('Caught interrupt signal (Ctrl + C), stopping...'); 54 | stop = true; 55 | }); 56 | 57 | const generateIpAddresses = (startIndex: number, count: number) => { 58 | const ips = []; 59 | for (let i = 0; i < count; i++) { 60 | ips.push(`192.168.1.${startIndex + i}`); 61 | } 62 | return ips; 63 | }; 64 | 65 | const generateMacAddresses = (startIndex: number, count: number) => { 66 | const macs = []; 67 | for (let i = 0; i < count; i++) { 68 | const macPart = (startIndex + i) 69 | .toString(16) 70 | .padStart(12, '0') 71 | .match(/.{1,2}/g) 72 | ?.join(':'); 73 | macs.push(macPart ? macPart : '00:00:00:00:00:00'); 74 | } 75 | return macs; 76 | }; 77 | 78 | interface GeneratorOptions { 79 | entityIndex: number; 80 | valueStartIndex: number; 81 | fieldLength: number; 82 | idPrefix: string; 83 | } 84 | 85 | const getLogsPerEntity = (filePath: string) => { 86 | return new Promise((resolve, reject) => { 87 | let idField: string | undefined; 88 | let idValue: string | undefined; 89 | let count: number = 0; 90 | let resolved = false; 91 | const readStream = fs.createReadStream(filePath); 92 | 93 | const rl = readline.createInterface({ 94 | input: readStream, 95 | crlfDelay: Infinity, 96 | }); 97 | 98 | rl.on('line', (line) => { 99 | const doc = JSON.parse(line); 100 | if (!idField) { 101 | if (doc.host) { 102 | idField = 'host.name'; 103 | idValue = doc.host.name; 104 | } else { 105 | idField = 'user.name'; 106 | idValue = doc.user.name; 107 | } 108 | } 109 | 110 | const docId = get(doc, idField); 111 | if (docId !== idValue && !resolved) { 112 | resolved = true; 113 | rl.close(); 114 | resolve(count); 115 | } else { 116 | count++; 117 | } 118 | }); 119 | 120 | rl.on('error', (err) => { 121 | reject(err); 122 | }); 123 | }); 124 | }; 125 | 126 | const generateHostFields = ({ 127 | entityIndex, 128 | valueStartIndex, 129 | fieldLength, 130 | idPrefix, 131 | }: GeneratorOptions): HostFields => { 132 | const id = `${idPrefix}-host-${entityIndex}`; 133 | return { 134 | 
entity: { 135 | id: id, 136 | name: id, 137 | type: 'host', 138 | sub_type: 'aws_ec2_instance', 139 | address: `example.${idPrefix}.com`, 140 | }, 141 | host: { 142 | id: id, 143 | name: id, 144 | hostname: `${id}.example.${idPrefix}.com`, 145 | domain: `example.${idPrefix}.com`, 146 | ip: generateIpAddresses(valueStartIndex, fieldLength), 147 | mac: generateMacAddresses(valueStartIndex, fieldLength), 148 | type: 'server', 149 | architecture: ['x86_64'], 150 | }, 151 | }; 152 | }; 153 | 154 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 155 | const changeHostName = (doc: Record, addition: string) => { 156 | const newName = `${doc.host.hostname}-${addition}`; 157 | doc.host.hostname = newName; 158 | doc.host.name = newName; 159 | doc.host.id = newName; 160 | return doc; 161 | }; 162 | 163 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 164 | const changeUserName = (doc: Record, addition: string) => { 165 | const newName = `${doc.user.name}-${addition}`; 166 | doc.user.name = newName; 167 | doc.user.id = newName; 168 | return doc; 169 | }; 170 | 171 | const generateUserFields = ({ idPrefix, entityIndex }: GeneratorOptions): UserFields => { 172 | const id = `${idPrefix}-user-${entityIndex}`; 173 | return { 174 | entity: { 175 | id: id, 176 | name: id, 177 | type: 'user', 178 | sub_type: 'aws_iam_user', 179 | address: `example.${idPrefix}.com`, 180 | }, 181 | user: { 182 | id: id, 183 | name: id, 184 | full_name: [`User ${idPrefix} ${entityIndex}`], 185 | domain: `example.${idPrefix}.com`, 186 | roles: ['admin'], 187 | email: [`${id}.example.${idPrefix}.com`], 188 | }, 189 | }; 190 | }; 191 | 192 | const FIELD_LENGTH = 2; 193 | const directoryName = dirname(fileURLToPath(import.meta.url)); 194 | const DATA_DIRECTORY = directoryName + '/../../data/entity_store_perf_data'; 195 | const LOGS_DIRECTORY = directoryName + '/../../logs'; 196 | 197 | const getFilePath = (name: string) => { 198 | return 
`${DATA_DIRECTORY}/${name}${name.endsWith('.jsonl') ? '' : '.jsonl'}`; 199 | }; 200 | 201 | export const listPerfDataFiles = () => fs.readdirSync(DATA_DIRECTORY); 202 | 203 | const deleteAllEntities = async () => { 204 | const esClient = getEsClient(); 205 | return await esClient.deleteByQuery({ 206 | index: '.entities.v1.latest*', 207 | query: { 208 | match_all: {}, 209 | }, 210 | }); 211 | }; 212 | 213 | const deleteLogsIndex = async (index: string) => { 214 | return await getEsClient().indices.delete( 215 | { 216 | index, 217 | }, 218 | { ignore: [404] } 219 | ); 220 | }; 221 | 222 | const countEntities = async (baseDomainName: string) => { 223 | const esClient = getEsClient(); 224 | const res = await esClient.count({ 225 | index: '.entities.v1.latest*', 226 | query: { 227 | bool: { 228 | should: [ 229 | { 230 | term: { 231 | 'host.domain': `example.${baseDomainName}.com`, 232 | }, 233 | }, 234 | { 235 | term: { 236 | 'user.domain': `example.${baseDomainName}.com`, 237 | }, 238 | }, 239 | ], 240 | minimum_should_match: 1, 241 | }, 242 | }, 243 | }); 244 | 245 | return res.count; 246 | }; 247 | 248 | const countEntitiesUntil = async (name: string, count: number) => { 249 | let total = 0; 250 | console.log('Polling for entities...'); 251 | const progress = new cliProgress.SingleBar( 252 | { 253 | format: 'Progress | {value}/{total} Entities', 254 | }, 255 | cliProgress.Presets.shades_classic 256 | ); 257 | progress.start(count, 0); 258 | 259 | while (total < count && !stop) { 260 | total = await countEntities(path.parse(name).name); 261 | progress.update(total); 262 | 263 | await new Promise((resolve) => setTimeout(resolve, 2000)); 264 | } 265 | 266 | progress.stop(); 267 | 268 | if (stop) { 269 | console.log('Process stopped before reaching the count.'); 270 | } 271 | 272 | return total; 273 | }; 274 | 275 | const logClusterHealthEvery = (name: string, interval: number): (() => void) => { 276 | if (config.serverless) { 277 | console.log('Skipping cluster health 
on serverless cluster'); 278 | return () => {}; 279 | } 280 | 281 | let stopCalled = false; 282 | 283 | const stopCallback = () => { 284 | stopCalled = true; 285 | }; 286 | 287 | const logFile = `${LOGS_DIRECTORY}/${name}-${new Date().toISOString()}-cluster-health.log`; 288 | 289 | const stream = fs.createWriteStream(logFile, { flags: 'a' }); 290 | 291 | const log = (message: string) => { 292 | stream.write(`${new Date().toISOString()} - ${message}\n`); 293 | }; 294 | 295 | const logClusterHealth = async () => { 296 | const esClient = getEsClient(); 297 | const res = await esClient.cluster.health(); 298 | log(JSON.stringify(res)); 299 | }; 300 | 301 | const int = setInterval(async () => { 302 | await logClusterHealth(); 303 | 304 | if (stopCalled || stop) { 305 | clearInterval(int); 306 | stream.end(); 307 | } 308 | }, interval); 309 | 310 | return stopCallback; 311 | }; 312 | 313 | const logTransformStatsEvery = (name: string, interval: number): (() => void) => { 314 | const TRANSFORM_NAMES = [ 315 | 'entities-v1-latest-security_host_default', 316 | 'entities-v1-latest-security_user_default', 317 | ]; 318 | 319 | let stopCalled = false; 320 | 321 | const stopCallback = () => { 322 | stopCalled = true; 323 | }; 324 | 325 | const logFile = `${LOGS_DIRECTORY}/${name}-${new Date().toISOString()}-transform-stats.log`; 326 | 327 | const stream = fs.createWriteStream(logFile, { flags: 'a' }); 328 | 329 | const log = (message: string) => { 330 | stream.write(`${new Date().toISOString()} - ${message}\n`); 331 | }; 332 | 333 | const logTransformStatsEvery = async () => { 334 | const esClient = getEsClient(); 335 | for (const transform of TRANSFORM_NAMES) { 336 | const res = await esClient.transform.getTransformStats({ 337 | transform_id: transform, 338 | }); 339 | 340 | log(`Transform ${transform} stats: ${JSON.stringify(res)}`); 341 | } 342 | }; 343 | 344 | const int = setInterval(async () => { 345 | await logTransformStatsEvery(); 346 | 347 | if (stopCalled || stop) { 348 
| clearInterval(int); 349 | stream.end(); 350 | } 351 | }, interval); 352 | 353 | return stopCallback; 354 | }; 355 | 356 | export const createPerfDataFile = ({ 357 | entityCount, 358 | logsPerEntity, 359 | startIndex, 360 | name, 361 | }: { 362 | name: string; 363 | entityCount: number; 364 | logsPerEntity: number; 365 | startIndex: number; 366 | }) => { 367 | const filePath = getFilePath(name); 368 | console.log( 369 | `Creating performance data file ${name} at with ${entityCount} entities and ${logsPerEntity} logs per entity. Starting at index ${startIndex}` 370 | ); 371 | 372 | if (fs.existsSync(filePath)) { 373 | console.log(`Data file ${name}.json already exists. Deleting...`); 374 | fs.unlinkSync(filePath); 375 | } 376 | 377 | console.log(`Generating ${entityCount * logsPerEntity} logs...`); 378 | const progress = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic); 379 | 380 | progress.start(entityCount * logsPerEntity, 0); 381 | // we could be generating up to 1 million entities, so we need to be careful with memory 382 | // we will write to the file as we generate the data to avoid running out of memory 383 | const writeStream = fs.createWriteStream(filePath, { flags: 'a' }); 384 | 385 | const generateLogs = async () => { 386 | for (let i = 0; i < entityCount; i++) { 387 | // we generate 50/50 host/user entities 388 | const entityType = i % 2 === 0 ? 'host' : 'user'; 389 | 390 | // user-0 host-0 user-1 host-1 user-2 host-2 391 | const entityIndex = Math.floor(i / 2) + 1; 392 | 393 | for (let j = 0; j < logsPerEntity; j++) { 394 | // start index for IP/MAC addresses 395 | // host-0: 0-1, host-1: 2-3, host-2: 4-5 396 | const valueStartIndex = startIndex + j * FIELD_LENGTH; 397 | const generatorOpts = { 398 | entityIndex, 399 | valueStartIndex: valueStartIndex, 400 | fieldLength: FIELD_LENGTH, 401 | idPrefix: name, 402 | }; 403 | const doc = { 404 | // @timestamp is generated on ingest 405 | ...(entityType === 'host' 406 | ? 
generateHostFields(generatorOpts) 407 | : generateUserFields(generatorOpts)), 408 | message: faker.lorem.sentence(), 409 | tags: ['entity-store-perf'], 410 | }; 411 | 412 | writeStream.write(JSON.stringify(doc) + '\n'); 413 | progress.increment(); 414 | } 415 | 416 | // Yield to the event loop to prevent blocking 417 | await new Promise((resolve) => setImmediate(resolve)); 418 | } 419 | progress.stop(); 420 | console.log(`Data file ${filePath} created`); 421 | }; 422 | 423 | generateLogs().catch((err) => { 424 | console.error('Error generating logs:', err); 425 | }); 426 | }; 427 | 428 | const uploadFile = async ({ 429 | filePath, 430 | index, 431 | lineCount, 432 | modifyDoc, 433 | onComplete, 434 | }: { 435 | filePath: string; 436 | index: string; 437 | lineCount: number; 438 | modifyDoc?: (doc: Record) => Record; // eslint-disable-line @typescript-eslint/no-explicit-any 439 | onComplete?: () => void; 440 | }) => { 441 | const esClient = getEsClient(); 442 | const stream = fs.createReadStream(filePath); 443 | const progress = new cliProgress.SingleBar( 444 | { 445 | format: '{bar} | {percentage}% | {value}/{total} Documents Uploaded', 446 | }, 447 | cliProgress.Presets.shades_classic 448 | ); 449 | progress.start(lineCount, 0); 450 | 451 | const rl = readline.createInterface({ 452 | input: stream, 453 | crlfDelay: Infinity, 454 | }); 455 | 456 | const lineGenerator = async function* () { 457 | for await (const line of rl) { 458 | yield JSON.parse(line); 459 | } 460 | }; 461 | 462 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 463 | await esClient.helpers.bulk>({ 464 | datasource: lineGenerator(), 465 | onDocument: (doc) => { 466 | if (stop) { 467 | throw new Error('Stopped'); 468 | } 469 | 470 | doc['@timestamp'] = new Date().toISOString(); 471 | 472 | if (modifyDoc) { 473 | doc = modifyDoc(doc); 474 | } 475 | 476 | return [{ create: { _index: index } }, { ...doc }]; 477 | }, 478 | flushBytes: 1024 * 1024 * 1, 479 | flushInterval: 3000, 480 | 
onSuccess: () => { 481 | progress.increment(); 482 | }, 483 | onDrop: (doc) => { 484 | console.log('Failed to index document:', doc); 485 | process.exit(1); 486 | }, 487 | }); 488 | 489 | progress.stop(); 490 | if (onComplete) { 491 | onComplete(); 492 | } 493 | }; 494 | 495 | const getFileStats = async (filePath: string) => { 496 | const lineCount = await getFileLineCount(filePath); 497 | const logsPerEntity = await getLogsPerEntity(filePath); 498 | const entityCount = lineCount / logsPerEntity; 499 | 500 | return { lineCount, logsPerEntity, entityCount }; 501 | }; 502 | 503 | export const uploadPerfDataFile = async ( 504 | name: string, 505 | indexOverride?: string, 506 | deleteEntities?: boolean 507 | ) => { 508 | const index = indexOverride || `logs-perftest.${name}-default`; 509 | 510 | if (deleteEntities) { 511 | console.log('Deleting all entities...'); 512 | await deleteAllEntities(); 513 | console.log('All entities deleted'); 514 | 515 | console.log('Deleting logs index...'); 516 | await deleteLogsIndex(index); 517 | console.log('Logs index deleted'); 518 | } 519 | const filePath = getFilePath(name); 520 | 521 | console.log(`Uploading performance data file ${name} to index ${index}`); 522 | 523 | if (!fs.existsSync(filePath)) { 524 | console.log(`Data file ${name} does not exist`); 525 | process.exit(1); 526 | } 527 | 528 | console.log('initialising entity engines'); 529 | await initEntityEngineForEntityTypes(['host', 'user']); 530 | console.log('entity engines initialised'); 531 | 532 | const { lineCount, logsPerEntity, entityCount } = await getFileStats(filePath); 533 | console.log( 534 | `Data file ${name} has ${lineCount} lines, ${entityCount} entities and ${logsPerEntity} logs per entity` 535 | ); 536 | const startTime = Date.now(); 537 | 538 | await uploadFile({ filePath, index, lineCount }); 539 | const ingestTook = Date.now() - startTime; 540 | console.log(`Data file ${name} uploaded to index ${index} in ${ingestTook}ms`); 541 | 542 | await 
countEntitiesUntil(name, entityCount); 543 | 544 | const tookTotal = Date.now() - startTime; 545 | 546 | console.log(`Total time: ${tookTotal}ms`); 547 | }; 548 | 549 | export const uploadPerfDataFileInterval = async ( 550 | name: string, 551 | intervalMs: number, 552 | uploadCount: number, 553 | deleteEntities?: boolean, 554 | doDeleteEngines?: boolean 555 | ) => { 556 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 557 | const addIdPrefix = (prefix: string) => (doc: Record) => { 558 | const isHost = !!doc.host; 559 | 560 | if (isHost) { 561 | return changeHostName(doc, prefix); 562 | } 563 | 564 | return changeUserName(doc, prefix); 565 | }; 566 | 567 | const index = `logs-perftest.${name}-default`; 568 | const filePath = getFilePath(name); 569 | 570 | console.log( 571 | `Uploading performance data file ${name} every ${intervalMs}ms ${uploadCount} times to index ${index}` 572 | ); 573 | 574 | if (doDeleteEngines) { 575 | console.log('Deleting all engines...'); 576 | await deleteEngines(); 577 | console.log('All engines deleted'); 578 | } 579 | if (deleteEntities) { 580 | console.log('Deleting all entities...'); 581 | await deleteAllEntities(); 582 | console.log('All entities deleted'); 583 | 584 | console.log('Deleting logs index...'); 585 | await deleteLogsIndex(index); 586 | console.log('Logs index deleted'); 587 | } 588 | 589 | if (!fs.existsSync(filePath)) { 590 | console.log(`Data file ${name} does not exist`); 591 | process.exit(1); 592 | } 593 | 594 | console.log('initialising entity engines'); 595 | 596 | await initEntityEngineForEntityTypes(['host', 'user']); 597 | 598 | console.log('entity engines initialised'); 599 | 600 | const { lineCount, logsPerEntity, entityCount } = await getFileStats(filePath); 601 | 602 | console.log( 603 | `Data file ${name} has ${lineCount} lines, ${entityCount} entities and ${logsPerEntity} logs per entity` 604 | ); 605 | 606 | const startTime = Date.now(); 607 | 608 | let previousUpload = Promise.resolve(); 
609 | 610 | const stopHealthLogging = logClusterHealthEvery(name, 5000); 611 | const stopTransformsLogging = logTransformStatsEvery(name, 5000); 612 | 613 | for (let i = 0; i < uploadCount; i++) { 614 | if (stop) { 615 | break; 616 | } 617 | let uploadCompleted = false; 618 | const onComplete = () => { 619 | uploadCompleted = true; 620 | }; 621 | const intervalS = intervalMs / 1000; 622 | console.log(`Uploading ${i + 1} of ${uploadCount}, next upload in ${intervalS}s...`); 623 | previousUpload = previousUpload.then(() => 624 | uploadFile({ 625 | onComplete, 626 | filePath, 627 | index, 628 | lineCount, 629 | modifyDoc: addIdPrefix(i.toString()), 630 | }) 631 | ); 632 | let progress: cliProgress.SingleBar | null = null; 633 | for (let j = 0; j < intervalS; j++) { 634 | if (stop) { 635 | break; 636 | } 637 | if (uploadCompleted) { 638 | if (!progress) { 639 | progress = new cliProgress.SingleBar( 640 | { 641 | format: '{bar} | {value}s | waiting {total}s until next upload', 642 | }, 643 | cliProgress.Presets.shades_classic 644 | ); 645 | 646 | progress.start(intervalS, j + 1); 647 | } else { 648 | progress.update(j + 1); 649 | } 650 | } 651 | await new Promise((resolve) => setTimeout(resolve, 1000)); 652 | } 653 | progress?.update(intervalS); 654 | progress?.stop(); 655 | } 656 | 657 | await previousUpload; 658 | 659 | const ingestTook = Date.now() - startTime; 660 | console.log(`Data file ${name} uploaded to index ${index} in ${ingestTook}ms`); 661 | 662 | await countEntitiesUntil(name, entityCount * uploadCount); 663 | 664 | const tookTotal = Date.now() - startTime; 665 | 666 | stopHealthLogging(); 667 | stopTransformsLogging(); 668 | 669 | console.log(`Total time: ${tookTotal}ms`); 670 | }; 671 | -------------------------------------------------------------------------------- /src/commands/insights.ts: -------------------------------------------------------------------------------- 1 | import { generateNewSeed } from '../constants'; 2 | import { faker } from 
'@faker-js/faker'; 3 | import { ingest } from './utils/indices'; 4 | import createVulnerabilities, { CreateVulnerabilitiesParams } from '../create_vulnerability'; 5 | import createMisconfigurations, { 6 | CreateMisconfigurationsParams, 7 | } from '../create_misconfigurations'; 8 | import { installPackage } from '../utils/kibana_api'; 9 | 10 | const VULNERABILITY_INDEX_NAME = 'logs-cloud_security_posture.vulnerabilities_latest-default'; 11 | 12 | const MISCONFIGURATION_INDEX_NAME = 13 | 'security_solution-cloud_security_posture.misconfiguration_latest'; 14 | 15 | const PACKAGE_TO_INSTALL = 'cloud_security_posture'; 16 | 17 | export const generateInsights = async ({ 18 | users, 19 | hosts, 20 | space, 21 | seed = generateNewSeed(), 22 | }: { 23 | users: number; 24 | hosts: number; 25 | seed?: number; 26 | space: string; 27 | }) => { 28 | faker.seed(seed); 29 | const usersData = Array.from({ length: users }, () => ({ 30 | username: faker.internet.username(), 31 | })); 32 | 33 | const hostsData = Array.from({ length: hosts }, () => ({ 34 | hostname: faker.internet.domainName(), 35 | })); 36 | 37 | console.log('Installing cloud posture package'); 38 | await installPackage({ packageName: PACKAGE_TO_INSTALL, space }); 39 | 40 | await ingest(VULNERABILITY_INDEX_NAME, generateDocs(usersData, space, createVulnerabilities)); 41 | await ingest(VULNERABILITY_INDEX_NAME, generateDocs(hostsData, space, createVulnerabilities)); 42 | 43 | await ingest( 44 | MISCONFIGURATION_INDEX_NAME, 45 | generateDocs(usersData, space, createMisconfigurations) 46 | ); 47 | await ingest( 48 | MISCONFIGURATION_INDEX_NAME, 49 | generateDocs(hostsData, space, createMisconfigurations) 50 | ); 51 | }; 52 | 53 | interface EntityData { 54 | username?: string; 55 | hostname?: string; 56 | } 57 | 58 | export const generateDocs = ( 59 | entityData: EntityData[], 60 | space: string, 61 | createDocs: (param: CreateVulnerabilitiesParams | CreateMisconfigurationsParams) => object 62 | ) => { 63 | const 
/**
 * Install legacy risk score and generate data
 *
 * Installs the pre-8.x risk score artifacts via the Kibana API, then
 * bulk-indexes the canned ml_*_risk_score_* sample documents defined below.
 */
export const generateLegacyRiskScore = async () => {
  console.log('Installing legacy risk score');

  await installLegacyRiskScore();

  console.log('Generating data');

  await bulkIndexData();

  console.log('Data generated');
};
66 | user: { 67 | name: 'johnsmith', 68 | risk: { 69 | calculated_level: 'Low', 70 | calculated_score_norm: 21, 71 | rule_risks: [ 72 | { 73 | rule_name: 'test', 74 | rule_risk: 21, 75 | }, 76 | ], 77 | }, 78 | }, 79 | }, 80 | }, 81 | { 82 | id: 'LrLLssaZUZHhh2cKbkwqIEpguUegcpuG9V+qzVlm8N0=', 83 | index: 'ml_user_risk_score_default', 84 | source: { 85 | ingest_timestamp: '2022-09-18T18:31:32.969840Z', 86 | '@timestamp': '2022-09-18T18:28:30.943Z', 87 | user: { 88 | name: 'johnsmith', 89 | risk: { 90 | calculated_level: 'Low', 91 | calculated_score_norm: 21, 92 | rule_risks: [ 93 | { 94 | rule_name: 'test', 95 | rule_risk: 21, 96 | }, 97 | ], 98 | }, 99 | }, 100 | }, 101 | }, 102 | ]; 103 | 104 | const bulkIndexData = async () => { 105 | const body = data.flatMap((doc) => { 106 | doc.source['@timestamp'] = new Date().toISOString(); 107 | doc.source.ingest_timestamp = new Date().toISOString(); 108 | return [{ index: { _index: doc.index } }, doc.source]; 109 | }); 110 | 111 | const esClient = getEsClient(); 112 | await esClient.bulk({ refresh: true, body }); 113 | }; 114 | -------------------------------------------------------------------------------- /src/commands/privileged_access_detection_ml/event_generator.ts: -------------------------------------------------------------------------------- 1 | import { TimeWindow, TimeWindows } from '../utils/time_windows'; 2 | import { faker } from '@faker-js/faker'; 3 | 4 | const BASELINE_NUMBER_OF_EVENTS_PER_USER = 1000; 5 | const ANOMALOUS_PROBABILITY_WEIGHT = 5; 6 | 7 | interface Event { 8 | '@timestamp': string; 9 | } 10 | 11 | const createPrivilegedLinuxEvent = (timeWindow: TimeWindow, userName: string) => 12 | ({ 13 | '@timestamp': TimeWindows.toRandomTimestamp({ 14 | start: timeWindow.start, 15 | end: timeWindow.end, 16 | }), 17 | user: { name: userName }, 18 | host: { 19 | name: faker.database.engine(), 20 | os: { 21 | type: 'linux', 22 | }, 23 | }, 24 | event: { 25 | type: 'start', 26 | category: 'process', 27 | }, 28 
| process: { 29 | name: `process_${faker.animal.type()}`, 30 | command_line: faker.helpers.arrayElement([ 31 | // For future reference, a full list of these values may be found here: https://github.com/elastic/integrations/blob/main/packages/pad/kibana/ml_module/pad-ml.json 32 | 'pw unlock', 33 | 'systemctl daemon-reload', 34 | '!tty_tickets', 35 | ]), 36 | }, 37 | }) as Event; 38 | 39 | export class User { 40 | constructor( 41 | readonly userName: string, 42 | readonly numberOfAnomalousDays: number, 43 | readonly maxNumberOfAnomalousEvents: number 44 | ) {} 45 | } 46 | 47 | interface UserNameByNumber { 48 | [userName: string]: number; 49 | } 50 | 51 | export class UserGenerator { 52 | /** 53 | * 54 | * @returns an array of Users having baseline and anomalous events 55 | */ 56 | public static getUsers(numberOfUsers: number) { 57 | const userNames = faker.helpers.multiple(() => faker.person.fullName(), { 58 | count: numberOfUsers, 59 | }); 60 | 61 | const usersByNumberOfAnomalousEvents = UserGenerator.getUsersByNumberOfAnomalousEvents( 62 | UserGenerator.getWeightedUserNames(userNames) 63 | ); 64 | return userNames.map( 65 | (eachUserName) => 66 | new User( 67 | eachUserName, 68 | faker.helpers.rangeToNumber({ min: 3, max: 10 }), 69 | usersByNumberOfAnomalousEvents[eachUserName] ?? 1 70 | ) 71 | ); 72 | } 73 | 74 | /** 75 | * 76 | * @returns an object whose keys are userNames, and whose values are the number of anomalous events that user 77 | * should contain. 
78 | */ 79 | private static getUsersByNumberOfAnomalousEvents( 80 | weightedUserNames: { 81 | weight: number; 82 | value: string; 83 | }[] 84 | ): UserNameByNumber { 85 | return faker.helpers 86 | .multiple( 87 | () => { 88 | return faker.helpers.weightedArrayElement(weightedUserNames); 89 | }, 90 | { 91 | count: BASELINE_NUMBER_OF_EVENTS_PER_USER * ANOMALOUS_PROBABILITY_WEIGHT, 92 | } 93 | ) 94 | .reduce((acc, next) => { 95 | if (acc[next]) acc[next]++; 96 | else acc[next] = 1; 97 | return acc; 98 | }, {} as UserNameByNumber); 99 | } 100 | 101 | /** 102 | * @returns userNames associated with a particular weight, in order to have a fairly random distribution of data. 103 | * This results in some users exhibiting more anomalous behaviors than others 104 | */ 105 | private static getWeightedUserNames(userNames: string[]) { 106 | return userNames.map((eachUserName, index) => ({ 107 | weight: index + 1, 108 | value: eachUserName, 109 | })); 110 | } 111 | } 112 | 113 | export class UserEventGenerator { 114 | /** 115 | * @returns Events to build a baseline of behaviors 116 | */ 117 | public static evenlyDistributedEvents(user: User, eventMultiplier: number): Event[] { 118 | return faker.helpers.multiple( 119 | () => { 120 | return createPrivilegedLinuxEvent(TimeWindows.last30DayWindow(), user.userName); 121 | }, 122 | { count: BASELINE_NUMBER_OF_EVENTS_PER_USER * eventMultiplier } 123 | ); 124 | } 125 | 126 | private static anomalousEventsForWindow( 127 | user: User, 128 | window: TimeWindow, 129 | eventMultiplier: number 130 | ): Event[] { 131 | const randomNumberOfAnomalousEvents = faker.helpers.rangeToNumber({ 132 | min: 0, 133 | max: user.maxNumberOfAnomalousEvents, 134 | }); 135 | return faker.helpers.multiple( 136 | () => { 137 | return createPrivilegedLinuxEvent(window, user.userName); 138 | }, 139 | { count: randomNumberOfAnomalousEvents * eventMultiplier } 140 | ); 141 | } 142 | 143 | /** 144 | * @returns Anomalous events within day-long windows, based on the 
numberOfAnomalousDays. Each day will have a maximum of maxNumberOfAnomalousEvents. 145 | */ 146 | public static anomalousEvents(user: User, eventMultiplier: number): Event[] { 147 | return faker.helpers 148 | .multiple( 149 | () => { 150 | const window = TimeWindows.randomWindowOfOneDayInTheLastMonth(); 151 | return this.anomalousEventsForWindow(user, window, eventMultiplier); 152 | }, 153 | { count: user.numberOfAnomalousDays } 154 | ) 155 | .flat(); 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /src/commands/privileged_access_detection_ml/index_management.ts: -------------------------------------------------------------------------------- 1 | import { getEsClient, indexCheck, ingest } from '../utils/indices'; 2 | 3 | export const createPrivilegedAccessDetectionSourceIndex = async (index: string) => { 4 | try { 5 | await indexCheck(index, { 6 | mappings: { 7 | properties: { 8 | '@timestamp': { 9 | type: 'date', 10 | }, 11 | user: { 12 | properties: { 13 | name: { 14 | type: 'keyword', 15 | fields: { 16 | text: { 17 | type: 'text', 18 | }, 19 | }, 20 | }, 21 | }, 22 | }, 23 | host: { 24 | properties: { 25 | name: { 26 | type: 'keyword', 27 | fields: { 28 | text: { 29 | type: 'text', 30 | }, 31 | }, 32 | }, 33 | os: { 34 | properties: { 35 | type: { 36 | type: 'keyword', 37 | fields: { 38 | text: { 39 | type: 'text', 40 | }, 41 | }, 42 | }, 43 | }, 44 | }, 45 | }, 46 | }, 47 | process: { 48 | properties: { 49 | name: { 50 | type: 'keyword', 51 | fields: { 52 | text: { 53 | type: 'text', 54 | }, 55 | }, 56 | }, 57 | command_line: { 58 | type: 'keyword', 59 | fields: { 60 | text: { 61 | type: 'text', 62 | }, 63 | }, 64 | }, 65 | }, 66 | }, 67 | event: { 68 | properties: { 69 | type: { 70 | type: 'keyword', 71 | fields: { 72 | text: { 73 | type: 'text', 74 | }, 75 | }, 76 | }, 77 | category: { 78 | type: 'keyword', 79 | fields: { 80 | text: { 81 | type: 'text', 82 | }, 83 | }, 84 | }, 85 | }, 86 | }, 87 | }, 88 | 
}, 89 | }); 90 | } catch (error) { 91 | console.log( 92 | 'There was an error creating the source data index. This is likely a field mapping issue: ', 93 | error 94 | ); 95 | throw error; 96 | } 97 | }; 98 | 99 | export const deleteSourceIndex = async (index: string) => { 100 | try { 101 | await getEsClient().indices.delete({ 102 | index: [index], 103 | ignore_unavailable: true, 104 | }); 105 | console.log('Index deleted'); 106 | } catch (error) { 107 | console.log( 108 | 'There was an error deleting the source index. Will continue, and attempt to recreate the index: ', 109 | error 110 | ); 111 | } 112 | }; 113 | 114 | export const ingestIntoSourceIndex = async (index: string, documents: Array) => 115 | await ingest(index, documents); 116 | -------------------------------------------------------------------------------- /src/commands/privileged_access_detection_ml/privileged_access_detection_ml.ts: -------------------------------------------------------------------------------- 1 | import { 2 | createPrivilegedAccessDetectionSourceIndex, 3 | deleteSourceIndex, 4 | ingestIntoSourceIndex, 5 | } from './index_management'; 6 | import { User, UserEventGenerator } from './event_generator'; 7 | 8 | const LOGS_LINUX_INDEX = 'logs-linux'; 9 | 10 | const getAllPrivilegedAccessDetectionEvents = (users: User[]) => { 11 | const events = []; 12 | const eventMultiplier = 1; // We want this value to be consistent for evenly distributed events and anomalous events 13 | 14 | for (const eachUser of users) { 15 | events.push(...UserEventGenerator.evenlyDistributedEvents(eachUser, eventMultiplier)); 16 | events.push(...UserEventGenerator.anomalousEvents(eachUser, eventMultiplier)); 17 | } 18 | return events; 19 | }; 20 | 21 | export const generatePrivilegedAccessDetectionData = async ({ users }: { users: User[] }) => { 22 | try { 23 | await deleteSourceIndex(LOGS_LINUX_INDEX); 24 | await createPrivilegedAccessDetectionSourceIndex(LOGS_LINUX_INDEX); 25 | await 
ingestIntoSourceIndex(LOGS_LINUX_INDEX, getAllPrivilegedAccessDetectionEvents(users)); 26 | } catch (e) { 27 | console.log(e); 28 | } 29 | }; 30 | -------------------------------------------------------------------------------- /src/commands/privileged_user_monitoring/generate_csv_file.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs/promises'; 2 | import { resolve } from 'path'; 3 | import { User } from '../privileged_access_detection_ml/event_generator'; 4 | import { srcDirectory } from '../../index'; 5 | import { enablePrivmon, uploadPrivmonCsv } from '../../utils/kibana_api'; 6 | 7 | const CSV_FILE_NAME = 'privileged_users.csv'; 8 | 9 | const generateLabelForUser = (user: User): string => { 10 | const LABELS = [ 11 | 'admin', 12 | 'superuser', 13 | 'Administrator', 14 | 'root', 15 | 'privileged', 16 | 'power user', 17 | 'system administrator', 18 | 'IT support', 19 | 'security officer', 20 | 'network engineer', 21 | 'database administrator', 22 | 'cloud engineer', 23 | ]; 24 | const index = user.userName.length % LABELS.length; 25 | return LABELS[index]; 26 | }; 27 | 28 | export const generateCSVFile = async ({ 29 | users, 30 | upload, 31 | space, 32 | }: { 33 | users: User[]; 34 | upload: boolean; 35 | space: string; 36 | }) => { 37 | try { 38 | const csvContent = users 39 | .map((user) => user.userName + ',' + generateLabelForUser(user)) 40 | .join('\n'); 41 | const outputDirectory = resolve(srcDirectory, `../output`); 42 | const csvFilePath = resolve(outputDirectory, `./${CSV_FILE_NAME}`); 43 | await fs.mkdir(outputDirectory, { recursive: true }); 44 | await fs.writeFile(csvFilePath, csvContent); 45 | if (upload) { 46 | console.log('Uploading CSV file to Privileged User Monitoring...'); 47 | console.log('First, enabling Privileged User Monitoring...'); 48 | await enablePrivmon(space); 49 | console.log('Now, uploading the CSV file...'); 50 | await uploadPrivmonCsv(csvFilePath, space); 51 | 
console.log('Upload complete.'); 52 | } 53 | console.log(`A CSV file containing all of the privileged users was written to ${csvFilePath}`); 54 | } catch (e) { 55 | console.log( 56 | 'There was a problem writing the CSV file to the local directory. See details below.' 57 | ); 58 | console.error(e); 59 | } 60 | }; 61 | -------------------------------------------------------------------------------- /src/commands/privileged_user_monitoring/privileged_user_monitoring.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { ingestIntoSourceIndex } from '../privileged_access_detection_ml/index_management'; 3 | import { getEsClient } from '../utils/indices'; 4 | import { 5 | ACCOUNT_SWITCH_LINUX_SAMPLE_DOCUMENT, 6 | GRANTED_RIGHTS_LINUX_SAMPLE_DOCUMENT, 7 | GRANTED_RIGHTS_OKTA_SAMPLE_DOCUMENT, 8 | GRANTED_RIGHTS_WINDOWS_SAMPLE_DOCUMENT, 9 | OKTA_AUTHENTICATION, 10 | } from './sample_documents'; 11 | import { TimeWindows } from '../utils/time_windows'; 12 | import { User, UserGenerator } from '../privileged_access_detection_ml/event_generator'; 13 | import { 14 | assignAssetCriticality, 15 | createRule, 16 | enableRiskScore, 17 | installPad, 18 | } from '../../utils/kibana_api'; 19 | import { createSampleFullSyncEvents, makeDoc } from '../utils/integrations_sync_utils'; 20 | import { 21 | ASSET_CRITICALITY, 22 | AssetCriticality, 23 | PRIVILEGED_USER_MONITORING_OPTIONS, 24 | PrivilegedUserMonitoringOption, 25 | } from '../../constants'; 26 | import { generatePrivilegedAccessDetectionData } from '../privileged_access_detection_ml/privileged_access_detection_ml'; 27 | import { generateCSVFile } from './generate_csv_file'; 28 | import { chunk } from 'lodash-es'; 29 | import { initializeSpace } from '../../utils'; 30 | 31 | const endpointLogsDataStreamName = 'logs-endpoint.events.process-default'; 32 | const systemLogsDataStreamName = 'logs-system.security-default'; 33 | const 
oktaLogsDataStreamName = 'logs-okta.system-default'; 34 | const oktaLogsUsersDataStreamName = 'logs-entityanalytics_okta.user-default'; 35 | const oktaLogsEntityDataStreamName = 'logs-entityanalytics_okta.entity-default'; 36 | 37 | const getSampleEndpointLogs = (users: User[]) => { 38 | return faker.helpers.multiple( 39 | () => { 40 | return GRANTED_RIGHTS_LINUX_SAMPLE_DOCUMENT( 41 | faker.helpers.arrayElement(users).userName, 42 | TimeWindows.toRandomTimestamp(TimeWindows.last30DayWindow()) 43 | ); 44 | }, 45 | { count: 100 } 46 | ); 47 | }; 48 | 49 | const getSampleEndpointAccountSwitchLogs = (users: User[]) => { 50 | return faker.helpers.multiple( 51 | () => { 52 | return ACCOUNT_SWITCH_LINUX_SAMPLE_DOCUMENT( 53 | faker.helpers.arrayElement(users).userName, 54 | TimeWindows.toRandomTimestamp(TimeWindows.last30DayWindow()) 55 | ); 56 | }, 57 | { count: 100 } 58 | ); 59 | }; 60 | 61 | const getSampleSystemLogs = (users: User[]) => { 62 | return faker.helpers.multiple( 63 | () => { 64 | return GRANTED_RIGHTS_WINDOWS_SAMPLE_DOCUMENT( 65 | faker.helpers.arrayElement(users).userName, 66 | TimeWindows.toRandomTimestamp(TimeWindows.last30DayWindow()) 67 | ); 68 | }, 69 | { count: 100 } 70 | ); 71 | }; 72 | 73 | const getSampleOktaLogs = (users: User[]) => { 74 | return faker.helpers.multiple( 75 | () => { 76 | return GRANTED_RIGHTS_OKTA_SAMPLE_DOCUMENT( 77 | faker.helpers.arrayElement(users).userName, 78 | TimeWindows.toRandomTimestamp(TimeWindows.last30DayWindow()) 79 | ); 80 | }, 81 | { count: 100 } 82 | ); 83 | }; 84 | 85 | const getSampleOktaUsersLogs = (count: number) => { 86 | const adminCount = Math.round((50 / 100) * count); 87 | const nonAdminCount = Math.max(0, count - adminCount); 88 | console.log( 89 | `Generating ${adminCount} admin users and ${nonAdminCount} non-admin users (total ${count})` 90 | ); 91 | const adminDocs = Array.from({ length: adminCount }, () => makeDoc(true)); 92 | const userDocs = Array.from({ length: nonAdminCount }, () => 
makeDoc(false)); 93 | const docs = adminDocs.concat(userDocs); 94 | return docs; 95 | }; 96 | 97 | const getSampleOktaEntityLogs = (count: number, syncInterval: number) => { 98 | const docs = createSampleFullSyncEvents({ 99 | count, 100 | syncWindowMs: syncInterval, 101 | }); 102 | return docs; 103 | }; 104 | 105 | const getSampleOktaAuthenticationLogs = (users: User[]) => { 106 | return faker.helpers.multiple( 107 | () => { 108 | return OKTA_AUTHENTICATION( 109 | faker.helpers.arrayElement(users).userName, 110 | TimeWindows.toRandomTimestamp(TimeWindows.last30DayWindow()) 111 | ); 112 | }, 113 | { count: 100 } 114 | ); 115 | }; 116 | 117 | const quickEnableRiskEngineAndRule = async (space: string) => { 118 | try { 119 | console.log('Enabling risk engine and rule...'); 120 | await createRule({ space }); 121 | await enableRiskScore(space); 122 | } catch (e) { 123 | console.log(e); 124 | } 125 | }; 126 | 127 | const generatePrivilegedUserMonitoringData = async ({ users }: { users: User[] }) => { 128 | try { 129 | await reinitializeDataStream(endpointLogsDataStreamName, [ 130 | ...getSampleEndpointLogs(users), 131 | ...getSampleEndpointAccountSwitchLogs(users), 132 | ]); 133 | 134 | await reinitializeDataStream(systemLogsDataStreamName, getSampleSystemLogs(users)); 135 | 136 | await reinitializeDataStream(oktaLogsDataStreamName, [ 137 | ...getSampleOktaLogs(users), 138 | ...getSampleOktaAuthenticationLogs(users), 139 | ]); 140 | } catch (e) { 141 | console.log(e); 142 | } 143 | }; 144 | 145 | /** 146 | * Generate data for integrations sync only. 147 | * Currently okta data only. 
148 | */ 149 | const generatePrivilegedUserIntegrationsSyncData = async ({ 150 | usersCount, 151 | syncEventsCount = 10, 152 | }: { 153 | usersCount: number; 154 | syncEventsCount?: number; 155 | }) => { 156 | try { 157 | const sampleDocuments = getSampleOktaUsersLogs(usersCount); 158 | const sampleEntityDocuments = getSampleOktaEntityLogs( 159 | syncEventsCount, 160 | 24 * 60 * 60 * 1000 // 1 day interval 161 | ); 162 | await reinitializeDataStream(oktaLogsUsersDataStreamName, sampleDocuments); 163 | await reinitializeDataStream(oktaLogsEntityDataStreamName, sampleEntityDocuments); 164 | } catch (e) { 165 | console.log(e); 166 | } 167 | }; 168 | 169 | const createDataStream = async (indexName: string) => { 170 | await getEsClient().indices.createDataStream({ 171 | name: indexName, 172 | }); 173 | }; 174 | 175 | const deleteDataStream = async (indexName: string) => { 176 | try { 177 | await getEsClient().indices.deleteDataStream({ name: indexName }); 178 | } catch (e: unknown) { 179 | const error = e as { meta: { statusCode: number } }; 180 | 181 | if (error.meta.statusCode === 404) 182 | console.log('Resource does not yet exist, and will be created.'); 183 | else throw e; 184 | } 185 | // Wait in order to ensure no race conditions after deletion 186 | await new Promise((r) => setTimeout(r, 1000)); 187 | }; 188 | 189 | const reinitializeDataStream = async (indexName: string, documents: Array) => { 190 | await deleteDataStream(indexName); 191 | await createDataStream(indexName); 192 | await ingestIntoSourceIndex(indexName, documents); 193 | }; 194 | 195 | const assignAssetCriticalityToUsers = async (opts: { users: User[]; space?: string }) => { 196 | const { users, space } = opts; 197 | const chunks = chunk(users, 1000); 198 | 199 | console.log(`Assigning asset criticality to ${users.length} users in ${chunks.length} chunks...`); 200 | 201 | const countMap: Record = { 202 | unknown: 0, 203 | low_impact: 0, 204 | medium_impact: 0, 205 | high_impact: 0, 206 | 
extreme_impact: 0, 207 | }; 208 | 209 | for (const chunk of chunks) { 210 | const records = chunk 211 | .map(({ userName }) => { 212 | const criticalityLevel = faker.helpers.arrayElement(ASSET_CRITICALITY); 213 | countMap[criticalityLevel]++; 214 | return { 215 | id_field: 'user.name', 216 | id_value: userName, 217 | criticality_level: criticalityLevel, 218 | }; 219 | }) 220 | .filter((r) => r.criticality_level !== 'unknown'); 221 | 222 | if (records.length > 0) { 223 | await assignAssetCriticality(records, space); 224 | } 225 | } 226 | 227 | console.log('Assigned asset criticality counts:', countMap); 228 | }; 229 | 230 | export const privmonCommand = async ({ 231 | options, 232 | userCount, 233 | space = 'default', 234 | }: { 235 | options: PrivilegedUserMonitoringOption[]; 236 | userCount: number; 237 | space: string; 238 | }) => { 239 | console.log('Starting Privileged User Monitoring data generation in space:', space); 240 | 241 | await initializeSpace(space); 242 | 243 | const users = UserGenerator.getUsers(userCount); 244 | 245 | if (options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.integrationSyncSourceEventData)) { 246 | await generatePrivilegedUserIntegrationsSyncData({ 247 | usersCount: userCount, 248 | }); 249 | } 250 | 251 | if (options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.sourceEventData)) { 252 | await generatePrivilegedUserMonitoringData({ users }); 253 | } 254 | 255 | if (options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.anomalyData)) { 256 | await generatePrivilegedAccessDetectionData({ users }); 257 | } 258 | 259 | await generateCSVFile({ 260 | users, 261 | upload: options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.csvFile), 262 | space, 263 | }); 264 | 265 | if (options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.assetCriticality)) { 266 | await assignAssetCriticalityToUsers({ users, space }); 267 | } 268 | 269 | if (options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.riskEngineAndRule)) { 270 | await 
quickEnableRiskEngineAndRule(space); 271 | } 272 | 273 | if (options.includes(PRIVILEGED_USER_MONITORING_OPTIONS.installPad)) { 274 | console.log('Installing PAD...'); 275 | await installPad(space); 276 | } 277 | 278 | console.log('Privileged User Monitoring data generation complete.'); 279 | }; 280 | -------------------------------------------------------------------------------- /src/commands/rules.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import { getEsClient } from './utils/indices'; 3 | import moment from 'moment'; 4 | import { chunk } from 'lodash-es'; 5 | import { createRule, getAllRules, bulkDeleteRules } from '../utils/kibana_api'; 6 | 7 | const EVENTS_INDEX = 'logs-*'; 8 | 9 | interface Event { 10 | '@timestamp': string; 11 | message: string; 12 | host: { 13 | name: string; 14 | ip: string; 15 | }; 16 | user: { 17 | name: string; 18 | id: string; 19 | }; 20 | event: { 21 | category: string[]; 22 | type: string[]; 23 | outcome: string; 24 | }; 25 | } 26 | 27 | interface RuleGenerationOptions { 28 | interval: string; 29 | from: number; 30 | gapsPerRule: number; 31 | } 32 | 33 | interface GapRange { 34 | gte: string; 35 | lte: string; 36 | } 37 | 38 | interface GapEvent { 39 | '@timestamp': string; 40 | event: { 41 | provider: 'alerting'; 42 | action: 'gap'; 43 | kind: 'alert'; 44 | category: ['siem']; 45 | }; 46 | kibana: { 47 | alert: { 48 | rule: { 49 | revision: number; 50 | rule_type_id: string; 51 | consumer: string; 52 | execution: { 53 | uuid: string; 54 | }; 55 | gap: { 56 | range: GapRange; 57 | filled_intervals: GapRange[]; 58 | in_progress_intervals: GapRange[]; 59 | unfilled_intervals: GapRange[]; 60 | status: 'unfilled' | 'filled' | 'in_progress'; 61 | total_gap_duration_ms: number; 62 | filled_duration_ms: number; 63 | unfilled_duration_ms: number; 64 | in_progress_duration_ms: number; 65 | }; 66 | }; 67 | }; 68 | saved_objects: Array<{ 69 | rel: 
'primary'; 70 | type: 'alert'; 71 | id: string; 72 | type_id: string; 73 | }>; 74 | space_ids: string[]; 75 | server_uuid: string; 76 | version: string; 77 | }; 78 | rule: { 79 | id: string; 80 | license: string; 81 | category: string; 82 | ruleset: string; 83 | name: string; 84 | }; 85 | ecs: { 86 | version: string; 87 | }; 88 | } 89 | 90 | const generateEvent = (from: number): Event => ({ 91 | '@timestamp': moment() 92 | .subtract(faker.number.int({ min: 1, max: from }), 'h') 93 | .toISOString(), 94 | message: faker.lorem.sentence(), 95 | host: { 96 | name: faker.internet.domainName(), 97 | ip: faker.internet.ip(), 98 | }, 99 | user: { 100 | name: faker.internet.username(), 101 | id: faker.string.uuid(), 102 | }, 103 | event: { 104 | category: faker.helpers.arrayElements(['authentication', 'process', 'network', 'file'], { 105 | min: 1, 106 | max: 2, 107 | }), 108 | type: faker.helpers.arrayElements(['start', 'end', 'info'], { 109 | min: 1, 110 | max: 2, 111 | }), 112 | outcome: faker.helpers.arrayElement(['success', 'failure']), 113 | }, 114 | }); 115 | 116 | const generateNonOverlappingGapEvents = ( 117 | ruleId: string, 118 | ruleName: string, 119 | fromHours: number, 120 | gapCount: number 121 | ): GapEvent[] => { 122 | const totalMinutes = fromHours * 60; 123 | // Calculate maximum duration for each gap including spacing 124 | const maxTimePerGap = Math.floor(totalMinutes / gapCount); 125 | 126 | // Ensure minimum values are at least 1 127 | const minGapDuration = Math.max(1, Math.min(5, Math.floor(maxTimePerGap * 0.6))); // 60% of available time 128 | const maxGapDuration = Math.max( 129 | minGapDuration + 1, 130 | Math.min(30, Math.floor(maxTimePerGap * 0.8)) 131 | ); // 80% of available time 132 | const maxSpaceBetweenGaps = Math.max(1, Math.floor(maxTimePerGap * 0.2)); // 20% of available time 133 | 134 | if (maxTimePerGap < 2) { 135 | console.warn( 136 | `Warning: Time window too small for ${gapCount} gaps. 
Each gap will be very short (${maxTimePerGap} minutes or less)` 137 | ); 138 | } 139 | 140 | const gaps: Array<{ start: number; end: number }> = []; 141 | let currentTimePoint = 0; 142 | 143 | // Generate exactly gapCount gaps 144 | for (let i = 0; i < gapCount; i++) { 145 | const gapDuration = faker.number.int({ 146 | min: minGapDuration, 147 | max: maxGapDuration, 148 | }); 149 | const spaceBetweenGaps = faker.number.int({ 150 | min: 1, 151 | max: maxSpaceBetweenGaps, 152 | }); 153 | 154 | const gapEnd = currentTimePoint + spaceBetweenGaps; 155 | const gapStart = gapEnd + gapDuration; 156 | 157 | currentTimePoint = gapStart; 158 | gaps.push({ start: gapEnd, end: gapStart }); 159 | } 160 | 161 | // Convert minute-based gaps to actual gap events 162 | return gaps.map((gap) => { 163 | const gapDurationMs = (gap.end - gap.start) * 60 * 1000; 164 | const gapEndTime = moment().subtract(gap.start, 'minutes'); 165 | const gapStartTime = moment().subtract(gap.end, 'minutes'); 166 | 167 | const range = { 168 | gte: gapStartTime.toISOString(), 169 | lte: gapEndTime.toISOString(), 170 | }; 171 | 172 | return { 173 | '@timestamp': range.lte, 174 | event: { 175 | provider: 'alerting', 176 | action: 'gap', 177 | kind: 'alert', 178 | category: ['siem'], 179 | }, 180 | kibana: { 181 | alert: { 182 | rule: { 183 | revision: 1, 184 | rule_type_id: 'siem.queryRule', 185 | consumer: 'siem', 186 | execution: { 187 | uuid: faker.string.uuid(), 188 | }, 189 | gap: { 190 | range, 191 | filled_intervals: [], 192 | in_progress_intervals: [], 193 | unfilled_intervals: [range], 194 | status: 'unfilled', 195 | total_gap_duration_ms: gapDurationMs, 196 | filled_duration_ms: 0, 197 | unfilled_duration_ms: gapDurationMs, 198 | in_progress_duration_ms: 0, 199 | }, 200 | }, 201 | }, 202 | saved_objects: [ 203 | { 204 | rel: 'primary', 205 | type: 'alert', 206 | id: ruleId, 207 | type_id: 'siem.queryRule', 208 | }, 209 | ], 210 | space_ids: ['default'], 211 | server_uuid: 
'5d29f261-1b85-4d90-9088-53e0e0e87c7c', 212 | version: '9.1.0', 213 | }, 214 | rule: { 215 | id: ruleId, 216 | license: 'basic', 217 | category: 'siem.queryRule', 218 | ruleset: 'siem', 219 | name: ruleName, 220 | }, 221 | ecs: { 222 | version: '1.8.0', 223 | }, 224 | }; 225 | }); 226 | }; 227 | 228 | const ingestEvents = async (events: Event[]) => { 229 | const client = getEsClient(); 230 | if (!client) throw new Error('Failed to get ES client'); 231 | 232 | const chunks = chunk(events, 1000); 233 | 234 | for (const chunk of chunks) { 235 | try { 236 | const operations = chunk.flatMap((doc) => [{ index: { _index: EVENTS_INDEX } }, doc]); 237 | 238 | await client.bulk({ operations, refresh: true }); 239 | } catch (err) { 240 | console.error('Error ingesting events:', err); 241 | throw err; 242 | } 243 | } 244 | }; 245 | 246 | const ingestGapEvents = async (gapEvents: GapEvent[]) => { 247 | const client = getEsClient(); 248 | if (!client) throw new Error('Failed to get ES client'); 249 | 250 | const chunks = chunk(gapEvents, 1000); 251 | 252 | for (const chunk of chunks) { 253 | try { 254 | const operations = chunk.flatMap((doc) => [ 255 | { create: { _index: '.kibana-event-log-ds' } }, 256 | doc, 257 | ]); 258 | 259 | await client.bulk({ operations, refresh: true }); 260 | } catch (err) { 261 | console.error('Error ingesting gap events:', err); 262 | throw err; 263 | } 264 | } 265 | }; 266 | 267 | const deleteGapEvents = async () => { 268 | const client = getEsClient(); 269 | if (!client) throw new Error('Failed to get ES client'); 270 | 271 | try { 272 | console.log('Deleting gap events...'); 273 | const response = await client.deleteByQuery({ 274 | index: '.ds-.kibana-event-log-*', 275 | refresh: true, 276 | query: { 277 | bool: { 278 | must: [{ term: { 'event.action': 'gap' } }, { term: { 'event.provider': 'alerting' } }], 279 | }, 280 | }, 281 | }); 282 | 283 | console.log(`Deleted ${response.deleted} gap events`); 284 | return response.deleted; 285 | } catch 
(err) { 286 | console.error('Error deleting gap events:', err); 287 | throw err; 288 | } 289 | }; 290 | 291 | export const generateRulesAndAlerts = async ( 292 | ruleCount: number, 293 | eventCount: number, 294 | options: RuleGenerationOptions 295 | ) => { 296 | // Create rules through Kibana API 297 | const ruleResults = await Promise.all( 298 | Array.from({ length: ruleCount }, () => { 299 | const ruleName = `Rule-${faker.string.alphanumeric(8)}`; 300 | const severity = faker.helpers.arrayElement(['low', 'medium', 'high', 'critical']); 301 | const riskScore = faker.number.int({ min: 1, max: 100 }); 302 | 303 | return createRule({ 304 | name: ruleName, 305 | description: faker.lorem.sentence(), 306 | enabled: true, 307 | risk_score: riskScore, 308 | severity: severity, 309 | index: ['logs-*', 'metrics-*', 'auditbeat-*'], 310 | type: 'query', 311 | query: '*:*', 312 | from: `now-${options.from}h`, 313 | interval: options.interval, 314 | }); 315 | }) 316 | ); 317 | 318 | // Generate events that rules can match against 319 | const events = Array.from({ length: eventCount }, () => generateEvent(options.from)); 320 | 321 | let gapEvents: GapEvent[] = []; 322 | if (options.gapsPerRule > 0) { 323 | // Generate non-overlapping gap events for each rule 324 | gapEvents = ruleResults.flatMap((rule) => { 325 | return generateNonOverlappingGapEvents( 326 | rule.id, 327 | rule.name || 'Unknown Rule', 328 | options.from, 329 | options.gapsPerRule 330 | ); 331 | }); 332 | } 333 | 334 | await Promise.all([ingestEvents(events), ingestGapEvents(gapEvents)]); 335 | 336 | console.log(`Created ${ruleResults.length} rules`); 337 | console.log(`Ingested ${events.length} events`); 338 | console.log(`Generated ${gapEvents.length} gap events`); 339 | 340 | return { rules: ruleResults, events, gapEvents }; 341 | }; 342 | 343 | export const deleteAllRules = async (space?: string) => { 344 | console.log('Fetching all rules...'); 345 | const { data: rules } = await getAllRules(space); 346 | 347 
| if (rules.length === 0) { 348 | console.log('No rules found to delete'); 349 | return; 350 | } 351 | 352 | console.log(`Found ${rules.length} rules. Deleting...`); 353 | 354 | // Using bulk delete with chunks of 100 355 | const ruleIds = rules.map((rule) => rule.id); 356 | const chunks = chunk(ruleIds, 100); 357 | 358 | try { 359 | let deletedCount = 0; 360 | for (const chunkIds of chunks) { 361 | await bulkDeleteRules(chunkIds, space); 362 | deletedCount += chunkIds.length; 363 | console.log(`Progress: ${deletedCount}/${rules.length} rules deleted`); 364 | } 365 | 366 | // Delete gap events after rules are deleted 367 | await deleteGapEvents(); 368 | 369 | console.log(`Successfully deleted ${deletedCount} rules and their gap events`); 370 | } catch (err) { 371 | console.error('Failed to delete rules:', JSON.stringify(err)); 372 | throw err; 373 | } 374 | }; 375 | -------------------------------------------------------------------------------- /src/commands/utils/cli_utils.ts: -------------------------------------------------------------------------------- 1 | import cliProgress from 'cli-progress'; 2 | import { select } from '@inquirer/prompts'; 3 | 4 | export const createProgressBar = (indexName: string) => { 5 | return new cliProgress.SingleBar( 6 | { 7 | format: `Progress indexing into ${indexName} | {value}/{total} docs`, 8 | }, 9 | cliProgress.Presets.shades_classic 10 | ); 11 | }; 12 | 13 | export const promptForFileSelection = async (fileList: string[]) => { 14 | if (fileList.length === 0) { 15 | console.log('No files to upload'); 16 | process.exit(1); 17 | } 18 | 19 | return select({ 20 | message: 'Select a file to upload', 21 | choices: fileList.map((file) => ({ name: file, value: file })), 22 | }); 23 | }; 24 | -------------------------------------------------------------------------------- /src/commands/utils/create_agent_document.ts: -------------------------------------------------------------------------------- 1 | import { v4 as uuidv4 } from 
'uuid'; 2 | import moment from 'moment'; 3 | 4 | export const createAgentDocument = ({ hostname }: { hostname: string }) => { 5 | const agentId = uuidv4(); 6 | 7 | const nowTimestamp = moment().utc().toISOString(); 8 | 9 | return { 10 | access_api_key_id: 'WDxG740BAG_XfFTa8Wbz', 11 | action_seq_no: [-1], 12 | active: true, 13 | agent: { 14 | id: agentId, 15 | version: '8.13.0', 16 | }, 17 | enrolled_at: '2024-02-28T10:33:40Z', 18 | local_metadata: { 19 | elastic: { 20 | agent: { 21 | 'build.original': 22 | '8.13.0 (build: edeb9adbf0c11a997359038d1393d14ab03462ce at 2024-02-23 12:32:56 +0000 UTC)', 23 | complete: false, 24 | id: agentId, 25 | log_level: 'info', 26 | snapshot: false, 27 | upgradeable: false, 28 | version: '8.13.0', 29 | }, 30 | }, 31 | host: { 32 | architecture: 'x86_64', 33 | hostname, 34 | id: '', 35 | ip: ['127.0.0.1/8', '172.17.0.10/16'], 36 | mac: ['02:42:ac:11:00:0a'], 37 | name: hostname, 38 | }, 39 | os: { 40 | family: 'debian', 41 | full: 'Ubuntu focal(20.04.6 LTS (Focal Fossa))', 42 | kernel: '5.15.0-1032-gcp', 43 | name: 'Ubuntu', 44 | platform: 'ubuntu', 45 | version: '20.04.6 LTS (Focal Fossa)', 46 | }, 47 | }, 48 | policy_id: 'policy-elastic-agent-on-cloud', 49 | type: 'PERMANENT', 50 | outputs: { 51 | 'es-containerhost': { 52 | api_key: 'XjxH740BAG_XfFTaAmYH:AtX5ejLMRIyfcmRXTMX-Lg', 53 | permissions_hash: 'b8bf91d03aa17d178cdd82db91a1e0e7711e8fd623ee2d5cb689f912ad5cd026', 54 | type: 'elasticsearch', 55 | api_key_id: 'XjxH740BAG_XfFTaAmYH', 56 | }, 57 | }, 58 | policy_revision_idx: 5, 59 | policy_coordinator_idx: 1, 60 | updated_at: nowTimestamp, 61 | components: [ 62 | { 63 | id: 'fake-policy', 64 | units: [ 65 | { 66 | id: 'fleet-server-es-containerhost-fleet-server-fleet_server-elastic-cloud-fleet-server', 67 | type: 'input', 68 | message: 'Re-configuring', 69 | status: 'CONFIGURING', 70 | }, 71 | { 72 | id: 'fleet-server-es-containerhost', 73 | type: 'output', 74 | message: 'Re-configuring', 75 | status: 'CONFIGURING', 76 | }, 77 | 
], 78 | type: 'fleet-server', 79 | message: "Healthy: communicating with pid '153'", 80 | status: 'HEALTHY', 81 | }, 82 | { 83 | id: 'apm-es-containerhost', 84 | units: [ 85 | { 86 | id: 'apm-es-containerhost', 87 | type: 'output', 88 | message: 'Healthy', 89 | status: 'HEALTHY', 90 | }, 91 | { 92 | id: 'apm-es-containerhost-elastic-cloud-apm', 93 | type: 'input', 94 | message: 'Healthy', 95 | status: 'HEALTHY', 96 | }, 97 | ], 98 | type: 'apm', 99 | message: "Healthy: communicating with pid '179'", 100 | status: 'HEALTHY', 101 | }, 102 | ], 103 | last_checkin_message: 'Running', 104 | last_checkin_status: 'online', 105 | last_checkin: nowTimestamp, 106 | }; 107 | }; 108 | -------------------------------------------------------------------------------- /src/commands/utils/indices.ts: -------------------------------------------------------------------------------- 1 | import { Client } from '@elastic/elasticsearch'; 2 | import { ConfigType, getConfig } from '../../get_config'; 3 | import { IndicesCreateRequest } from '@elastic/elasticsearch/lib/api/types'; 4 | import { exec } from 'child_process'; 5 | import { chunk, once } from 'lodash-es'; 6 | import { createProgressBar } from './cli_utils'; 7 | 8 | export * from './create_agent_document'; 9 | 10 | let esClient: Client; 11 | 12 | const getClientAuth = (config: ConfigType) => { 13 | let auth; 14 | if ('apiKey' in config.elastic) { 15 | auth = { apiKey: config.elastic.apiKey }; 16 | } else if (config.elastic.username && config.elastic.password) { 17 | auth = { 18 | username: config.elastic.username, 19 | password: config.elastic.password, 20 | }; 21 | } 22 | return auth; 23 | }; 24 | 25 | export const getEsClient = () => { 26 | if (esClient) return esClient; 27 | const config = getConfig(); 28 | 29 | console.log('Elasticsearch node:', config.elastic.node); 30 | 31 | esClient = new Client({ 32 | node: config.elastic.node, 33 | auth: getClientAuth(config), 34 | }); 35 | 36 | return esClient; 37 | }; 38 | 
39 | export const getFileLineCount = async (filePath: string): Promise => { 40 | return new Promise((resolve, reject) => { 41 | exec(`wc -l ${filePath}`, (error, stdout, stderr) => { 42 | if (error || stderr) { 43 | reject(error || stderr); 44 | } 45 | 46 | const count = parseInt(stdout.trim().split(' ')[0]); 47 | 48 | if (isNaN(count)) { 49 | console.log( 50 | `Failed to parse line count, line count: "${stdout}", split result: "${stdout.split(' ')}"` 51 | ); 52 | reject(); 53 | } 54 | resolve(count); 55 | }); 56 | }); 57 | }; 58 | 59 | export const indexCheck = async (index: string, body?: Omit) => { 60 | const client = getEsClient(); 61 | if (!client) { 62 | throw new Error(); 63 | } 64 | const isExist = await client.indices.exists({ index: index }); 65 | if (isExist) return; 66 | 67 | console.log('Index does not exist, creating...'); 68 | 69 | try { 70 | await client.indices.create({ 71 | index: index, 72 | settings: { 73 | 'index.mapping.total_fields.limit': 10000, 74 | }, 75 | ...body, 76 | }); 77 | console.log('Index created', index); 78 | } catch (error) { 79 | console.log('Index creation failed', JSON.stringify(error)); 80 | throw error; 81 | } 82 | }; 83 | 84 | export const ingest = async (index: string, documents: Array) => { 85 | const esClient = getEsClient(); 86 | 87 | const progressBar = createProgressBar(index); 88 | 89 | const chunks = chunk(documents, 10000); 90 | progressBar.start(documents.length, 0); 91 | 92 | for (const chunk of chunks) { 93 | try { 94 | const operations = chunk.flatMap((doc) => [{ create: {} }, doc]); 95 | 96 | const results = await esClient.bulk({ index, operations, refresh: true }); 97 | if (results.errors) { 98 | console.log( 99 | 'The errors below occurred when bulk creating documents. Continuing with the potential for partial data.' 
100 | ); 101 | results.items.forEach((each) => { 102 | console.log(each); 103 | }); 104 | } 105 | progressBar.increment(chunk.length); 106 | } catch (err) { 107 | console.log('Error: ', err); 108 | } 109 | } 110 | progressBar.stop(); 111 | }; 112 | -------------------------------------------------------------------------------- /src/commands/utils/integrations_sync_utils.ts: -------------------------------------------------------------------------------- 1 | import { OKTA_USERS_SAMPLE_DOCUMENT } from '../privileged_user_monitoring/sample_documents'; 2 | import { userNameAsEmail, userNameWhitespaceRemoved } from './sample_data_helpers'; 3 | import { TimeWindows } from './time_windows'; 4 | import { faker } from '@faker-js/faker'; 5 | 6 | export const OKTA_ADMIN_USER_ROLES: string[] = [ 7 | 'Super Administrator', 8 | 'Organization Administrator', 9 | 'Group Administrator', 10 | 'Application Administrator', 11 | 'Mobile Administrator', 12 | 'Help Desk Administrator', 13 | 'Report Administrator', 14 | 'API Access Management Administrator', 15 | 'Group Membership Administrator', 16 | 'Read-only Administrator', 17 | ]; 18 | 19 | export const OKTA_NON_ADMIN_USER_ROLES: string[] = [ 20 | 'Guest', 21 | 'Employee', 22 | 'Contractor', 23 | 'Intern', 24 | 'Temp', 25 | ]; 26 | 27 | export type FullSyncEntityEventDoc = { 28 | event: { 29 | agent_id_status: 'verified'; 30 | kind: 'asset'; 31 | dataset: string; 32 | action: 'started' | 'completed'; 33 | start?: string; 34 | end?: string; 35 | ingested?: string; 36 | }; 37 | }; 38 | 39 | export type OktaSampleUser = { 40 | email: string; 41 | firstName: string; 42 | lastName: string; 43 | userId: string; 44 | userName: string; 45 | }; 46 | 47 | export const createOktaSampleUser = (): OktaSampleUser => { 48 | const firstName = faker.person.firstName(); 49 | const lastName = faker.person.lastName(); 50 | const userId = faker.string.uuid(); 51 | const userName = userNameWhitespaceRemoved(`${firstName}.${lastName}`); 52 | const email = 
userNameAsEmail(userName); 53 | return { 54 | email, 55 | firstName, 56 | lastName, 57 | userId, 58 | userName, 59 | }; 60 | }; 61 | 62 | // okta helpers for admin roles split 63 | export const pick = <T>(a: T[]) => a[Math.floor(Math.random() * a.length)]; 64 | export const makeDoc = (isAdmin: boolean) => 65 | OKTA_USERS_SAMPLE_DOCUMENT( 66 | createOktaSampleUser(), // new user each doc 67 | TimeWindows.toRandomTimestamp(TimeWindows.last30DayWindow()), 68 | [isAdmin ? pick(OKTA_ADMIN_USER_ROLES) : pick(OKTA_NON_ADMIN_USER_ROLES)] 69 | ); 70 | 71 | // helpers for entity sync events 72 | export const makeEntityFullSyncEventPair = ({ 73 | dataSet = 'entityanalytics_okta.entity', 74 | baseIso, 75 | gaps = 25000, 76 | ingestedDelay = 3000, 77 | }: { 78 | dataSet?: string; 79 | baseIso: string; 80 | gaps?: number; 81 | ingestedDelay?: number; 82 | }) => { 83 | const startTs = new Date(baseIso).toISOString(); 84 | const endTs = new Date(new Date(baseIso).getTime() + gaps).toISOString(); 85 | const startIngested = new Date(new Date(baseIso).getTime() + ingestedDelay).toISOString(); 86 | const endIngested = new Date(new Date(baseIso).getTime() + gaps + ingestedDelay).toISOString(); 87 | const started: FullSyncEntityEventDoc = { 88 | event: { 89 | agent_id_status: 'verified', 90 | kind: 'asset', 91 | dataset: dataSet, 92 | action: 'started', 93 | start: startTs, 94 | ingested: startIngested, 95 | }, 96 | }; 97 | const completed: FullSyncEntityEventDoc = { 98 | event: { 99 | agent_id_status: 'verified', 100 | kind: 'asset', 101 | dataset: dataSet, 102 | action: 'completed', 103 | end: endTs, 104 | ingested: endIngested, 105 | }, 106 | }; 107 | return [started, completed]; 108 | }; 109 | export const createSampleFullSyncEvents = ({ 110 | count, 111 | syncWindowMs, // e.g. 
24h = 24 * 60 * 60 * 1000 112 | base = new Date(), // starting anchor (defaults to "now") 113 | }: { 114 | count: number; 115 | syncWindowMs: number; 116 | base?: Date | string; 117 | }): FullSyncEntityEventDoc[] => { 118 | const baseMs = typeof base === 'string' ? new Date(base).getTime() : base.getTime(); 119 | const out: FullSyncEntityEventDoc[] = []; 120 | 121 | for (let i = 0; i < count; i++) { 122 | const startIso = new Date(baseMs + i * syncWindowMs).toISOString(); 123 | out.push( 124 | ...makeEntityFullSyncEventPair({ 125 | baseIso: startIso, 126 | }) 127 | ); 128 | } 129 | 130 | return out; // [start, completed, start, completed, ...] in order 131 | }; 132 | -------------------------------------------------------------------------------- /src/commands/utils/sample_data_helpers.ts: -------------------------------------------------------------------------------- 1 | export const userNameWhitespaceRemoved = (userName: string) => { 2 | return userName.replace(/\s+/g, '_'); 3 | }; 4 | 5 | export const userNameAsEmail = (userName: string) => { 6 | return `${userNameWhitespaceRemoved(userName)}@elastic.co`; 7 | }; 8 | -------------------------------------------------------------------------------- /src/commands/utils/time_windows.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment/moment'; 2 | import { faker } from '@faker-js/faker'; 3 | 4 | export interface TimeWindow { 5 | start: moment.Moment; 6 | end: moment.Moment; 7 | } 8 | 9 | export class TimeWindows { 10 | static last30DayWindow = () => ({ 11 | start: moment().subtract(30, 'days'), 12 | end: moment(), 13 | }); 14 | static randomWindowOfOneDayInTheLastMonth = () => { 15 | const day = faker.helpers.rangeToNumber({ min: 2, max: 28 }); 16 | return { 17 | start: moment().subtract(day, 'days'), 18 | end: moment().subtract(day - 1, 'days'), 19 | }; 20 | }; 21 | static toRandomTimestamp = (timeWindow: TimeWindow): string => { 22 | return moment( 23 
| faker.date.between({ 24 | from: timeWindow.start.toDate(), 25 | to: timeWindow.end.toDate(), 26 | }) 27 | ).format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'); 28 | }; 29 | } 30 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | export const ENTITY_STORE_OPTIONS = { 2 | seed: 'seed', 3 | criticality: 'criticality', 4 | riskEngine: 'riskEngine', 5 | rule: 'rule', 6 | agent: 'agent', 7 | } as const; 8 | 9 | export const PRIVILEGED_USER_MONITORING_OPTIONS = { 10 | anomalyData: 'anomalyData', 11 | sourceEventData: 'sourceEventData', 12 | csvFile: 'csvFile', 13 | riskEngineAndRule: 'riskEngineAndRule', 14 | integrationSyncSourceEventData: 'integrationSyncSourceEventData', 15 | assetCriticality: 'assetCriticality', 16 | installPad: 'installPad', 17 | } as const; 18 | 19 | export type PrivilegedUserMonitoringOption = keyof typeof PRIVILEGED_USER_MONITORING_OPTIONS; 20 | 21 | export const generateNewSeed = () => { 22 | return Math.round(Math.random() * 100000); 23 | }; 24 | 25 | export const API_VERSIONS = { 26 | public: { 27 | v1: '2023-10-31', 28 | }, 29 | internal: { 30 | v1: '1', 31 | }, 32 | }; 33 | 34 | export type AssetCriticality = 35 | | 'low_impact' 36 | | 'medium_impact' 37 | | 'high_impact' 38 | | 'extreme_impact' 39 | | 'unknown'; // not a valid value for assignment, signifies no criticality assigned 40 | 41 | export const ASSET_CRITICALITY: AssetCriticality[] = [ 42 | 'low_impact', 43 | 'medium_impact', 44 | 'high_impact', 45 | 'extreme_impact', 46 | 'unknown', 47 | ]; 48 | 49 | // API Endpoint URL's for Kibana 50 | export const RISK_SCORE_URL = '/internal/risk_score'; 51 | export const RISK_SCORE_DASHBOARD_URL = (entityType: 'host' | 'user') => 52 | `/internal/risk_score/prebuilt_content/saved_objects/_bulk_create/${entityType}RiskScoreDashboards`; 53 | export const RISK_SCORE_SCORES_URL = '/internal/risk_score/scores'; 54 | export 
const RISK_SCORE_ENGINE_INIT_URL = '/internal/risk_score/engine/init'; 55 | export const ASSET_CRITICALITY_URL = '/api/asset_criticality'; 56 | export const ASSET_CRITICALITY_BULK_URL = '/api/asset_criticality/bulk'; 57 | export const DETECTION_ENGINE_RULES_URL = '/api/detection_engine/rules'; 58 | export const DETECTION_ENGINE_RULES_BULK_ACTION_URL = `${DETECTION_ENGINE_RULES_URL}/_bulk_action`; 59 | export const COMPONENT_TEMPLATES_URL = '/api/index_management/component_templates'; 60 | export const FLEET_EPM_PACKAGES_URL = (packageName: string, version: string = 'latest') => { 61 | let url = `/api/fleet/epm/packages/${packageName}`; 62 | if (version !== 'latest') { 63 | url = `${url}/${version}`; 64 | } 65 | return url; 66 | }; 67 | export const SPACES_URL = '/api/spaces/space'; 68 | export const SPACE_URL = (space: string) => `/api/spaces/space/${space}`; 69 | 70 | export const ENTITY_ENGINES_URL = '/api/entity_store/engines'; 71 | export const ENTITY_ENGINE_URL = (engineType: string) => `${ENTITY_ENGINES_URL}/${engineType}`; 72 | export const INIT_ENTITY_ENGINE_URL = (engineType: string) => 73 | `${ENTITY_ENGINE_URL(engineType)}/init`; 74 | -------------------------------------------------------------------------------- /src/create_alerts.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | 3 | function baseCreateAlerts({ 4 | userName = 'user-1', 5 | hostName = 'host-1', 6 | space = 'default', 7 | }: { 8 | userName?: string; 9 | hostName?: string; 10 | space?: string; 11 | } = {}) { 12 | return { 13 | 'host.name': hostName, 14 | 'user.name': userName, 15 | 'kibana.alert.start': '2023-04-11T20:18:15.816Z', 16 | 'kibana.alert.last_detected': '2023-04-11T20:18:15.816Z', 17 | 'kibana.version': '8.7.0', 18 | 'kibana.alert.rule.parameters': { 19 | description: '2', 20 | risk_score: 21, 21 | severity: 'low', 22 | license: '', 23 | author: [], 24 | false_positives: [], 25 | from: 
'now-360s', 26 | rule_id: faker.string.uuid(), 27 | max_signals: 100, 28 | risk_score_mapping: [], 29 | severity_mapping: [], 30 | threat: [], 31 | to: 'now', 32 | references: [], 33 | version: 3, 34 | exceptions_list: [], 35 | immutable: false, 36 | related_integrations: [], 37 | required_fields: [], 38 | setup: '', 39 | type: 'query', 40 | language: 'kuery', 41 | index: ['my*'], 42 | query: '*', 43 | filters: [], 44 | }, 45 | 'kibana.alert.rule.category': 'Custom Query Rule', 46 | 'kibana.alert.rule.consumer': 'siem', 47 | 'kibana.alert.rule.execution.uuid': faker.string.uuid(), 48 | 'kibana.alert.rule.name': 'Alert create by documents-generator', 49 | 'kibana.alert.rule.producer': 'siem', 50 | 'kibana.alert.rule.rule_type_id': 'siem.queryRule', 51 | 'kibana.alert.rule.uuid': faker.string.uuid(), 52 | 'kibana.space_ids': [space], 53 | 'kibana.alert.rule.tags': [], 54 | '@timestamp': Date.now(), 55 | 'event.kind': 'signal', 56 | 'kibana.alert.original_time': '2023-04-11T20:17:14.851Z', 57 | 'kibana.alert.ancestors': [ 58 | { 59 | id: '8TD3cYcB1hicTK_CdP--', 60 | type: 'event', 61 | index: 'my-index', 62 | depth: 0, 63 | }, 64 | ], 65 | 'kibana.alert.status': 'active', 66 | 'kibana.alert.workflow_status': 'open', 67 | 'kibana.alert.depth': 1, 68 | 'kibana.alert.reason': 'event on ' + hostName + 'created low alert 1.', 69 | 'kibana.alert.severity': 'low', 70 | 'kibana.alert.risk_score': 21, 71 | 'kibana.alert.rule.actions': [], 72 | 'kibana.alert.rule.author': [], 73 | 'kibana.alert.rule.created_at': '2023-04-11T20:15:52.473Z', 74 | 'kibana.alert.rule.created_by': 'elastic', 75 | 'kibana.alert.rule.description': '2', 76 | 'kibana.alert.rule.enabled': true, 77 | 'kibana.alert.rule.exceptions_list': [], 78 | 'kibana.alert.rule.false_positives': [], 79 | 'kibana.alert.rule.from': 'now-360s', 80 | 'kibana.alert.rule.immutable': false, 81 | 'kibana.alert.rule.interval': '5m', 82 | 'kibana.alert.rule.indices': ['my*'], 83 | 'kibana.alert.rule.license': '', 84 | 
'kibana.alert.rule.max_signals': 100, 85 | 'kibana.alert.rule.references': [], 86 | 'kibana.alert.rule.risk_score_mapping': [], 87 | 'kibana.alert.rule.rule_id': 'cc066b08-b4d2-4e74-81cb-3cda5aaa612d', 88 | 'kibana.alert.rule.severity_mapping': [], 89 | 'kibana.alert.rule.threat': [], 90 | 'kibana.alert.rule.to': 'now', 91 | 'kibana.alert.rule.type': 'query', 92 | 'kibana.alert.rule.updated_at': '2023-04-11T20:18:11.024Z', 93 | 'kibana.alert.rule.updated_by': 'elastic', 94 | 'kibana.alert.rule.version': 3, 95 | 'kibana.alert.rule.meta.from': '1m', 96 | 'kibana.alert.rule.meta.kibana_siem_app_url': 'http://localhost:5601/app/security', 97 | 'kibana.alert.rule.risk_score': 21, 98 | 'kibana.alert.rule.severity': 'low', 99 | 'kibana.alert.uuid': faker.string.uuid(), 100 | }; 101 | } 102 | 103 | export type BaseCreateAlertsReturnType = ReturnType; 104 | 105 | export default function createAlerts( 106 | override: O, 107 | { 108 | userName, 109 | hostName, 110 | space, 111 | }: { 112 | userName?: string; 113 | hostName?: string; 114 | space?: string; 115 | } = {} 116 | ): O & BaseCreateAlertsReturnType { 117 | return { ...baseCreateAlerts({ userName, hostName, space }), ...override }; 118 | } 119 | -------------------------------------------------------------------------------- /src/create_events.ts: -------------------------------------------------------------------------------- 1 | import { faker } from '@faker-js/faker'; 2 | import moment from 'moment'; 3 | 4 | export default function createEvents(override = {}) { 5 | return { 6 | '@timestamp': moment().format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'), 7 | criticality: faker.helpers.arrayElement([ 8 | 'low_impact', 9 | 'medium_impact', 10 | 'high_impact', 11 | 'extreme_impact', 12 | ]), 13 | ...override, 14 | }; 15 | } 16 | -------------------------------------------------------------------------------- /src/create_misconfigurations.ts: -------------------------------------------------------------------------------- 1 | import { 
faker } from '@faker-js/faker'; 2 | import moment from 'moment'; 3 | 4 | export interface CreateMisconfigurationsParams { 5 | username?: string; 6 | hostname?: string; 7 | space?: string; 8 | } 9 | 10 | export default function createMisconfigurations({ 11 | username = 'user-1', 12 | hostname = 'host-1', 13 | space = 'default', 14 | }: CreateMisconfigurationsParams) { 15 | const now = moment().format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'); 16 | return { 17 | '@timestamp': now, 18 | agent: { 19 | name: 'elastic-agent-cspm', 20 | id: faker.string.uuid(), 21 | type: 'cloudbeat', 22 | ephemeral_id: faker.string.uuid(), 23 | version: '9.0.0', 24 | }, 25 | resource: { 26 | account_id: faker.string.numeric(12), 27 | sub_type: 'gcp-iam-service-account-key', 28 | account_name: 'test', 29 | organization_id: faker.string.numeric(12), 30 | name: `projects/test/serviceAccounts/sa-cspm-gcp-test-91@test.iam.gserviceaccount.com/keys/${faker.string.uuid()}`, 31 | raw: { 32 | AccessContextPolicy: null, 33 | update_time: { 34 | seconds: 1753889348, 35 | }, 36 | resource: { 37 | data: { 38 | keyAlgorithm: 'KEY_ALG_RSA_2048', 39 | privateKeyType: 'TYPE_GOOGLE_CREDENTIALS_FILE', 40 | validBeforeTime: '9999-12-31T23:59:59Z', 41 | name: `projects/test/serviceAccounts/sa-cspm-gcp-test-73@test.iam.gserviceaccount.com/keys/${faker.string.uuid()}`, 42 | keyType: 'USER_MANAGED', 43 | keyOrigin: 'GOOGLE_PROVIDED', 44 | validAfterTime: '2025-07-30T15:29:08Z', 45 | }, 46 | discovery_name: 'TestAccountKey', 47 | version: 'v1', 48 | discovery_document_uri: 'https://iam.googleapis.com/$discovery/rest', 49 | }, 50 | asset_type: 'iam.googleapis.com/ServiceAccountKey', 51 | name: `//iam.googleapis.com/projects/test/serviceAccounts/${faker.string.numeric(22)}/keys/${faker.string.uuid()}`, 52 | ancestors: [ 53 | `projects/${faker.string.numeric(12)}`, 54 | `folders/${faker.string.numeric(12)}`, 55 | `folders/${faker.string.numeric(12)}`, 56 | `folders/${faker.string.numeric(12)}`, 57 | 
`organizations/${faker.string.numeric(12)}`, 58 | ], 59 | }, 60 | id: `//iam.googleapis.com/projects/test/serviceAccounts/${faker.string.numeric(22)}/keys/${faker.string.uuid()}`, 61 | type: 'identity-management', 62 | }, 63 | cloud_security_posture: { 64 | package_policy: { 65 | id: faker.string.uuid(), 66 | revision: 11, 67 | }, 68 | }, 69 | elastic_agent: { 70 | id: faker.string.uuid(), 71 | version: '9.0.0', 72 | snapshot: false, 73 | }, 74 | rule: { 75 | references: 76 | '1. https://cloud.google.com/iam/docs/understanding-service-accounts#managing_service_account_keys\n2. https://cloud.google.com/sdk/gcloud/reference/iam/service-accounts/keys/list\n3. https://cloud.google.com/iam/docs/service-accounts', 77 | impact: 78 | 'Rotating service account keys will break communication for dependent applications. Dependent applications need to be configured manually with the new key `ID` displayed in the `Service account keys` section and the `private key` downloaded by the user.', 79 | description: 80 | 'Service Account keys consist of a key ID (Private_key_Id) and Private key, which are used to sign programmatic requests users make to Google cloud services accessible to that particular service account.\nIt is recommended that all Service Account keys are regularly rotated.', 81 | default_value: '', 82 | section: 'Identity and Access Management', 83 | rationale: 84 | 'Rotating Service Account keys will reduce the window of opportunity for an access key that is associated with a compromised or terminated account to be used.\nService Account keys should be rotated to ensure that data cannot be accessed with an old key that might have been lost, cracked, or stolen.\n\nEach service account is associated with a key pair managed by Google Cloud Platform (GCP).\nIt is used for service-to-service authentication within GCP.\nGoogle rotates the keys daily.\n\nGCP provides the option to create one or more user-managed (also called external key pairs) key pairs for use from 
outside GCP (for example, for use with Application Default Credentials).\nWhen a new key pair is created, the user is required to download the private key (which is not retained by Google).\nWith external keys, users are responsible for keeping the private key secure and other management operations such as key rotation.\nExternal keys can be managed by the IAM API, gcloud command-line tool, or the Service Accounts page in the Google Cloud Platform Console.\nGCP facilitates up to 10 external service account keys per service account to facilitate key rotation.', 85 | version: '1.0', 86 | benchmark: { 87 | name: 'CIS Google Cloud Platform Foundation', 88 | rule_number: '1.7', 89 | id: 'cis_gcp', 90 | version: 'v2.0.0', 91 | posture_type: 'cspm', 92 | }, 93 | tags: ['CIS', 'GCP', 'CIS 1.7', 'Identity and Access Management'], 94 | remediation: 95 | '**From Google Cloud Console**\n\n**Delete any external (user-managed) Service Account Key older than 90 days:**\n\n1. Go to `APIs & Services\\Credentials` using `https://console.cloud.google.com/apis/credentials`\n\n2. In the Section `Service Account Keys`, for every external (user-managed) service account key where `creation date` is greater than or equal to the past 90 days, click `Delete Bin Icon` to `Delete Service Account key`\n\n**Create a new external (user-managed) Service Account Key for a Service Account:**\n\n3. Go to `APIs & Services\\Credentials` using `https://console.cloud.google.com/apis/credentials`\n\n4. Click `Create Credentials` and Select `Service Account Key`.\n\n5. Choose the service account in the drop-down list for which an External (user-managed) Service Account key needs to be created.\n\n6. Select the desired key type format among `JSON` or `P12`.\n\n7. Click `Create`. It will download the `private key`. Keep it safe. \n\n8. Click `Close` if prompted. \n\n9. The site will redirect to the `APIs & Services\\Credentials` page. 
Make a note of the new `ID` displayed in the `Service account keys` section.', 96 | audit: 97 | '**From Google Cloud Console**\n\n1. Go to `APIs & Services\\Credentials` using `https://console.cloud.google.com/apis/credentials`\n\n2. In the section `Service Account Keys`, for every External (user-managed) service account key listed ensure the `creation date` is within the past 90 days.\n\n**From Google Cloud CLI**\n\n3. List all Service accounts from a project.\n\n```\ngcloud iam service-accounts list\n```\n\n4. For every service account list service account keys.\n\n```\ngcloud iam service-accounts keys list --iam-account [Service_Account_Email_Id] --format=json\n```\n\n5. Ensure every service account key for a service account has a `"validAfterTime"` value within the past 90 days.', 98 | name: 'Ensure User-Managed/External Keys for Service Accounts Are Rotated Every 90 Days or Fewer', 99 | id: faker.string.uuid(), 100 | profile_applicability: '* Level 1', 101 | }, 102 | message: 103 | 'Rule "Ensure User-Managed/External Keys for Service Accounts Are Rotated Every 90 Days or Fewer": passed', 104 | result: { 105 | evaluation: faker.helpers.arrayElement(['passed', 'failed']), 106 | evidence: now, 107 | expected: null, 108 | }, 109 | cloud: { 110 | Organization: { 111 | id: faker.string.numeric(12), 112 | }, 113 | provider: 'gcp', 114 | account: { 115 | name: 'test', 116 | id: faker.string.numeric(12), 117 | }, 118 | }, 119 | observer: { 120 | vendor: 'Elastic', 121 | }, 122 | cloudbeat: { 123 | commit_time: '0001-01-01T00:00:00Z', 124 | version: '9.0.0', 125 | policy: { 126 | commit_time: '0001-01-01T00:00:00Z', 127 | version: '9.0.0', 128 | }, 129 | }, 130 | ecs: { 131 | version: '8.6.0', 132 | }, 133 | related: { 134 | entity: [ 135 | `//iam.googleapis.com/projects/test/serviceAccounts/${faker.string.numeric(21)}/keys/${faker.string.uuid()}`, 136 | ], 137 | }, 138 | data_stream: { 139 | namespace: space, 140 | type: 'logs', 141 | dataset: 
'cloud_security_posture.findings', 142 | }, 143 | event: { 144 | agent_id_status: 'verified', 145 | sequence: 1753889443, 146 | created: now, 147 | kind: 'state', 148 | id: faker.string.uuid(), 149 | category: ['configuration'], 150 | type: ['info'], 151 | dataset: 'cloud_security_posture.findings', 152 | outcome: 'success', 153 | }, 154 | user: { 155 | name: username, 156 | effective: { 157 | name: `projects/test/serviceAccounts/sa-cspm-gcp-test-85-sa@test.iam.gserviceaccount.com/keys/${faker.string.uuid()}`, 158 | id: `//iam.googleapis.com/projects/test/serviceAccounts/${faker.string.numeric(21)}/keys/${faker.string.uuid()}`, 159 | }, 160 | }, 161 | host: { 162 | name: hostname, 163 | }, 164 | }; 165 | } 166 | -------------------------------------------------------------------------------- /src/create_vulnerability.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import { faker } from '@faker-js/faker'; 3 | 4 | export interface CreateVulnerabilitiesParams { 5 | username?: string; 6 | hostname?: string; 7 | space?: string; 8 | } 9 | export default function createVulnerabilities({ 10 | username = 'user-1', 11 | hostname = 'host-1', 12 | space = 'default', 13 | }: CreateVulnerabilitiesParams) { 14 | const now = moment().format('yyyy-MM-DDTHH:mm:ss.SSSSSSZ'); 15 | const dataset = 'cloud_security_posture.vulnerabilities'; 16 | return { 17 | '@timestamp': now, 18 | user: { 19 | name: username, 20 | }, 21 | agent: { 22 | ephemeral_id: faker.string.uuid(), 23 | id: faker.string.uuid(), 24 | name: 'ip-172-31-41-123.eu-west-1.compute.internal', 25 | type: 'cloudbeat', 26 | version: '9.1.2', 27 | }, 28 | cloud: { 29 | Security: { 30 | security_groups: { 31 | group_id: faker.string.uuid(), 32 | group_name: `terraform-${faker.string.alphanumeric(26)}`, 33 | }, 34 | }, 35 | Tags: { 36 | Name: 'test-env-kspm-20p', 37 | deployment: 'test-env-kspm', 38 | division: 'engineering', 39 | ec2_type: 'kspm', 40 | 
id: faker.string.uuid(), 41 | org: 'security', 42 | owner: 'cloudbeat', 43 | project: 'test-env', 44 | provisioner: 'terraform', 45 | team: 'cloud-security-posture', 46 | }, 47 | account: { 48 | id: faker.string.numeric(12), 49 | name: 'cloud-security-tests', 50 | }, 51 | availability_zone: 'eu-west-1a', 52 | instance: { 53 | id: faker.string.uuid(), 54 | name: 'test-env-kspm-3Oj', 55 | }, 56 | machine: { 57 | Authentication: { 58 | key: `cloudbeat-generated-${faker.string.uuid()}`, 59 | }, 60 | Image: `ami-${faker.string.uuid()}`, 61 | Launch_time: now, 62 | type: 'c5.4xlarge', 63 | }, 64 | provider: 'aws', 65 | region: 'eu-west-1', 66 | service: { 67 | name: 'AWS EC2', 68 | }, 69 | }, 70 | cloud_security_posture: { 71 | package_policy: { 72 | id: faker.string.uuid(), 73 | revision: 16, 74 | }, 75 | }, 76 | cloudbeat: { 77 | commit_time: '0001-01-01T00:00:00.000Z', 78 | version: '9.1.2', 79 | }, 80 | data_stream: { 81 | dataset, 82 | namespace: space, 83 | type: 'logs', 84 | }, 85 | ecs: { 86 | version: '8.6.0', 87 | }, 88 | elastic_agent: { 89 | id: faker.string.uuid(), 90 | version: '9.1.2', 91 | snapshot: false, 92 | }, 93 | event: { 94 | agent_id_status: 'verified', 95 | category: ['vulnerability'], 96 | created: now, 97 | dataset, 98 | id: faker.string.uuid(), 99 | ingested: now, 100 | kind: 'state', 101 | outcome: 'success', 102 | sequence: 1756996222, 103 | type: ['info'], 104 | }, 105 | host: { 106 | architecture: 'x86_64', 107 | name: hostname, 108 | os: { 109 | platform: 'Linux/UNIX', 110 | }, 111 | }, 112 | network: { 113 | Mac_addresses: [faker.internet.mac()], 114 | Private_ip: faker.internet.ip(), 115 | Public_ip: faker.internet.ip(), 116 | }, 117 | observer: { 118 | vendor: 'Elastic', 119 | }, 120 | package: { 121 | fixed_version: '1.23.12, 1.24.6', 122 | name: 'stdlib', 123 | path: 
`var/snap/docker/common/var-lib-docker/volumes/${faker.string.uuid()}/_data/lib/containerd/io.containerd.snapshotter.v1.overlayfs/snapshots/28/fs/usr/share/elastic-agent/data/elastic-agent-5f1123/elastic-agent`, 124 | type: 'gobinary', 125 | version: 'v1.24.4', 126 | }, 127 | resource: { 128 | id: faker.string.uuid(), 129 | name: hostname, 130 | }, 131 | vulnerability: { 132 | category: 'lang-pkgs', 133 | class: 'lang-pkgs', 134 | classification: 'CVSS', 135 | cvss: { 136 | bitnami: { 137 | V3Vector: 'CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:L/A:L', 138 | V3Score: 7, 139 | }, 140 | redhat: { 141 | V3Vector: 'CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:L/A:L', 142 | V3Score: 7, 143 | }, 144 | }, 145 | data_source: { 146 | ID: 'govulndb', 147 | Name: 'The Go Vulnerability Database', 148 | URL: 'https://pkg.go.dev/vuln/', 149 | }, 150 | description: 151 | 'Cancelling a query (e.g. by cancelling the context passed to one of the query methods) during a call to the Scan method of the returned Rows can result in unexpected results if other queries are being made in parallel. 
This can result in a race condition that may overwrite the expected results with those of another query, causing the call to Scan to return either unexpected results from the other query or an error.', 152 | enumeration: 'CVE', 153 | id: 'CVE-2025-47907', 154 | package: { 155 | fixed_version: '1.23.12, 1.24.6', 156 | name: 'stdlib', 157 | version: 'v1.24.4', 158 | }, 159 | published_date: '2025-08-07T16:15:30.357Z', 160 | reference: 'https://avd.aquasec.com/nvd/cve-2025-47907', 161 | report_id: '1.756996211E9', 162 | scanner: { 163 | vendor: 'Trivy', 164 | version: 'v0.35.0', 165 | }, 166 | score: { 167 | base: 7, 168 | version: '3.1', 169 | }, 170 | severity: faker.helpers.arrayElement(['HIGH', 'LOW', 'MEDIUM', 'CRITICAL']), 171 | 172 | title: 'database/sql: Postgres Scan Race Condition', 173 | }, 174 | }; 175 | } 176 | -------------------------------------------------------------------------------- /src/get_config.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import * as t from 'io-ts'; 3 | // get config relative to the file 4 | import { fileURLToPath } from 'url'; 5 | import { dirname, resolve } from 'path'; 6 | import { PathReporter } from 'io-ts/lib/PathReporter'; 7 | 8 | const NodeWithCredentials = t.type({ 9 | node: t.string, 10 | username: t.string, 11 | password: t.string, 12 | }); 13 | 14 | const NodeWithAPIKey = t.type({ 15 | node: t.string, 16 | apiKey: t.string, 17 | }); 18 | 19 | const Node = t.union([NodeWithCredentials, NodeWithAPIKey]); 20 | 21 | const Config = t.type({ 22 | elastic: Node, 23 | kibana: Node, 24 | serverless: t.union([t.boolean, t.undefined]), 25 | eventIndex: t.union([t.string, t.undefined]), 26 | eventDateOffsetHours: t.union([t.number, t.undefined]), 27 | }); 28 | 29 | export type ConfigType = t.TypeOf; 30 | 31 | let config: ConfigType; 32 | 33 | const CONFIG_FILE_NAME = 'config.json'; 34 | 35 | const directoryName = dirname(fileURLToPath(import.meta.url)); 
36 | export const configPath = resolve(directoryName, `../${CONFIG_FILE_NAME}`); 37 | 38 | export const getConfig = (): ConfigType => { 39 | if (config) { 40 | return config; 41 | } 42 | 43 | const configJson = JSON.parse(fs.readFileSync(configPath, 'utf8')); 44 | 45 | if (!configJson.eventIndex) { 46 | configJson.eventIndex = 'logs-testlogs-default'; 47 | } 48 | 49 | const validationResult = Config.decode(configJson); 50 | 51 | if (validationResult._tag === 'Left') { 52 | console.error( 53 | `There was a config validation error. Fix issues below in the ${CONFIG_FILE_NAME} file, and try again.` 54 | ); 55 | console.log(PathReporter.report(validationResult)); 56 | process.exit(1); 57 | } 58 | 59 | config = configJson; 60 | return configJson; 61 | }; 62 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | import { program } from 'commander'; 3 | import { 4 | deleteAllAlerts, 5 | deleteAllEvents, 6 | generateAlerts, 7 | generateEvents, 8 | generateGraph, 9 | } from './commands/documents'; 10 | import { setupEntityResolutionDemo } from './commands/entity_resolution'; 11 | import { generateLegacyRiskScore } from './commands/legacy_risk_score'; 12 | import { kibanaApi } from './utils/'; 13 | import { cleanEntityStore, generateEntityStore } from './commands/entity_store'; 14 | import { 15 | createPerfDataFile, 16 | listPerfDataFiles, 17 | uploadPerfDataFile, 18 | uploadPerfDataFileInterval, 19 | } from './commands/entity_store_perf'; 20 | import { checkbox, input } from '@inquirer/prompts'; 21 | import { 22 | ENTITY_STORE_OPTIONS, 23 | generateNewSeed, 24 | PRIVILEGED_USER_MONITORING_OPTIONS, 25 | PrivilegedUserMonitoringOption, 26 | } from './constants'; 27 | import { initializeSpace } from './utils'; 28 | import { generateAssetCriticality } from './commands/asset_criticality'; 29 | import { deleteAllRules, 
generateRulesAndAlerts } from './commands/rules'; 30 | import { createConfigFileOnFirstRun } from './utils/create_config_on_first_run'; 31 | import { promptForFileSelection } from './commands/utils/cli_utils'; 32 | import { privmonCommand } from './commands/privileged_user_monitoring/privileged_user_monitoring'; 33 | import { dirname } from 'path'; 34 | import { fileURLToPath } from 'url'; 35 | import { generateInsights } from './commands/insights'; 36 | 37 | await createConfigFileOnFirstRun(); 38 | 39 | const parseIntBase10 = (input: string) => parseInt(input, 10); 40 | 41 | export const srcDirectory = dirname(fileURLToPath(import.meta.url)); 42 | 43 | program 44 | .command('generate-alerts') 45 | .option('-n ', 'number of alerts') 46 | .option('-h ', 'number of hosts') 47 | .option('-u ', 'number of users') 48 | .option('-s ', 'space (will be created if it does not exist)') 49 | .description('Generate fake alerts') 50 | .action(async (options) => { 51 | const alertsCount = parseInt(options.n || '1'); 52 | const hostCount = parseInt(options.h || '1'); 53 | const userCount = parseInt(options.u || '1'); 54 | const space = options.s || 'default'; 55 | 56 | if (space !== 'default') { 57 | await initializeSpace(space); 58 | } 59 | 60 | generateAlerts(alertsCount, userCount, hostCount, space); 61 | }); 62 | 63 | program 64 | .command('generate-events') 65 | .argument('', 'integer argument', parseIntBase10) 66 | .description('Generate events') 67 | .action(generateEvents); 68 | 69 | program.command('generate-graph').description('Generate fake graph').action(generateGraph); 70 | 71 | program.command('delete-alerts').description('Delete all alerts').action(deleteAllAlerts); 72 | 73 | program.command('delete-events').description('Delete all events').action(deleteAllEvents); 74 | 75 | program 76 | .command('test-risk-score') 77 | .description('Test risk score API') 78 | .action(kibanaApi.fetchRiskScore); 79 | 80 | program 81 | .command('create-perf-data') 82 | .argument('', 
'name of the file') 83 | .argument('', 'number of entities', parseIntBase10) 84 | .argument('', 'number of logs per entity', parseIntBase10) 85 | .argument('[start-index]', 'for sequential data, which index to start at', parseIntBase10, 0) 86 | .description('Create performance data') 87 | .action((name, entityCount, logsPerEntity, startIndex) => { 88 | createPerfDataFile({ name, entityCount, logsPerEntity, startIndex }); 89 | }); 90 | 91 | program 92 | .command('upload-perf-data') 93 | .argument('[file]', 'File to upload') 94 | .option('--index ', 'Destination index') 95 | .option('--delete', 'Delete all entities before uploading') 96 | .description('Upload performance data file') 97 | .action(async (file, options) => { 98 | await uploadPerfDataFile( 99 | file ?? (await promptForFileSelection(listPerfDataFiles())), 100 | options.index, 101 | options.delete 102 | ); 103 | }); 104 | 105 | program 106 | .command('upload-perf-data-interval') 107 | .argument('[file]', 'File to upload') 108 | .option('--interval ', 'interval in s', parseIntBase10, 30) 109 | .option('--count ', 'number of times to upload', parseIntBase10, 10) 110 | .option('--deleteData', 'Delete all entities before uploading') 111 | .option('--deleteEngines', 'Delete all entities before uploading') 112 | .description('Upload performance data file') 113 | .action(async (file, options) => { 114 | await uploadPerfDataFileInterval( 115 | file ?? 
(await promptForFileSelection(listPerfDataFiles())), 116 | options.interval * 1000, 117 | options.count, 118 | options.deleteData, 119 | options.deleteEngines 120 | ); 121 | }); 122 | 123 | program 124 | .command('entity-resolution-demo') 125 | .option('--mini', 'Only load the mini dataset', false) 126 | .option('--delete', 'Delete old data', false) 127 | .option('--keep-emails', 'No Email variants', false) 128 | .option('--space', 'space to use', 'default') 129 | .description('Load entity resolution demo data') 130 | .action(({ mini, deleteData, keepEmails, space }) => { 131 | setupEntityResolutionDemo({ mini, deleteData, keepEmails, space }); 132 | }); 133 | 134 | program 135 | .command('entity-store') 136 | .description('Generate entity store') 137 | .option('--space ', 'Space to create entity store in') 138 | .action(async (options) => { 139 | const entityStoreAnswers = await checkbox({ 140 | message: 'Select options', 141 | choices: [ 142 | { 143 | name: 'Seed (stable random data)', 144 | value: ENTITY_STORE_OPTIONS.seed, 145 | checked: true, 146 | }, 147 | { 148 | name: 'Assign asset criticality', 149 | value: ENTITY_STORE_OPTIONS.criticality, 150 | checked: true, 151 | }, 152 | { 153 | name: 'Enable Risk Engine', 154 | value: ENTITY_STORE_OPTIONS.riskEngine, 155 | checked: true, 156 | }, 157 | { 158 | name: 'Create detection rule', 159 | value: ENTITY_STORE_OPTIONS.rule, 160 | checked: true, 161 | }, 162 | { 163 | name: 'Generate fake elastic agents for hosts', 164 | value: ENTITY_STORE_OPTIONS.agent, 165 | checked: false, 166 | }, 167 | ], 168 | }); 169 | 170 | const userCount = await input({ 171 | message: 'How many users', 172 | default: '10', 173 | }); 174 | 175 | const hostCount = await input({ 176 | message: 'How many hosts', 177 | default: '10', 178 | }); 179 | 180 | const serviceCount = await input({ 181 | message: 'How many services', 182 | default: '10', 183 | }); 184 | 185 | const genericEntitiesCount = await input({ 186 | message: 'How many 
generic entities', 187 | default: '10', 188 | }); 189 | 190 | const offsetHours = await input({ 191 | message: 'Event date offset in hours (how many hours ago events should be generated)', 192 | default: '1', 193 | }); 194 | 195 | const seed = generateNewSeed() + ''; 196 | 197 | let seedAnswer = seed; 198 | 199 | if (entityStoreAnswers.includes(ENTITY_STORE_OPTIONS.seed)) { 200 | seedAnswer = await input({ 201 | message: 'Enter seed to generate stable random data or to use a new seed', 202 | default: seed, 203 | }); 204 | } 205 | 206 | generateEntityStore({ 207 | space: options.space, 208 | users: parseIntBase10(userCount), 209 | hosts: parseIntBase10(hostCount), 210 | services: parseIntBase10(serviceCount), 211 | genericEntities: parseIntBase10(genericEntitiesCount), 212 | seed: parseIntBase10(seedAnswer), 213 | options: entityStoreAnswers, 214 | offsetHours: parseIntBase10(offsetHours), 215 | }); 216 | }); 217 | 218 | program 219 | .command('quick-entity-store') 220 | .description('Generate quick entity store') 221 | .option('--space ', 'Space to create entity store in') 222 | .action(async (options) => { 223 | const space = options.space || 'default'; 224 | 225 | generateEntityStore({ 226 | space, 227 | users: 10, 228 | hosts: 10, 229 | services: 10, 230 | genericEntities: 10, 231 | seed: generateNewSeed(), 232 | options: [ 233 | ENTITY_STORE_OPTIONS.criticality, 234 | ENTITY_STORE_OPTIONS.riskEngine, 235 | ENTITY_STORE_OPTIONS.rule, 236 | ], 237 | offsetHours: 1, 238 | }); 239 | }); 240 | 241 | program.command('clean-entity-store').description('clean entity store').action(cleanEntityStore); 242 | 243 | program 244 | .command('generate-entity-insights') 245 | .description('Generate entities vulnerabilities and misconfigurations') 246 | .action(async (options) => { 247 | const users = parseInt(options.u || '10'); 248 | const hosts = parseInt(options.h || '10'); 249 | const space = options.s || 'default'; 250 | 251 | generateInsights({ users, hosts, space }); 252 
| }); 253 | 254 | program 255 | .command('generate-asset-criticality') 256 | .option('-h ', 'number of hosts') 257 | .option('-u ', 'number of users') 258 | .option('-s ', 'space') 259 | .description('Generate asset criticality for entities') 260 | .action(async (options) => { 261 | const users = parseInt(options.u || '10'); 262 | const hosts = parseInt(options.h || '10'); 263 | const space = options.s || 'default'; 264 | 265 | generateAssetCriticality({ users, hosts, space }); 266 | }); 267 | 268 | program 269 | .command('generate-legacy-risk-score') 270 | .description('Install legacy risk score and generate data') 271 | .action(generateLegacyRiskScore); 272 | 273 | program 274 | .command('rules') 275 | .description('Generate detection rules and events') 276 | .option('-r, --rules ', 'Number of rules to generate', '10') 277 | .option('-e, --events ', 'Number of events to generate', '50') 278 | .option('-i, --interval ', 'Rule execution interval', '5m') 279 | .option('-f, --from ', 'Generate events from last N hours', '24') 280 | .option('-g, --gaps ', 'Amount of gaps per rule', '0') 281 | .option('-c, --clean', 'Clean gap events before generating rules', 'false') 282 | .action(async (options) => { 283 | try { 284 | const ruleCount = parseInt(options.rules); 285 | const eventCount = parseInt(options.events); 286 | const fromHours = parseInt(options.from); 287 | const gaps = parseInt(options.gaps); 288 | 289 | console.log(`Generating ${ruleCount} rules and ${eventCount} events...`); 290 | console.log(`Using interval: ${options.interval}`); 291 | console.log(`Generating events from last ${fromHours} hours`); 292 | console.log(`Generating ${gaps} gaps per rule`); 293 | 294 | if (options.clean) { 295 | await deleteAllRules(); 296 | } 297 | 298 | await generateRulesAndAlerts(ruleCount, eventCount, { 299 | interval: options.interval, 300 | from: fromHours, 301 | gapsPerRule: gaps, 302 | }); 303 | 304 | console.log('Successfully generated rules and events'); 305 | } catch 
(error) { 306 | console.error('Error generating rules and events:', error); 307 | process.exit(1); 308 | } 309 | }); 310 | 311 | program 312 | .command('delete-rules') 313 | .description('Delete all detection rules') 314 | .option('-s, --space ', 'Space to delete rules from') 315 | .action(async (options) => { 316 | try { 317 | await deleteAllRules(options.space); 318 | } catch (error) { 319 | console.error('Error deleting rules:', error); 320 | process.exit(1); 321 | } 322 | }); 323 | 324 | program 325 | .command('privileged-user-monitoring') 326 | .alias('privmon') 327 | .description( 328 | `Generate source events and anomalous source data for privileged user monitoring and the privileged access detection ML jobs.` 329 | ) 330 | .option('--space ', 'Space to use', 'default') 331 | .action(async (options) => { 332 | const answers = await checkbox({ 333 | message: 'Select options', 334 | choices: [ 335 | { 336 | name: 'Basic events', 337 | value: PRIVILEGED_USER_MONITORING_OPTIONS.sourceEventData, 338 | checked: true, 339 | }, 340 | { 341 | name: 'Anomaly events', 342 | value: PRIVILEGED_USER_MONITORING_OPTIONS.anomalyData, 343 | checked: true, 344 | }, 345 | { 346 | name: 'Upload CSV (skip onboarding)', 347 | value: PRIVILEGED_USER_MONITORING_OPTIONS.csvFile, 348 | checked: true, 349 | }, 350 | { 351 | name: 'Integration data', 352 | value: PRIVILEGED_USER_MONITORING_OPTIONS.integrationSyncSourceEventData, 353 | checked: true, 354 | }, 355 | { 356 | name: 'Enable risk engine', 357 | value: PRIVILEGED_USER_MONITORING_OPTIONS.riskEngineAndRule, 358 | checked: true, 359 | }, 360 | { 361 | name: 'Assign asset criticality', 362 | value: PRIVILEGED_USER_MONITORING_OPTIONS.assetCriticality, 363 | checked: true, 364 | }, 365 | { 366 | name: 'Install PAD', 367 | value: PRIVILEGED_USER_MONITORING_OPTIONS.installPad, 368 | checked: true, 369 | }, 370 | ], 371 | }); 372 | 373 | const userCount = Number( 374 | await input({ 375 | message: 'How many users', 376 | default: '10', 
377 | }) 378 | ); 379 | 380 | await privmonCommand({ 381 | options: answers, 382 | userCount, 383 | space: options.space, 384 | }); 385 | }); 386 | 387 | program 388 | .command('privmon-quick') 389 | .alias('privileged-user-monitoring-quick') 390 | .alias('quickmon') 391 | .option('--space ', 'Space to use', 'default') 392 | .action(async (options) => { 393 | await privmonCommand({ 394 | options: [...Object.values(PRIVILEGED_USER_MONITORING_OPTIONS)], 395 | userCount: 100, 396 | space: options.space, 397 | }); 398 | }); 399 | 400 | program.parse(); 401 | -------------------------------------------------------------------------------- /src/mappings/eventMappings.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "@timestamp": { 4 | "type": "date" 5 | }, 6 | "criticality": { 7 | "type": "keyword" 8 | }, 9 | "id_field": { 10 | "type": "keyword" 11 | }, 12 | "id_value": { 13 | "type": "keyword" 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/utils/create_config_on_first_run.ts: -------------------------------------------------------------------------------- 1 | import { input, select } from '@inquirer/prompts'; 2 | import fs from 'fs'; 3 | import { configPath, ConfigType } from '../get_config'; 4 | 5 | export const createConfigFileOnFirstRun = async () => { 6 | if (fs.existsSync(configPath)) { 7 | return; 8 | } 9 | 10 | console.log(` 11 | Hi there! Looks like this is your first run 👋 12 | 13 | First we need to create a config file for you. 
`);

  let apiKey = '';
  let username = '';
  let password = '';

  const enum AuthMethod {
    Basic = 'basic',
    ApiKey = 'api_key',
  }

  const authMethod: AuthMethod = await select({
    choices: [
      { name: 'Basic Auth (username + password)', value: AuthMethod.Basic },
      { name: 'API Key', value: AuthMethod.ApiKey },
    ],
    message: 'Select the authentication method',
    default: AuthMethod.Basic,
  });

  // FIX(consistency): compare against the enum member instead of the raw
  // 'api_key' literal (same value, but keeps the enum authoritative).
  if (authMethod === AuthMethod.ApiKey) {
    apiKey = await input({
      message: 'Enter the API key',
      default: '',
    });
  } else {
    username = await input({
      message: 'Enter the username',
      default: 'elastic',
    });

    password = await input({
      message: 'Enter the password',
      default: 'changeme',
    });
  }

  const elasticNode = await input({
    message: 'Enter the ElasticSearch node URL',
    default: 'http://localhost:9200',
  });
  const kibanaNode = await input({
    message: 'Enter the Kibana node URL',
    default: 'http://localhost:5601',
  });

  // The same credentials are reused for both Elasticsearch and Kibana.
  const auth = authMethod === AuthMethod.ApiKey ? { apiKey } : { username, password };

  const config: ConfigType = {
    elastic: {
      node: elasticNode,
      ...auth,
    },
    kibana: {
      node: kibanaNode,
      ...auth,
    },
    serverless: false,
    eventIndex: '',
    eventDateOffsetHours: undefined,
  };

  fs.writeFileSync(configPath, JSON.stringify(config, null, 2));

  console.log(`

  Config file created at ${configPath} 🎉

  Now let's run the command you wanted to run...

  `);
};
--------------------------------------------------------------------------------
/src/utils/get_alert_index.ts:
--------------------------------------------------------------------------------
export const getAlertIndex = (space: string) => `.alerts-security.alerts-${space}`;
--------------------------------------------------------------------------------
/src/utils/index.ts:
--------------------------------------------------------------------------------
export { getAlertIndex } from './get_alert_index';
export { initializeSpace } from './initialize_space';
export * as kibanaApi from './kibana_api';
--------------------------------------------------------------------------------
/src/utils/initialize_space.ts:
--------------------------------------------------------------------------------
import { kibanaApi } from '.';
import { getAlertIndex } from '.';
import { getEsClient } from '../commands/utils/indices';
const DUMMY_RULE_ID = 'dummy-rule';

/**
 * Ensure a Kibana space exists and has an initialized alerts index.
 * The index is bootstrapped by creating (then deleting) a throwaway rule.
 */
export const initializeSpace = async (space: string) => {
  await ensureSpaceExists(space);

  if (await alertIndexExistsInSpace(space)) {
    console.log('Skipping space initialization.');
    return;
  }
  console.log(`Initializing space ${space}`);
  console.log(`Creating dummy rule to initialize alerts index in ${space}`);
  await kibanaApi.createRule({ space, id: DUMMY_RULE_ID });
  await waitForAlertIndexMapping(space);
  console.log('Deleting dummy rule');
  await kibanaApi.deleteRule(DUMMY_RULE_ID, space);
  console.log('Dummy rule deleted. Space initialized');
};

// NOTE(review): the paste stripped the Promise type argument; restored as
// Promise<boolean> to match the returned `exists` flag.
const alertIndexExistsInSpace = async (space: string): Promise<boolean> => {
  const client = getEsClient();
  const index = getAlertIndex(space);
  console.log(`Checking if index ${index} exists`);
  const exists = await client.indices.exists({ index });

  console.log(exists ? `Index ${index} exists` : `Index ${index} does not exist`);
  return exists;
};

// Poll the alerts index until its @timestamp mapping is `date` (i.e. the real
// alerts mapping is installed rather than a dynamically created one).
const waitForAlertIndexMapping = async (space: string, attempts: number = 5, waitSeconds = 5) => {
  const client = getEsClient();
  const index = getAlertIndex(space);
  // NOTE(review): assumes the first backing index of the alerts data stream —
  // confirm this naming convention still holds for the target stack version.
  const backingIndex = '.internal' + index + '-000001';

  console.log(`Waiting for index ${index} to have the correct mapping`);

  let attempt = 0;

  while (attempt < attempts) {
    try {
      const res = await client.indices.getMapping({ index });
      console.log(`Got mapping for index ${index} (backing index ${backingIndex})`);
      if (res[backingIndex]?.mappings?.properties) {
        const mapping = res[backingIndex].mappings.properties;
        // @timestamp is used to detect if the mapping is correct; if it has been
        // automatically created it will be `long` instead of `date`.
        if (mapping['@timestamp'] && mapping['@timestamp'].type === 'date') {
          console.log(
            `Index ${index} has the correct date field mapping: ${JSON.stringify(mapping['@timestamp'])}`
          );
          return;
        } else {
          throw new Error(`Index ${index} does not have the correct mapping`);
        }
      }

      console.log(`Index ${index} does not have the correct mapping.`);
    } catch (e) {
      if (JSON.stringify(e).includes('index_not_found_exception')) {
        console.log(`Index ${index} does not exist yet.`);
      } else {
        throw e;
      }
    }

    if (attempt === attempts - 1) {
      throw new Error(
        `Index ${index} does not have the correct mapping after ${attempts} attempts`
      );
    }

    console.log(`Waiting ${waitSeconds} seconds before trying again`);
    await new Promise((resolve) => setTimeout(resolve, waitSeconds * 1000));
    attempt++;
  }
};

const ensureSpaceExists = async (space: string) => {
  console.log(`Checking if space ${space} exists`);
  if (await kibanaApi.doesSpaceExist(space)) {
    console.log(`Space ${space} exists`);
    return;
  }

  console.log(`Space ${space} does not exist. Creating space ${space}`);
  await kibanaApi.createSpace(space);
  console.log(`Space ${space} created`);
};
--------------------------------------------------------------------------------
/src/utils/kibana_api.ts:
--------------------------------------------------------------------------------
import urlJoin from 'url-join';
import fetch, { Headers } from 'node-fetch';
import { getConfig } from '../get_config';
import { faker } from '@faker-js/faker';
import fs from 'fs';
import FormData from 'form-data';
import {
  RISK_SCORE_SCORES_URL,
  RISK_SCORE_ENGINE_INIT_URL,
  DETECTION_ENGINE_RULES_URL,
  COMPONENT_TEMPLATES_URL,
  FLEET_EPM_PACKAGES_URL,
  SPACES_URL,
  SPACE_URL,
  RISK_SCORE_URL,
  RISK_SCORE_DASHBOARD_URL,
  ASSET_CRITICALITY_BULK_URL,
  INIT_ENTITY_ENGINE_URL,
  ENTITY_ENGINE_URL,
  ENTITY_ENGINES_URL,
  DETECTION_ENGINE_RULES_BULK_ACTION_URL,
  API_VERSIONS,
} from '../constants';

/** Build a Kibana URL, prefixing the space path segment when a space is given. */
export const buildKibanaUrl = (opts: { path: string; space?: string }) => {
  const config = getConfig();
  const { path, space } = opts;
  const pathWithSpace = space ? urlJoin(`/s/${space}`, path) : path;
  return urlJoin(config.kibana.node, pathWithSpace);
};

type ResponseError = Error & { statusCode: number; responseData: unknown };

const getAuthorizationHeader = () => {
  const config = getConfig();
  if ('apiKey' in config.kibana) {
    return 'ApiKey ' + config.kibana.apiKey;
  } else
    return (
      'Basic ' +
      Buffer.from(config.kibana.username + ':' + config.kibana.password).toString('base64')
    );
};

const throwResponseError = (message: string, statusCode: number, response: unknown) => {
  const error = new Error(message) as ResponseError;
  error.statusCode = statusCode;
  error.responseData = response;
  throw error;
};

/**
 * Thin fetch wrapper for Kibana APIs: sets auth + standard Kibana headers,
 * parses the JSON body, and throws a ResponseError on >=400 statuses unless
 * the status is listed in `ignoreStatuses`.
 * NOTE(review): the paste stripped the generic parameter; restored as <T>.
 */
export const kibanaFetch = async <T>(
  path: string,
  params: object,
  opts: {
    ignoreStatuses?: number[] | number;
    apiVersion?: string;
    space?: string;
  } = {}
): Promise<T> => {
  const { ignoreStatuses, apiVersion = '1', space } = opts;
  const url = buildKibanaUrl({ path, space });
  const ignoreStatusesArray = Array.isArray(ignoreStatuses) ? ignoreStatuses : [ignoreStatuses];
  const headers = new Headers();
  headers.append('Content-Type', 'application/json');
  headers.append('kbn-xsrf', 'true');
  headers.append('Authorization', getAuthorizationHeader());

  headers.set('x-elastic-internal-origin', 'kibana');
  headers.set('elastic-api-version', apiVersion);
  const result = await fetch(url, {
    headers: headers,
    ...params,
  });
  const rawResponse = await result.text();
  // An empty body (e.g. 204 No Content) is treated as an empty object.
  const data = rawResponse ?
JSON.parse(rawResponse) : {}; 78 | if (!data || typeof data !== 'object') { 79 | throw new Error(); 80 | } 81 | 82 | if (result.status >= 400 && !ignoreStatusesArray.includes(result.status)) { 83 | throwResponseError( 84 | `Failed to fetch data from ${url}, status: ${result.status}`, 85 | result.status, 86 | data 87 | ); 88 | } 89 | return data; 90 | }; 91 | 92 | export const fetchRiskScore = async (space?: string) => { 93 | await kibanaFetch( 94 | RISK_SCORE_SCORES_URL, 95 | { 96 | method: 'POST', 97 | body: JSON.stringify({}), 98 | }, 99 | { space } 100 | ); 101 | }; 102 | 103 | export const enableRiskScore = async (space?: string) => { 104 | return kibanaFetch( 105 | RISK_SCORE_ENGINE_INIT_URL, 106 | { 107 | method: 'POST', 108 | body: JSON.stringify({}), 109 | }, 110 | { 111 | space, 112 | } 113 | ); 114 | }; 115 | 116 | export const assignAssetCriticality = async ( 117 | assetCriticalityRecords: Array<{ 118 | id_field: string; 119 | id_value: string; 120 | criticality_level: string; 121 | }>, 122 | space?: string 123 | ) => { 124 | return kibanaFetch( 125 | ASSET_CRITICALITY_BULK_URL, 126 | { 127 | method: 'POST', 128 | body: JSON.stringify({ records: assetCriticalityRecords }), 129 | }, 130 | { apiVersion: API_VERSIONS.public.v1, space } 131 | ); 132 | }; 133 | 134 | export const createRule = ({ 135 | space, 136 | id, 137 | name, 138 | description, 139 | enabled, 140 | risk_score, 141 | severity, 142 | index, 143 | type, 144 | query, 145 | from, 146 | interval, 147 | }: { 148 | space?: string; 149 | id?: string; 150 | name?: string; 151 | description?: string; 152 | enabled?: boolean; 153 | risk_score?: number; 154 | severity?: string; 155 | index?: string[]; 156 | type?: string; 157 | query?: string; 158 | from?: string; 159 | interval?: string; 160 | } = {}): Promise<{ id: string; name: string }> => { 161 | return kibanaFetch<{ id: string; name: string }>( 162 | DETECTION_ENGINE_RULES_URL, 163 | { 164 | method: 'POST', 165 | body: JSON.stringify({ 166 | 
name: name || 'Match All', 167 | description: description || 'Tests a simple query', 168 | enabled: enabled ?? true, 169 | risk_score: risk_score || 70, 170 | rule_id: id || faker.string.uuid(), 171 | severity: severity || 'high', 172 | index: index || ['logs-*', 'metrics-*', 'auditbeat-*'], 173 | type: type || 'query', 174 | query: query || '*:*', 175 | from: from || 'now-40d', 176 | interval: interval || '1m', 177 | }), 178 | }, 179 | { apiVersion: API_VERSIONS.public.v1, space } 180 | ); 181 | }; 182 | 183 | export const getRule = async (ruleId: string, space?: string) => { 184 | const url = DETECTION_ENGINE_RULES_URL + '?rule_id=' + ruleId; 185 | try { 186 | return await kibanaFetch( 187 | url, 188 | { 189 | method: 'GET', 190 | }, 191 | { apiVersion: API_VERSIONS.public.v1, space } 192 | ); 193 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 194 | } catch (e) { 195 | return null; 196 | } 197 | }; 198 | 199 | export const deleteRule = async (ruleId: string, space?: string) => { 200 | const url = DETECTION_ENGINE_RULES_URL + '?rule_id=' + ruleId; 201 | return kibanaFetch( 202 | url, 203 | { 204 | method: 'DELETE', 205 | }, 206 | { apiVersion: API_VERSIONS.public.v1, space } 207 | ); 208 | }; 209 | 210 | export const createComponentTemplate = async ({ 211 | name, 212 | mappings, 213 | space, 214 | }: { 215 | name: string; 216 | mappings: object; 217 | space?: string; 218 | }) => { 219 | return kibanaFetch( 220 | COMPONENT_TEMPLATES_URL, 221 | { 222 | method: 'POST', 223 | body: JSON.stringify({ 224 | name, 225 | template: { 226 | mappings, 227 | }, 228 | _kbnMeta: { 229 | usedBy: [], 230 | isManaged: false, 231 | }, 232 | }), 233 | }, 234 | { apiVersion: API_VERSIONS.public.v1, ignoreStatuses: [409], space } 235 | ); 236 | }; 237 | export const installPackage = async ({ 238 | packageName, 239 | version = 'latest', 240 | space, 241 | }: { 242 | packageName: string; 243 | version?: string; 244 | space?: string; 245 | }) => { 246 | const url = 
FLEET_EPM_PACKAGES_URL(packageName, version); 247 | 248 | return kibanaFetch( 249 | url, 250 | { 251 | method: 'POST', 252 | }, 253 | { apiVersion: API_VERSIONS.public.v1, space } 254 | ); 255 | }; 256 | 257 | export const installLegacyRiskScore = async () => { 258 | const userResponse = await kibanaFetch(RISK_SCORE_URL, { 259 | method: 'POST', 260 | body: JSON.stringify({ riskScoreEntity: 'user' }), 261 | }); 262 | 263 | const hostResponse = await kibanaFetch(RISK_SCORE_URL, { 264 | method: 'POST', 265 | body: JSON.stringify({ riskScoreEntity: 'host' }), 266 | }); 267 | 268 | const userDashboardsResponse = await kibanaFetch(RISK_SCORE_DASHBOARD_URL('user'), { 269 | method: 'POST', 270 | body: JSON.stringify({}), 271 | }); 272 | 273 | const hostDashboardsResponse = await kibanaFetch(RISK_SCORE_DASHBOARD_URL('host'), { 274 | method: 'POST', 275 | body: JSON.stringify({}), 276 | }); 277 | 278 | return { 279 | userResponse, 280 | hostResponse, 281 | userDashboardsResponse, 282 | hostDashboardsResponse, 283 | }; 284 | }; 285 | 286 | export const createSpace = async (space: string) => { 287 | return kibanaFetch( 288 | SPACES_URL, 289 | { 290 | method: 'POST', 291 | body: JSON.stringify({ 292 | id: space, 293 | name: space, 294 | description: 'Created by security-documents-generator for testing', 295 | disabledFeatures: [], 296 | }), 297 | }, 298 | { 299 | apiVersion: API_VERSIONS.public.v1, 300 | } 301 | ); 302 | }; 303 | 304 | export const doesSpaceExist = async (space: string): Promise => { 305 | try { 306 | await kibanaFetch( 307 | SPACE_URL(space), 308 | { 309 | method: 'GET', 310 | }, 311 | { apiVersion: API_VERSIONS.public.v1 } 312 | ); 313 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 314 | } catch (e) { 315 | return false; 316 | } 317 | return true; 318 | }; 319 | 320 | const _initEngine = (engineType: string, space?: string) => { 321 | return kibanaFetch( 322 | INIT_ENTITY_ENGINE_URL(engineType), 323 | { 324 | method: 'POST', 325 | body: 
JSON.stringify({}), 326 | }, 327 | { apiVersion: API_VERSIONS.public.v1, space } 328 | ); 329 | }; 330 | 331 | const _deleteEngine = (engineType: string, space?: string) => { 332 | return kibanaFetch( 333 | ENTITY_ENGINE_URL(engineType), 334 | { 335 | method: 'DELETE', 336 | }, 337 | { apiVersion: API_VERSIONS.public.v1, space } 338 | ); 339 | }; 340 | 341 | export const deleteEngines = async (entityTypes: string[] = ['host', 'user'], space?: string) => { 342 | const responses = await Promise.all( 343 | entityTypes.map((entityType) => _deleteEngine(entityType, space)) 344 | ); 345 | console.log('Delete responses:', responses); 346 | }; 347 | 348 | const _listEngines = (space?: string) => { 349 | const res = kibanaFetch( 350 | ENTITY_ENGINES_URL, 351 | { 352 | method: 'GET', 353 | }, 354 | { apiVersion: API_VERSIONS.public.v1, space } 355 | ); 356 | 357 | return res as Promise<{ engines: Array<{ status: string }> }>; 358 | }; 359 | 360 | const allEnginesAreStarted = async (space?: string) => { 361 | const { engines } = await _listEngines(space); 362 | if (engines.length === 0) { 363 | return false; 364 | } 365 | return engines.every((engine) => engine.status === 'started'); 366 | }; 367 | 368 | export const initEntityEngineForEntityTypes = async ( 369 | entityTypes: string[] = ['host', 'user'], 370 | space?: string 371 | ) => { 372 | if (await allEnginesAreStarted(space)) { 373 | console.log('All engines are already started'); 374 | return; 375 | } 376 | await Promise.all(entityTypes.map((entityType) => _initEngine(entityType, space))); 377 | const attempts = 20; 378 | const delay = 2000; 379 | 380 | for (let i = 0; i < attempts; i++) { 381 | console.log('Checking if all engines are started attempt:', i + 1); 382 | if (await allEnginesAreStarted(space)) { 383 | console.log('All engines are started'); 384 | return; 385 | } 386 | await new Promise((resolve) => setTimeout(resolve, delay)); 387 | } 388 | 389 | throw new Error('Failed to start engines'); 390 | }; 391 | 
392 | export const getAllRules = async (space?: string) => { 393 | const perPage = 100; // Maximum items per page 394 | let page = 1; 395 | let allRules: Array<{ rule_id: string; name: string; id: string }> = []; 396 | 397 | try { 398 | while (true) { 399 | const url = DETECTION_ENGINE_RULES_URL + `/_find?page=${page}&per_page=${perPage}`; 400 | const response = await kibanaFetch<{ 401 | data: Array<{ rule_id: string; name: string; id: string }>; 402 | total: number; 403 | }>( 404 | url, 405 | { 406 | method: 'GET', 407 | }, 408 | { apiVersion: API_VERSIONS.public.v1, space } 409 | ); 410 | 411 | if (!response.data || response.data.length === 0) { 412 | break; 413 | } 414 | 415 | allRules = allRules.concat(response.data); 416 | 417 | // If we've fetched all rules, break 418 | if (allRules.length >= (response.total || 0)) { 419 | break; 420 | } 421 | 422 | page++; 423 | } 424 | 425 | return { data: allRules }; 426 | } catch (e) { 427 | console.error('Error fetching rules:', e); 428 | return { data: [] }; 429 | } 430 | }; 431 | 432 | export const bulkDeleteRules = async (ruleIds: string[], space?: string) => { 433 | return kibanaFetch( 434 | DETECTION_ENGINE_RULES_BULK_ACTION_URL, 435 | { 436 | method: 'POST', 437 | body: JSON.stringify({ 438 | action: 'delete', 439 | ids: ruleIds, 440 | }), 441 | }, 442 | { apiVersion: API_VERSIONS.public.v1, space } 443 | ); 444 | }; 445 | 446 | export const uploadPrivmonCsv = async ( 447 | csvFilePath: string, 448 | space?: string 449 | ): Promise<{ success: boolean; message?: string }> => { 450 | try { 451 | const formData = new FormData(); 452 | formData.append('file', fs.createReadStream(csvFilePath)); 453 | 454 | const response = await fetch( 455 | buildKibanaUrl({ 456 | path: '/api/entity_analytics/monitoring/users/_csv', 457 | space, 458 | }), 459 | { 460 | method: 'POST', 461 | headers: { 462 | 'kbn-xsrf': 'true', 463 | 'elastic-api-version': API_VERSIONS.public.v1, 464 | ...formData.getHeaders(), 465 | Authorization: 
getAuthorizationHeader(), 466 | }, 467 | body: formData, 468 | } 469 | ); 470 | 471 | if (!response.ok) { 472 | const errorText = await response.text(); 473 | throw new Error(`Failed to upload CSV: ${errorText}`); 474 | } 475 | 476 | return { success: true }; 477 | } catch (error) { 478 | console.error('Error uploading CSV:', error); 479 | // @ts-expect-error to have a message property 480 | return { success: false, message: error.message }; 481 | } 482 | }; 483 | 484 | export const enablePrivmon = async (space?: string) => { 485 | try { 486 | const response = await kibanaFetch( 487 | '/api/entity_analytics/monitoring/engine/init', 488 | { 489 | method: 'POST', 490 | body: JSON.stringify({}), 491 | }, 492 | { apiVersion: API_VERSIONS.public.v1, space } 493 | ); 494 | return response; 495 | } catch (error) { 496 | console.error('Error enabling Privileged User Monitoring:', error); 497 | throw error; 498 | } 499 | }; 500 | 501 | export const installPad = async (space?: string) => { 502 | try { 503 | const response = await kibanaFetch( 504 | '/api/entity_analytics/privileged_user_monitoring/pad/install', 505 | { 506 | method: 'POST', 507 | body: JSON.stringify({}), 508 | }, 509 | { apiVersion: API_VERSIONS.public.v1, space } 510 | ); 511 | return response; 512 | } catch (error) { 513 | console.error('Error installing PAD:', error); 514 | throw error; 515 | } 516 | }; 517 | -------------------------------------------------------------------------------- /test_log_data/proof_point_tap.json: -------------------------------------------------------------------------------- 1 | //Below is a log event from Proofpoint for a delivered message from [user}@example.com to [user]@gmail.com 2 | //The scenario is a email exfil attack 3 | 4 | // POST /logs-logen-logen_events-proofpoint_tap-message_delivered-2024-12-12/_doc - edit this line to post to the propser index from Kinana DevTools 5 | {"proofpoint_tap": { 6 | "message_delivered": { 7 | "cluster": "example_hosted", 8 | 
"message_parts": { 9 | "disposition": "inline", 10 | "o_content_type": "text/plain" 11 | }, 12 | "malware_score": 0, 13 | "modules_run": "urldefense", 14 | "qid": "2XX30XX029XXXX38XXX6", 15 | "message_size": 70000, 16 | "impostor_score": 0, 17 | "policy_routes": "default_inbound", 18 | "to_addresses": "jdoe@gmail.com", 19 | "recipient": "jdoe@gmail.com", 20 | "threat_info_map": { 21 | "threat": { 22 | "artifact": "https://example.com/view/xpxxxx6uxxxxx9y", 23 | "id": "xxxxxx425eaxxxxxxee41d9e81xxxxxxf24a96e48f6e6xxxxxxxxxxxx123456f", 24 | "time": "2022-04-01T18:24:01.050Z", 25 | "type": "url", 26 | "url": "https://threatinsight.proofpoint.com/3183a23b-d9c3-1234-1234-2babcd123478/threat/email/9fxxxxxxxxxee41d94deabcd1234e9ff24axxxxxxxxxxc5b843f", 27 | "status": "active" 28 | }, 29 | "classification": "phish" 30 | }, 31 | "header": { 32 | "replyto": "Trang, Alex & Transpose Platform Team ", 33 | "from": "Trang, Alex & Transpose Platform Team john.doe@example.com" 34 | }, 35 | "spam_score": 0, 36 | "completely_rewritten": "true", 37 | "phish_score": 0 38 | }, 39 | "guid": "RxkxxxxxlxzxkxnxxxxDxrxwxxxxx6" 40 | }, 41 | "tags": [ 42 | "logen_tag", 43 | "/Proofpoint/proofpoint_tap-message_delivered-none-nested.json", 44 | "proofpoint_tap", 45 | "message_delivered", 46 | "logen-os-tag-none", 47 | "preserve_original_event" 48 | ], 49 | "event": { 50 | "agent_id_status": "missing", 51 | "ingested": "2024-11-14T00:37:26Z", 52 | "kind": "event", 53 | "created": "2024-11-14T00:37:57.709Z", 54 | "id": "78f5fa6b-9388-469b-861b-be1ce0c70000", 55 | "category": "email", 56 | "type": "info", 57 | "dataset": "proofpoint_tap.message_delivered" 58 | }, 59 | "email": { 60 | "attachments": { 61 | "file": { 62 | "mime_type": "text/plain", 63 | "name": "text.txt", 64 | "hash": { 65 | "sha256": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e", 66 | "md5": "b10a8db164e0754105b7a99be72e3fe5" 67 | } 68 | } 69 | }, 70 | "reply_to": { 71 | "address": "john.doe@example.com" 72 | 
}, 73 | "sender": { 74 | "address": "xyz-abc.us1_152023242.13741304-5fabcd4567@example.com" 75 | }, 76 | "subject": "Perfectly normal email", 77 | "delivery_timestamp": "2022-03-15T15:00:20.000Z", 78 | "from": { 79 | "address": "john.doe@example.com" 80 | }, 81 | "message_id": "200c524xyz1234xyz12343.5fc4b29057.20210428135110.a1234993344.dxyz1234@example.com", 82 | "to": { 83 | "address": "jdoe@gmail.com" 84 | }, 85 | "x_mailer": "Mailchimp Mailer - **CIxxxxxxxxx1234**" 86 | }, 87 | "@timestamp": "2024-11-14T00:03:57.709Z", 88 | "data_stream": { 89 | "dataset": "proofpoint_tap.message_delivered" 90 | } 91 | } 92 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "ES2020" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. 
*/ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "ESNext" /* Specify what module code is generated. */, 29 | // "rootDir": "./", /* Specify the root folder within your source files. */ 30 | "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */, 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. 
*/ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ 39 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ 40 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ 41 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ 42 | "resolveJsonModule": true /* Enable importing .json files. */, 43 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ 44 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ 45 | 46 | /* JavaScript Support */ 47 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 48 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 49 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 50 | 51 | /* Emit */ 52 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 53 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 54 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 55 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 56 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. 
*/ 57 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 58 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 59 | // "removeComments": true, /* Disable emitting comments. */ 60 | // "noEmit": true, /* Disable emitting files from a compilation. */ 61 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 62 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ 63 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 64 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 65 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 66 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 67 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 68 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 69 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 70 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 71 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 72 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 73 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 74 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. 
*/ 75 | 76 | /* Interop Constraints */ 77 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 78 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ 79 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 80 | "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, 81 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 82 | "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, 83 | 84 | /* Type Checking */ 85 | "strict": true /* Enable all strict type-checking options. */, 86 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 87 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 88 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 89 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 90 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 91 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 92 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 93 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 94 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. 
*/ 95 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 96 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 97 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 98 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 99 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 100 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 101 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 102 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 103 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 104 | 105 | /* Completeness */ 106 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 107 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 108 | } 109 | } 110 | --------------------------------------------------------------------------------