├── .eslintignore ├── .eslintrc ├── .gitignore ├── LICENSE.txt ├── README.md ├── lib ├── cfnresponse.js └── createbucket.js ├── setEnv.sh.template ├── splunk-cloudwatch-logs-processor ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml ├── splunk-dynamodb-stream-processor ├── .npmrc.sample ├── db-stream-lambda.js ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml ├── splunk-elb-application-access-logs-processor ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml ├── splunk-elb-classic-access-logs-processor ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml ├── splunk-guardduty-processor ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml ├── splunk-iot-processor ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml ├── splunk-kinesis-stream-processor ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml └── splunk-logging ├── .npmrc.sample ├── index.js ├── integration-test.js ├── lambda.json ├── package.json ├── sampleEvent.json └── template.yaml /.eslintignore: -------------------------------------------------------------------------------- 1 | integration-test.js 2 | cfnresponse.js 3 | db-stream-lambda.js 4 | createbucket.js 5 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "babel-eslint", 3 | "extends": "airbnb", 4 | "env": { 5 | "mocha": true 6 | }, 7 | "plugins": [ 8 | "babel" 9 | ], 10 | "rules": { 11 | 
"consistent-return": 0, 12 | "global-require": 0, 13 | "import/imports-first": 0, 14 | "import/no-extraneous-dependencies": 0, 15 | "import/no-unresolved": 0, 16 | "indent": [2, 4, {"SwitchCase": 1}], 17 | "max-len": [2, 250], 18 | "no-console": 0, 19 | "no-return-assign": 0, 20 | "no-underscore-dangle": 0, 21 | "no-unused-expressions": 0, 22 | "no-useless-escape": 0, 23 | "prefer-rest-params": 0, 24 | "react/require-extension": 0, 25 | "spaced-comment": 0, 26 | "strict": 0, 27 | "no-else-return": 0, 28 | "object-shorthand": [2, "consistent-as-needed"] 29 | } 30 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log 3 | .DS_Store 4 | # environment setting file 5 | .npmrc 6 | 7 | # backup for local testing 8 | *.backup 9 | 10 | # Artifacts from build & test 11 | *.zip 12 | packaged-template.yaml 13 | template.output.yaml 14 | lib 15 | setEnv.sh 16 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2017 Splunk Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Splunk AWS Serverless Apps 2 | Splunk AWS Serverless applications and [Lambda blueprints](https://www.splunk.com/blog/2016/11/29/announcing-new-aws-lambda-blueprints-for-splunk.html), including associated CloudFormation templates (using [SAM](https://github.com/awslabs/serverless-application-model)) for automated packaging & deployment. 3 | 4 | ## Table of Contents 5 | * **[Getting Started](#getting-started)** 6 | * **[Prerequisites](#prerequisites)** 7 | * **[Installing](#installing)** 8 | * **[Packaging](#packaging)** 9 | * **[Deploying](#deploying)** 10 | * **[Development & Test](#development--test)** 11 | * **[Available npm tasks](#available-npm-tasks)** 12 | * **[Setup test environment](#setup-test-environment)** 13 | * **[Run integration test](#run-integration-test)** 14 | 15 | ## Getting Started 16 | 17 | ### Prerequisites 18 | - AWS CLI 19 | - Node.js v4.3 or later. 20 | - Splunk Enterprise 6.3.0 or later, or Splunk Cloud. 21 | - Splunk HTTP Event Collector token from your Splunk Enterprise server. 22 | - S3 bucket to host artifacts uploaded by CloudFormation e.g. 
Lambda ZIP deployment packages 23 | 24 | You can use the following command to create the Amazon S3 bucket, say in `us-east-1` region 25 | ``` 26 | aws s3 mb s3:// --region us-east-1 27 | ``` 28 | 29 | ### Installing 30 | First cd into any of the serverless applications: 31 | ``` 32 | cd splunk-cloudwatch-logs-processor 33 | ``` 34 | Copy over the sample `.npmrc`: 35 | ``` 36 | cp .npmrc.sample .npmrc 37 | ``` 38 | Then modify `.npmrc` file to set required configuration settings to match your environment, such as `parm_hec_url` which specifies the URL of your Splunk HTTP Event Collector endpoint. 39 | 40 | Then install node package dependencies: 41 | ``` 42 | npm install 43 | ``` 44 | 45 | ### Packaging 46 | To build the Serverles Application Module deployment package: 47 | ``` 48 | npm run build:zip 49 | ``` 50 | This will package the necessary Lambda function(s) and dependencies into one local deployment zip as specified in `package.json` build script. i.e. for Splunk CloudWatch Serverless Application it creates `splunk-cloudwatch-logs-processor.zip` 51 | 52 | Then upload all local artifacts needed by the SAM template to your previously created S3 bucket. You can do this either using **npm** task or directly using **AWS CLI**: 53 | 54 | **Upload using npm:** 55 | 56 | Before you run this command please ensure that you have set correct values in your application .npmrc 57 | ``` 58 | npm run build:template 59 | ``` 60 | 61 | **Upload using AWS CLI** 62 | ``` 63 | aws cloudformation package 64 | --template template.yaml 65 | --s3-bucket 66 | --output-template-file template.output.yaml 67 | ``` 68 | 69 | The command returns a copy of the SAM template, in this case `template.output.yaml`, replacing all references to local artifacts with the S3 location where the command uploaded the artifacts. In particular, `CodeUri` property of the Lambda resource points to the deployment zip `splunk-cloudwatch-logs-processor.zip` in the Amazon S3 bucket that you specified. 
70 | 71 | ### Deploying 72 | **Deploy using npm:** 73 | 74 | Before you run this command please ensure that you have set correct values in your application .npmrc 75 | ``` 76 | npm run build:deployment 77 | ``` 78 | 79 | **Deploy using AWS CLI** 80 | 81 | Example below is specific to Splunk Splunk CloudWatch Serverless Application. `parameter-overrides` will differ by Splunk Serverless Application and you will need to adjust accordingly. Alternatively, you can use npm task above which retrieves the configurations defined in .npmrc 82 | ``` 83 | aws cloudformation deploy 84 | --template $(pwd)/template.output.yaml 85 | --parameter-overrides 86 | SplunkHttpEventCollectorURL='https://:8088/services/collector' 87 | SplunkHttpEventCollectorToken= 88 | CloudWatchLogsGroupName= 89 | --capabilities "CAPABILITY_IAM" --stack-name my-cloudwatch-logs-forwarder-stack 90 | ``` 91 | 92 | ## Development & Test 93 | 94 | ### Available npm tasks 95 | For each serverless application, you can use the following npm tasks: 96 | 97 | | command | description | 98 | | --- | --- | 99 | | `npm run set:env`| creates .npmrc file in your local project. set project variables here | 100 | | `npm run lint` | run eslint rules against .js files | 101 | | `npm run build:zip` | create zip SAM deployment package with required .js files | 102 | | `npm run build:template` | uploads SAM deployment package with required template files to AWS S3 Bucket| 103 | | `npm run build:deployment` | creates CloudFormation Stack and deploys SAM package from AWS S3 Bucket| 104 | | `npm run clean` | remove zip deployment package | 105 | | `npm run test` (or `npm test`) | run simple integration test with live Splunk Enterprise instance. More details in section below. | 106 | | `npm run build` | runs entire build flow: `build:zip` then `build:template` and then `build:deployment` | 107 | 108 | ### Setup test environment 109 | 110 | >>>> This section requires updates <<<< 111 | i.e. 
instead of setEnv can use 112 | "test": "SPLUNK_HEC_URL=$npm_config_kinesis_hec_url SPLUNK_HEC_TOKEN=$npm_config_kinesis_hec_token node integration-test.js", 113 | 114 | For test-driven development, you can easily run a simple integration test as you develop the Lambda function. 115 | First, copy over the provided setEnv bash script in root folder: 116 | ``` 117 | cp setEnv.sh.template setEnv.sh 118 | ``` 119 | Modify `setEnv.sh` contents to set the values of `SPLUNK_HEC_URL` and `SPLUNK_HEC_TOKEN` to point to a local (or remote) Splunk Enterprise test instance and its valid HEC token. Then, source these environment variables: 120 | ``` 121 | source setEnv.sh 122 | ``` 123 | ### Run integration test 124 | Now, you can run a simple integration test to validate functionality of the Lambda function and ensure events are being indexed correctly in Splunk Enterprise: 125 | ``` 126 | npm test 127 | ``` 128 | This command first runs lint checks against Lambda function code. Only after successfully lint checks, this command will run the Lambda function passing it the event in `sampleEvent.json` along with `SPLUNK_HEC_URL` and `SPLUNK_HEC_TOKEN` environment variables. The function output and final status is directed to standard out. Here's an example of a successful execution: 129 | ```bash 130 | $ npm test 131 | > splunk-cloudwatch-logs-processor@0.8.1 pretest 132 | > npm run lint 133 | ... 134 | > splunk-cloudwatch-logs-processor@0.8.1 test 135 | > node integration-test.js 136 | 137 | Received event: { 138 | "awslogs": { 139 | ... 140 | ... 141 | } 142 | Done 143 | Decoded payload: { 144 | ... 145 | ... 146 | } 147 | Sending event(s) 148 | Response received 149 | Response from Splunk: 150 | {"text":"Success","code":0} 151 | Successfully processed 2 log event(s). 
152 | [ null, 2 ] 153 | ``` 154 | 155 | ## Authors 156 | * **Roy Arsan** - [rarsan](https://github.com/rarsan) 157 | * **Tarik Makota** - [tmakota](https://github.com/tmakota) 158 | * **Nicolas Stone** - [nstonesplunk](https://github.com/nstonesplunk) 159 | 160 | 161 | See also the list of [contributors](https://github.com/splunk/splunk-aws-lambda-blueprints/contributors) who participated in this project. 162 | 163 | ## License 164 | Splunk AWS Serverless Apps are released under the MIT license. Details can be found in the [LICENSE](LICENSE.txt) file. 165 | -------------------------------------------------------------------------------- /lib/cfnresponse.js: -------------------------------------------------------------------------------- 1 | /* Copyright 2015 Amazon Web Services, Inc. or its affiliates. All Rights Reserved. 2 | This file is licensed to you under the AWS Customer Agreement (the "License"). 3 | You may not use this file except in compliance with the License. 4 | A copy of the License is located at http://aws.amazon.com/agreement/. 5 | This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. 6 | See the License for the specific language governing permissions and limitations under the License. 
*/ 7 | module.exports.SUCCESS = 'SUCCESS'; 8 | module.exports.FAILED = 'FAILED'; 9 | module.exports.send = function (event, context, responseStatus, responseData, physicalResourceId) { 10 | const responseBody = JSON.stringify({ 11 | Status: responseStatus, 12 | Reason: 'See the details in CloudWatch Log Stream: ' + context.logStreamName, 13 | PhysicalResourceId: physicalResourceId || context.logStreamName, 14 | StackId: event.StackId, 15 | RequestId: event.RequestId, 16 | LogicalResourceId: event.LogicalResourceId, 17 | Data: responseData }); 18 | const https = require('https'); 19 | const url = require('url'); 20 | const parsedUrl = url.parse(event.ResponseURL); 21 | const options = { 22 | hostname: parsedUrl.hostname, 23 | port: 443, 24 | path: parsedUrl.path, 25 | method: 'PUT', 26 | headers: { 27 | 'content-type': '', 28 | 'content-length': responseBody.length 29 | } 30 | }; 31 | const request = https.request(options, function(response) { 32 | console.log('Status code: ' + response.statusCode); 33 | console.log('Status message: ' + response.statusMessage); 34 | context.done(); 35 | }); 36 | request.on('error', function(error) { 37 | console.log('send(..) 
failed executing https.request(..): ' + error); 38 | context.done(); 39 | }); 40 | request.write(responseBody); 41 | request.end(); 42 | }; 43 | 44 | module.exports.testMe = function(param){ 45 | console.log('testMe:: paramater output is ', param); 46 | }; 47 | -------------------------------------------------------------------------------- /lib/createbucket.js: -------------------------------------------------------------------------------- 1 | /*eslint no-unused-vars: [2, {"args": "after-used", "argsIgnorePattern": "^_"}]*/ 2 | /* function code borrows from code by https://stackoverflow.com/users/2518355/wjordan */ 3 | const AWS = require('aws-sdk'); 4 | const response = require('./cfnresponse'); 5 | 6 | const s3 = new AWS.S3(); 7 | 8 | exports.handler = (event, context, callback) => { 9 | const respond = (e) => { 10 | if (e) { console.log(e); } 11 | response.send(event, context, e ? response.FAILED : response.SUCCESS, e ? e : {}); 12 | }; 13 | process.on('uncaughtException', e => failed(e)); 14 | const params = event.ResourceProperties; 15 | console.log('Parameters: ', JSON.stringify(params)); 16 | delete params.ServiceToken; 17 | if (event.RequestType === 'Delete') { 18 | params.NotificationConfiguration = {}; 19 | s3.putBucketNotificationConfiguration(params).promise() 20 | .then(_data => respond()) 21 | .catch(_e => respond()); 22 | } else { 23 | s3.putBucketNotificationConfiguration(params).promise() 24 | .then(_data => respond()) 25 | .catch(e => respond(e)); 26 | } 27 | }; 28 | -------------------------------------------------------------------------------- /setEnv.sh.template: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | export SPLUNK_HEC_URL='https://localhost:8088/services/collector' 4 | export SPLUNK_HEC_TOKEN='' 5 | -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/.npmrc.sample: 
-------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-s3-bucket 2 | parm_cwl_group_name = fakeCloudWatchLogsGroupName 3 | parm_hec_url = https://192.168.0.0:8088/services/collector 4 | parm_hec_token = FAKE-HEC-TOKEN 5 | parm_stack_name = FakeLoggingStack 6 | -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Stream events from AWS CloudWatch Logs to Splunk 3 | * 4 | * This function streams AWS CloudWatch Logs to Splunk using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to stream logs to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 
13 | * To create a new token for this Lambda function, refer to Splunk Docs: 14 | * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token 15 | * 16 | * For details about Splunk logging library used below: https://github.com/splunk/splunk-javascript-logging 17 | */ 18 | 19 | 'use strict'; 20 | 21 | const loggerConfig = { 22 | url: process.env.SPLUNK_HEC_URL, 23 | token: process.env.SPLUNK_HEC_TOKEN, 24 | maxBatchCount: 0, // Manually flush events 25 | maxRetries: 3, // Retry 3 times 26 | }; 27 | 28 | const SplunkLogger = require('splunk-logging').Logger; 29 | const zlib = require('zlib'); 30 | 31 | const logger = new SplunkLogger(loggerConfig); 32 | 33 | exports.handler = (event, context, callback) => { 34 | console.log('Received event:', JSON.stringify(event, null, 2)); 35 | 36 | // First, configure logger to automatically add Lambda metadata and to hook into Lambda callback 37 | configureLogger(context, callback); // eslint-disable-line no-use-before-define 38 | 39 | // CloudWatch Logs data is base64 encoded so decode here 40 | const payload = new Buffer.from(event.awslogs.data, 'base64'); 41 | // CloudWatch Logs are gzip compressed so expand here 42 | zlib.gunzip(payload, (error, result) => { 43 | if (error) { 44 | callback(error); 45 | } else { 46 | const parsed = JSON.parse(result.toString('ascii')); 47 | console.log('Decoded payload:', JSON.stringify(parsed, null, 2)); 48 | let count = 0; 49 | if (parsed.logEvents) { 50 | parsed.logEvents.forEach((item) => { 51 | /* Send item message to Splunk with optional metadata properties such as time, index, source, sourcetype, and host. 52 | - Change "item.timestamp" below if time is specified in another field in the event. 53 | - Set or remove metadata properties as needed. 
For descripion of each property, refer to: 54 | http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ 55 | logger.send({ 56 | message: item.message, 57 | metadata: { 58 | time: item.timestamp ? new Date(item.timestamp).getTime() / 1000 : Date.now(), 59 | host: 'serverless', 60 | source: `lambda:${context.functionName}`, 61 | sourcetype: 'httpevent', 62 | //index: 'main', 63 | }, 64 | }); 65 | 66 | count += 1; 67 | }); 68 | } 69 | // Send all the events in a single batch to Splunk 70 | logger.flush((err, resp, body) => { 71 | // Request failure or valid response from Splunk with HEC error code 72 | if (err || (body && body.code !== 0)) { 73 | // If failed, error will be handled by pre-configured logger.error() below 74 | } else { 75 | // If succeeded, body will be { text: 'Success', code: 0 } 76 | console.log('Response from Splunk:', body); 77 | console.log(`Successfully processed ${count} log event(s).`); 78 | callback(null, count); // Return number of log events 79 | } 80 | }); 81 | } 82 | }); 83 | }; 84 | 85 | const configureLogger = (context, callback) => { 86 | // Override SplunkLogger default formatter 87 | logger.eventFormatter = (event) => { 88 | // Enrich event only if it is an object 89 | if (typeof event === 'object' && !Object.prototype.hasOwnProperty.call(event, 'awsRequestId')) { 90 | // Add awsRequestId from Lambda context for request tracing 91 | event.awsRequestId = context.awsRequestId; // eslint-disable-line no-param-reassign 92 | } 93 | return event; 94 | }; 95 | 96 | // Set common error handler for logger.send() and logger.flush() 97 | logger.error = (error, payload) => { 98 | console.log('error', error, 'context', payload); 99 | callback(error); 100 | }; 101 | }; 102 | -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/integration-test.js: -------------------------------------------------------------------------------- 1 | var handler = 
require('./index').handler; 2 | 3 | fs=require('fs'); 4 | var event = JSON.parse(fs.readFileSync('sampleEvent.json', 'utf8')); 5 | 6 | handler( 7 | event, 8 | { 9 | functionName: 'splunk-cloudwatch-logs-processor', 10 | awsRequestId: Math.floor(Math.random() * Math.pow(10,10)) 11 | }, 12 | function() { 13 | console.log(Array.prototype.slice.call(arguments)); 14 | } 15 | ); 16 | 17 | console.log("Done"); -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-cloudwatch-logs-processor", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs18.x", 6 | "Description": "Stream events from AWS CloudWatch Logs to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "triggers": { 17 | "cloudwatch-logs": [ 18 | {} 19 | ] 20 | }, 21 | "version": "0.8.1", 22 | "license": "CC0-1.0", 23 | "tags": [ 24 | "nodejs", 25 | "splunk", 26 | "cloudwatch", 27 | "cloudwatch-logs" 28 | ], 29 | "authors": [ 30 | "Roy Arsan (https://www.splunk.com)", 31 | "Glenn Block (https://www.splunk.com)" 32 | ], 33 | "roleTemplates": [ 34 | ] 35 | } 36 | 37 | -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-cloudwatch-logs-processor", 3 | "version": "1.0.0", 4 | "description": "AWS Lambda function blueprint to stream events from AWS CloudWatch Logs to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc --ignore-path ../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "pretest": "npm run lint", 10 | 
"build:zip": "zip -i \\*.js \\*.json -r splunk-cloudwatch-logs-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 11 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 12 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides CloudWatchLogsGroupName=\"$npm_config_parm_cwl_group_name\" SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 13 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 14 | "clean:zip": "rm -f splunk-cloudwatch-logs-processor.zip", 15 | "clean:template": "rm -f template.output.yaml", 16 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 17 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 18 | }, 19 | "keywords": [ 20 | "splunk", 21 | "lambda", 22 | "cloudwatch", 23 | "cloudwatch-logs", 24 | "hec" 25 | ], 26 | "authors": [ 27 | "Roy Arsan (https://www.splunk.com)", 28 | "Glenn Block (https://www.splunk.com)", 29 | "Tarik Makota (https://aws.amazon.com)" 30 | ], 31 | "license": "MIT", 32 | "dependencies": { 33 | "splunk-logging": "^0.11.1" 34 | }, 35 | "devDependencies": { 36 | "babel-eslint": "^10.1.0", 37 | "eslint": "^9.10.0", 38 | "eslint-config-airbnb": "^19.0.4", 39 | "eslint-plugin-babel": "^4.0.0", 40 | "eslint-plugin-import": "^2.2.0", 41 | "eslint-plugin-jsx-a11y": "^6.10.0", 42 | "eslint-plugin-react": "^7.36.1" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/sampleEvent.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "awslogs": { 3 | "data": "H4sIAAAAAAAAAHWPwQqCQBCGX0Xm7EFtK+smZBEUgXoLCdMhFtKV3akI8d0bLYmibvPPN3wz00CJxmQnTO41whwWQRIctmEcB6sQbFC3CjW3XW8kxpOpP+OC22d1Wml1qZkQGtoMsScxaczKN3plG8zlaHIta5KqWsozoTYw3/djzwhpLwivWFGHGpAFe7DL68JlBUk+l7KSN7tCOEJ4M3/qOI49vMHj+zCKdlFqLaU2ZHV2a4Ct/an0/ivdX8oYc1UVX860fQDQiMdxRQEAAA==" 4 | } 5 | } -------------------------------------------------------------------------------- /splunk-cloudwatch-logs-processor/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application to stream events from AWS CloudWatch Logs to Splunk HTTP Event Collector (HEC). 5 | 6 | This SAM template creates the Lambda function with its IAM execution role, a CloudWatch Logs subscription 7 | filter for user-provided log group, along with a Lambda permission to grant CloudWatch Logs permission 8 | to invoke this function. Log group must be in same region as the region where this stack is created. 
9 | 10 | Last Modified: 06 Oct, 2017 11 | Authors: Roy Arsan , Tarik Makota 12 | 13 | Parameters: 14 | CloudWatchLogsGroupName: 15 | Type: "String" 16 | Description: "Name of a CloudWatch log group (must be in the same region)" 17 | 18 | SplunkHttpEventCollectorURL: 19 | Type: "String" 20 | Description: "URL address of your Splunk HTTP event collector endpoint" 21 | 22 | SplunkHttpEventCollectorToken: 23 | Type: "String" 24 | Description: "Token of your Splunk HTTP event collector endpoint" 25 | 26 | Outputs: 27 | SplunkCloudWatchLogsProcessorFunction: 28 | Description: "Splunk CloudWatch Logs Lambda Function ARN" 29 | Value: !GetAtt SplunkCloudWatchLogsProcessorFunction.Arn 30 | 31 | Resources: 32 | CloudWatchLogsSubscriptionFilter: 33 | Type: "AWS::Logs::SubscriptionFilter" 34 | DependsOn: CloudWatchLogsLambdaPermission 35 | Properties: 36 | DestinationArn: !GetAtt SplunkCloudWatchLogsProcessorFunction.Arn 37 | LogGroupName: !Ref CloudWatchLogsGroupName 38 | FilterPattern: "" 39 | 40 | CloudWatchLogsLambdaPermission: 41 | Type: "AWS::Lambda::Permission" 42 | Properties: 43 | Action: "lambda:InvokeFunction" 44 | FunctionName: !GetAtt SplunkCloudWatchLogsProcessorFunction.Arn 45 | Principal: !Join 46 | - "." 
47 | - [ "logs", !Ref "AWS::Region", !Ref "AWS::URLSuffix" ] 48 | SourceAccount: !Ref "AWS::AccountId" 49 | SourceArn: !Join 50 | - ":" 51 | - [ "arn", !Ref "AWS::Partition", "logs", !Ref "AWS::Region", !Ref "AWS::AccountId", "log-group", !Ref CloudWatchLogsGroupName, "*" ] 52 | 53 | SplunkCloudWatchLogsProcessorFunction: 54 | Type: 'AWS::Serverless::Function' 55 | Properties: 56 | Handler: index.handler 57 | Runtime: nodejs18.x 58 | CodeUri: ./splunk-cloudwatch-logs-processor.zip 59 | Description: Stream events from AWS CloudWatch Logs to Splunk HTTP event collector 60 | MemorySize: 512 61 | Timeout: 30 62 | Environment: 63 | Variables: 64 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 65 | SPLUNK_HEC_TOKEN: !Ref SplunkHttpEventCollectorToken 66 | 67 | -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-s3-bucket 2 | parm_table_name = fakeTableName 3 | parm_stream_arn = arn:aws:dynamodb:::table//stream/<36337357357> 4 | parm_stream_starting_position = LATEST 5 | parm_stream_batch_size = 100 6 | parm_hec_url = https://192.168.0.0:8088/services/collector 7 | parm_hec_token = FAKE-HEC-TOKEN 8 | parm_stack_name = FakeDynamoDBStack -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/db-stream-lambda.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Create AWS DynamoDB Stream to be used by Splunk 3 | * 4 | * This function creates AWS DynamoDB Stream to be used by Splunk 5 | * 6 | */ 7 | 8 | 'use strict'; 9 | 10 | const AWS = require('aws-sdk'); 11 | 12 | const dynamodb = new AWS.DynamoDB({ version: '2012-08-10' }); 13 | 14 | const SUCCESS = 'SUCCESS'; 15 | const FAILED = 'FAILED'; 16 | /*function sendResponse(event, context, callback, status, data, err) { 17 | const 
reason = err ? err.message : ''; 18 | const responseBody = { 19 | StackId: event.StackId, 20 | RequestId: event.RequestId, 21 | LogicalResourceId: event.LogicalResourceId, 22 | PhysicalResourceId: "", 23 | Status: status, 24 | Reason: 'See details in CloudWatch Log:' + context.logStreamName, 25 | Body: data, 26 | }; 27 | 28 | console.log('RESPONSE:\n', responseBody); 29 | const json = JSON.stringify(responseBody); 30 | 31 | const https = require('https'); 32 | const url = require('url'); 33 | 34 | const parsedUrl = url.parse(event.ResponseURL); 35 | const options = { 36 | hostname: parsedUrl.hostname, 37 | port: 443, 38 | path: parsedUrl.path, 39 | method: 'PUT', 40 | headers: { 41 | 'content-type': '', 42 | 'content-length': json.length, 43 | }, 44 | }; 45 | 46 | const request = https.request(options, (response) => { 47 | console.log('STATUS: ', response.statusCode); 48 | console.log('HEADERS: ', JSON.stringify(response.headers)); 49 | context.done(null, data); 50 | }); 51 | 52 | request.on('error', (error) => { 53 | console.log('sendResponse Error:\n', error); 54 | context.done(error); 55 | }); 56 | 57 | request.on('end', () => { 58 | console.log('end'); 59 | }); 60 | request.write(json); 61 | request.end(); 62 | } 63 | 64 | */ 65 | 66 | function sendResponse(event, context, responseStatus, responseData, physicalResourceId) { 67 | const responseBody = JSON.stringify({ 68 | Status: responseStatus, 69 | Reason: `See the details in CloudWatch Log Stream: ${context.logStreamName}`, 70 | PhysicalResourceId: physicalResourceId || context.logStreamName, 71 | StackId: event.StackId, 72 | RequestId: event.RequestId, 73 | LogicalResourceId: event.LogicalResourceId, 74 | Data: responseData }); 75 | 76 | const https = require('https'); 77 | const url = require('url'); 78 | 79 | const parsedUrl = url.parse(event.ResponseURL); 80 | const options = { 81 | hostname: parsedUrl.hostname, 82 | port: 443, 83 | path: parsedUrl.path, 84 | method: 'PUT', 85 | headers: { 86 | 
'content-type': '', 87 | 'content-length': responseBody.length } }; 88 | 89 | const request = https.request(options, (response) => { 90 | console.log(`Status code: ${response.statusCode}`); 91 | console.log(`Status message: ${response.statusMessage}`); 92 | context.done(); 93 | }); 94 | 95 | request.on('error', (error) => { 96 | console.log(`send(..) failed executing https.request(..): ${error}`); 97 | context.done(); 98 | }); 99 | request.write(responseBody); 100 | request.end(); 101 | } 102 | /******************************************************** 103 | * This function modeled after: 104 | * https://github.com/andrew-templeton/cfn-dynamodb-streamspecification/blob/master/index.js 105 | ********************************************************/ 106 | function stabilizeTable(table, callback) { 107 | // Count number of times we check if Table is ACTIVE 108 | let numberOfTrys = 0; 109 | // Wait 2 second before recursive calls to check state 110 | const timeToWaitInMs = 2000; 111 | // Try 15 times in 2 sec increment - i.e. appx. 30 seconds 112 | const maximumTrysAllowed = 16; 113 | // recursive function that checks Table Status 114 | function wait() { 115 | dynamodb.describeTable({ 116 | TableName: table, 117 | }, (describeTableErr, describeTableData) => { 118 | if (describeTableErr) { 119 | console.log('wait::Describe table failed: %j', describeTableErr); 120 | callback(describeTableErr); 121 | } 122 | // No errors, check state 123 | if (describeTableData.Table.TableStatus !== 'ACTIVE') { 124 | if (numberOfTrys < maximumTrysAllowed) { 125 | // This is fine then. 126 | numberOfTrys += 1; 127 | console.log('wait::Table not ACTIVE yet, waiting longer, making recursive call'); 128 | // Ensure no flooding by waiting for the interval to go by. 129 | return setTimeout(wait, timeToWaitInMs); 130 | } 131 | // Else get really mad! 
132 | console.error('wait::TIMEOUT passed by and table is still not ACTIVE'); 133 | callback({ 134 | message: 'Table took too long to stabilize after StreamSpecification change.', 135 | }); 136 | } 137 | // Status is ACTIVE so be happy and callback 138 | console.log('wait::Table stabilized and is ACTIVE'); 139 | console.log('wait::Returning data', JSON.stringify(describeTableData)); 140 | callback(null, describeTableData); 141 | }); 142 | } 143 | 144 | // Begin recursively checking. 145 | console.log('stabilizeTable::Beginning wait sequence to allow Table to stabilize to ACTIVE...'); 146 | wait(); 147 | } 148 | /************************************************************** 149 | * 150 | *************************************************************/ 151 | function createTableStream(pTableName, pStreamViewType, callback) { 152 | console.log('createTableStream for table: %j', pTableName, pStreamViewType); 153 | dynamodb.updateTable({ 154 | TableName: pTableName, 155 | StreamSpecification: { 156 | StreamEnabled: true, 157 | StreamViewType: pStreamViewType, 158 | }, 159 | }, (error, data) => { 160 | if (error) { 161 | console.log('createTableStream::Error when attempting StreamSpecification addition activation: %j', error); 162 | callback(error); 163 | } 164 | console.log('createTableStream::Successfully triggered StreamSpecification addition (but not complete yet): %j', JSON.stringify(data)); 165 | stabilizeTable(pTableName, callback); 166 | }); 167 | } 168 | 169 | 170 | exports.handler = (event, context, callback) => { 171 | // extract Table and Stream Type out of CF request 172 | const DynamoTableName = event.ResourceProperties.DynamoTableName; 173 | const StreamViewType = event.ResourceProperties.StreamViewType; 174 | 175 | const physicalResource = `${DynamoTableName}::${StreamViewType}`; 176 | //const responseStatus = 'FAILED'; 177 | //const responseData = { TEST: 'TEST' }; 178 | 179 | let hasStreamAlready = false; 180 | console.log('handler::REQUEST RECEIVED:\n', 
JSON.stringify(event)); 181 | console.log('handler::RequestType:', event.RequestType); 182 | 183 | const params = { 184 | TableName: DynamoTableName, /* required */ 185 | }; 186 | 187 | // get current status and description of the DynamoDB table 188 | dynamodb.describeTable(params, (err, descTableData) => { 189 | if (err) { 190 | // error occured while doing describeTable 191 | console.log(err, err.stack); // an error occurred 192 | //callback(err); 193 | sendResponse(event, context, FAILED, err, physicalResource); 194 | } else { 195 | // describeTable was successfull, check if it already has a stream 196 | console.log('handler::', params.TableName, ' status is ', descTableData.Table.TableStatus); 197 | console.log('handler:: data from tableDescribe: ', JSON.stringify(descTableData)); 198 | if (descTableData.Table.StreamSpecification && descTableData.Table.StreamSpecification.StreamEnabled) { 199 | hasStreamAlready = true; 200 | } 201 | console.log('handler:: table already has stream? : ', hasStreamAlready); 202 | } 203 | }); 204 | 205 | 206 | // For Delete requests, immediately send a SUCCESS response. 
207 | if (event.RequestType === 'Delete') { 208 | console.log('handeler::DELETE::DynamoDB stream exists?', hasStreamAlready); 209 | if (hasStreamAlready) { 210 | sendResponse(event, context, SUCCESS, '{Message:Table already has an enabled StreamSpecification!}', physicalResource); 211 | } else { 212 | sendResponse(event, context, SUCCESS, '', physicalResource); 213 | } 214 | 215 | } else if (event.RequestType === 'Update') { 216 | //sendResponse(event, context, "SUCCESS"); 217 | //return; 218 | } else if (event.RequestType === 'Create') { 219 | console.log('handeler::CREATE::DynamoDB stream creation'); 220 | // if there is stream alrady , bail out 221 | if (hasStreamAlready) { 222 | console.log('handeler::CREATE::DynamoDB stream already exists'); 223 | sendResponse(event, context, FAILED, '{Message:Table already has an enabled StreamSpecification!}', physicalResource); 224 | } 225 | 226 | // create stream 227 | createTableStream(DynamoTableName, StreamViewType, 228 | (createError, createData) => { 229 | if (createError) { 230 | console.error('handler::CREATE::Failed to create DynamoDB Stream, error ', createError); 231 | //callback(createError); 232 | sendResponse(event, context, FAILED, createError, physicalResource); 233 | } else { 234 | console.log('handeler::CREATE::DynamoDB stream has been created:', JSON.stringify(createData)); 235 | const data = { 236 | Arn: createData.Table.LatestStreamArn, 237 | }; 238 | //const response = buildresponse(event, context, 'SUCCESS', 'physicalId', data); 239 | //console.log('handler:: Response is: ', response); 240 | //cfnresponse.send(event, context, cfnresponse.SUCCESS, data); 241 | //callback(null, response); 242 | sendResponse(event, context, SUCCESS, data, physicalResource); 243 | } 244 | }); 245 | } 246 | 247 | 248 | //sendResponse(event, context, callback, 'SUCCESS', responseData, null); 249 | }; 250 | 251 | -------------------------------------------------------------------------------- 
/splunk-dynamodb-stream-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Stream events from AWS DynamoDB Stream to Splunk 3 | * 4 | * This function streams AWS DynamoDB Stream events to Splunk using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to stream events to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 13 | * To create a new token for this Lambda function, refer to Splunk Docs: 14 | * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token 15 | * 16 | * For details about Splunk logging library used below: https://github.com/splunk/splunk-javascript-logging 17 | */ 18 | 19 | 'use strict'; 20 | 21 | const loggerConfig = { 22 | url: process.env.SPLUNK_HEC_URL, 23 | token: process.env.SPLUNK_HEC_TOKEN, 24 | maxBatchCount: 0, // Manually flush events 25 | maxRetries: 3, // Retry 3 times 26 | }; 27 | 28 | const SplunkLogger = require('splunk-logging').Logger; 29 | 30 | const logger = new SplunkLogger(loggerConfig); 31 | 32 | exports.handler = (event, context, callback) => { 33 | console.log('Received event:', JSON.stringify(event, null, 2)); 34 | 35 | // First, configure logger to automatically add Lambda metadata and to hook into Lambda callback 36 | configureLogger(context, callback); // eslint-disable-line no-use-before-define 37 | 38 | let count = 0; 39 | event.Records.forEach((record) => { 40 | console.log('DynamoDB Record: %j', record.dynamodb); 41 | 42 | /* Send record to Splunk with optional metadata properties such as time, index, source, sourcetype, and host. 
43 | - Set time value below if you want to explicitly set event timestamp. 44 | - Set or remove other metadata properties as needed. For descripion of each property, refer to: 45 | http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ 46 | logger.send({ 47 | message: record, 48 | metadata: { 49 | host: 'serverless', 50 | source: `lambda:${context.functionName}`, 51 | sourcetype: 'httpevent', 52 | //time: Date.now(), 53 | //index: 'main', 54 | }, 55 | }); 56 | 57 | count += 1; 58 | }); 59 | 60 | // Send all the events in a single batch to Splunk 61 | logger.flush((err, resp, body) => { 62 | // Request failure or valid response from Splunk with HEC error code 63 | if (err || (body && body.code !== 0)) { 64 | // If failed, error will be handled by pre-configured logger.error() below 65 | } else { 66 | // If succeeded, body will be { text: 'Success', code: 0 } 67 | console.log('Response from Splunk:', body); 68 | console.log(`Successfully processed ${count} record(s).`); 69 | callback(null, count); // Return number of log events 70 | } 71 | }); 72 | }; 73 | 74 | const configureLogger = (context, callback) => { 75 | // Override SplunkLogger default formatter 76 | logger.eventFormatter = (event) => { 77 | // Enrich event only if it is an object 78 | if (typeof event === 'object' && !Object.prototype.hasOwnProperty.call(event, 'awsRequestId')) { 79 | // Add awsRequestId from Lambda context for request tracing 80 | event.awsRequestId = context.awsRequestId; // eslint-disable-line no-param-reassign 81 | } 82 | return event; 83 | }; 84 | 85 | // Set common error handler for logger.send() and logger.flush() 86 | logger.error = (error, payload) => { 87 | console.log('error', error, 'context', payload); 88 | callback(error); 89 | }; 90 | }; 91 | -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/integration-test.js: 
-------------------------------------------------------------------------------- 1 | var handler = require('./index').handler; 2 | 3 | fs=require('fs'); 4 | var event = JSON.parse(fs.readFileSync('sampleEvent.json', 'utf8')); 5 | 6 | handler( 7 | event, 8 | { 9 | functionName: 'splunk-dynamodb-stream-processor', 10 | awsRequestId: Math.floor(Math.random() * Math.pow(10,10)) 11 | }, 12 | function() { 13 | console.log(Array.prototype.slice.call(arguments)); 14 | } 15 | ); 16 | 17 | console.log("Done"); -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-dynamodb-stream-processor", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs10.x", 6 | "Description": "Stream AWS DynamoDB table activity from DynamoDB Stream to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "triggers": { 17 | "dynamodb": [ 18 | {} 19 | ] 20 | }, 21 | "version": "0.8.1", 22 | "license": "CC0-1.0", 23 | "tags": [ 24 | "nodejs", 25 | "splunk", 26 | "dynamodb", 27 | "dynamodb-stream" 28 | ], 29 | "authors": [ 30 | "Roy Arsan (https://www.splunk.com)", 31 | "Glenn Block (https://www.splunk.com)" 32 | ], 33 | "roleTemplates": [ 34 | "DynamoDB" 35 | ] 36 | } 37 | 38 | -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-dynamodb-stream-processor", 3 | "version": "1.0.0", 4 | "description": "AWS Lambda function blueprint to stream events from AWS DynamoDB stream to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc 
--ignore-path ../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "pretest": "npm run lint", 10 | "build:zip": "zip -i \\*.js \\*.json -r splunk-dynamodb-stream-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 11 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 12 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides DynamoDBTableName=\"$npm_config_parm_table_name\" DynamoDBStreamARN=\"$npm_config_parm_stream_arn\" DynamoDBStreamStartingPosition=\"$npm_config_parm_stream_starting_position\" DynamoDBStreamBatchSize=\"$npm_config_parm_stream_batch_size\" SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" CreationDate=\"$(date +'%F %T')\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 13 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 14 | "clean:zip": "rm -f splunk-dynamodb-stream-processor.zip", 15 | "clean:template": "rm -f template.output.yaml", 16 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 17 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 18 | }, 19 | "keywords": [ 20 | "splunk", 21 | "lambda", 22 | "dynamodb", 23 | "dynamodb-stream", 24 | "hec" 25 | ], 26 | "authors": [ 27 | "Roy Arsan (https://www.splunk.com)", 28 | "Glenn Block (https://www.splunk.com)", 29 | "Tarik Makota (https://aws.amazon.com)" 30 | ], 31 | "license": "MIT", 32 | "dependencies": { 33 | "splunk-logging": "^0.9.3" 34 | }, 35 | "devDependencies": { 36 | "babel-eslint": "^7.1.0", 37 | "eslint": "^3.12.0", 38 | "eslint-config-airbnb": "^13.0.0", 39 | "eslint-plugin-babel": "^4.0.0", 40 | 
"eslint-plugin-import": "^2.2.0", 41 | "eslint-plugin-jsx-a11y": "^2.2.3", 42 | "eslint-plugin-react": "^6.7.1" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "Records": [ 3 | { 4 | "eventID": "1", 5 | "eventVersion": "1.0", 6 | "dynamodb": { 7 | "Keys": { 8 | "Id": { 9 | "N": "101" 10 | } 11 | }, 12 | "NewImage": { 13 | "Message": { 14 | "S": "New item!" 15 | }, 16 | "Id": { 17 | "N": "101" 18 | } 19 | }, 20 | "StreamViewType": "NEW_AND_OLD_IMAGES", 21 | "SequenceNumber": "111", 22 | "SizeBytes": 26 23 | }, 24 | "awsRegion": "us-west-2", 25 | "eventName": "INSERT", 26 | "eventSourceARN": "arn:aws:dynamodb:us-west-2:account-id:table/ExampleTableWithStream/stream/2015-06-27T00:48:05.899", 27 | "eventSource": "aws:dynamodb" 28 | }, 29 | { 30 | "eventID": "2", 31 | "eventVersion": "1.0", 32 | "dynamodb": { 33 | "OldImage": { 34 | "Message": { 35 | "S": "New item!" 
36 | }, 37 | "Id": { 38 | "N": "101" 39 | } 40 | }, 41 | "SequenceNumber": "222", 42 | "Keys": { 43 | "Id": { 44 | "N": "101" 45 | } 46 | }, 47 | "SizeBytes": 59, 48 | "NewImage": { 49 | "Message": { 50 | "S": "This item has changed" 51 | }, 52 | "Id": { 53 | "N": "101" 54 | } 55 | }, 56 | "StreamViewType": "NEW_AND_OLD_IMAGES" 57 | }, 58 | "awsRegion": "us-west-2", 59 | "eventName": "MODIFY", 60 | "eventSourceARN": "arn:aws:dynamodb:us-west-2:account-id:table/ExampleTableWithStream/stream/2015-06-27T00:48:05.899", 61 | "eventSource": "aws:dynamodb" 62 | }, 63 | { 64 | "eventID": "3", 65 | "eventVersion": "1.0", 66 | "dynamodb": { 67 | "Keys": { 68 | "Id": { 69 | "N": "101" 70 | } 71 | }, 72 | "SizeBytes": 38, 73 | "SequenceNumber": "333", 74 | "OldImage": { 75 | "Message": { 76 | "S": "This item has changed" 77 | }, 78 | "Id": { 79 | "N": "101" 80 | } 81 | }, 82 | "StreamViewType": "NEW_AND_OLD_IMAGES" 83 | }, 84 | "awsRegion": "us-west-2", 85 | "eventName": "REMOVE", 86 | "eventSourceARN": "arn:aws:dynamodb:us-west-2:account-id:table/ExampleTableWithStream/stream/2015-06-27T00:48:05.899", 87 | "eventSource": "aws:dynamodb" 88 | } 89 | ] 90 | } -------------------------------------------------------------------------------- /splunk-dynamodb-stream-processor/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application to stream events from AWS DynamoDB Stream to Splunk HTTP Event Collector (HEC). 5 | 6 | This SAM template creates the Lambda function with its IAM execution role, a subscription to DynamoDB table 7 | along with a Lambda permission to grant DynamoDB permission to invoke this function. 8 | DynamoDB must be in same region as the region where this stack is created. 
9 | 10 | Last Modified: 29 Nov, 2017 11 | Authors: Roy Arsan , Tarik Makota 12 | 13 | Parameters: 14 | DynamoDBTableName: 15 | Type: "String" 16 | Description: "Name of DynamoDB table. The Lambda function will be invoked whenever this table is updated." 17 | 18 | DynamoDBStreamARN: 19 | Type: "String" 20 | Description: "ARN of DynamoDB table Stream. The Lambda function will be invoked whenever this stream is updated." 21 | 22 | DynamoDBStreamBatchSize: 23 | Type: "Number" 24 | Description: "The largest number of records that AWS Lambda will retrieve from your table at the time of invoking your function. Your function receives an event with all the retrieved records." 25 | Default: "100" 26 | 27 | DynamoDBStreamStartingPosition: 28 | Type: "String" 29 | Description: "The position in the stream where AWS Lambda should start reading. Allowed values are 'TRIM_HORIZON' or 'LATEST'. For more information, see ShardIteratorType in the Amazon DynamoDB API Reference." 30 | Default: "LATEST" 31 | 32 | SplunkHttpEventCollectorURL: 33 | Type: "String" 34 | Description: "URL address of your Splunk HTTP event collector endpoint" 35 | 36 | SplunkHttpEventCollectorToken: 37 | Type: "String" 38 | Description: "Token of your Splunk HTTP event collector endpoint" 39 | 40 | Outputs: 41 | SplunkDynamoDBProcessor: 42 | Description: "Splunk DynamoDB Stream Lambda Function ARN" 43 | Value: !GetAtt SplunkDynamoDBProcessor.Arn 44 | 45 | Resources: 46 | SplunkDynamoDBProcessor: 47 | Type: 'AWS::Serverless::Function' 48 | Properties: 49 | Handler: index.handler 50 | Runtime: nodejs10.x 51 | CodeUri: ./splunk-dynamodb-stream-processor.zip 52 | Policies: 53 | - DynamoDBStreamReadPolicy: 54 | TableName: !Ref DynamoDBTableName 55 | StreamName: !Ref DynamoDBStreamARN 56 | Description: Stream events from DynamoDB Table to Splunk HTTP event collector 57 | MemorySize: 512 58 | Timeout: 10 59 | Environment: 60 | Variables: 61 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 62 | SPLUNK_HEC_TOKEN: 
!Ref SplunkHttpEventCollectorToken 63 | Events: 64 | TableStream: 65 | Type: DynamoDB 66 | Properties: 67 | Stream: !Ref DynamoDBStreamARN 68 | StartingPosition: !Ref DynamoDBStreamStartingPosition 69 | BatchSize: !Ref DynamoDBStreamBatchSize 70 | -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-bucket-name 2 | parm_s3_logs_bucket_name = fake-bucket-name-to-be-created 3 | parm_s3_prefix = 4 | parm_s3_suffix = 5 | parm_hec_url = https://192.168.0.0:8088/services/collector 6 | parm_hec_token = FAKE-FAKE-FAKE 7 | parm_stack_name = fakeStackName 8 | -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Forward Application Load Balancer Access Logs from S3 to Splunk via AWS Lambda 3 | * 4 | * This function streams access logs to Splunk Enterprise using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to log to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 
13 | * To create a new token for this Lambda function, refer to Splunk Docs: 14 | * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token 15 | * 16 | * For details about Splunk logging library used below: https://github.com/splunk/splunk-javascript-logging 17 | */ 18 | 19 | 'use strict'; 20 | 21 | const loggerConfig = { 22 | url: process.env.SPLUNK_HEC_URL, 23 | token: process.env.SPLUNK_HEC_TOKEN, 24 | maxBatchCount: 0, // Manually flush events 25 | maxRetries: 3, // Retry 3 times 26 | }; 27 | 28 | const SplunkLogger = require('splunk-logging').Logger; 29 | const aws = require('aws-sdk'); 30 | const zlib = require('zlib'); 31 | 32 | const logger = new SplunkLogger(loggerConfig); 33 | const s3 = new aws.S3({ apiVersion: '2006-03-01' }); 34 | 35 | exports.handler = (event, context, callback) => { 36 | console.log('Received event:', JSON.stringify(event, null, 2)); 37 | 38 | // First, configure logger to automatically add Lambda metadata and to hook into Lambda callback 39 | configureLogger(context, callback); // eslint-disable-line no-use-before-define 40 | 41 | // Get the object from the event and show its content type 42 | const bucket = event.Records[0].s3.bucket.name; 43 | const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' ')); 44 | const params = { 45 | Bucket: bucket, 46 | Key: key, 47 | }; 48 | s3.getObject(params, (error, data) => { 49 | if (error) { 50 | console.log(error); 51 | const message = `Error getting object ${key} from bucket ${bucket}. 
Make sure they exist and your bucket is in the same region as this function.`; 52 | console.log(message); 53 | callback(message); 54 | } else { 55 | console.log(`Retrieved access log: LastModified="${data.LastModified}" ContentLength=${data.ContentLength}`); 56 | const payload = data.Body; 57 | 58 | zlib.gunzip(payload, (error, result) => { // eslint-disable-line no-shadow 59 | if (error) { 60 | console.log(error); 61 | callback(error); 62 | } else { 63 | const parsed = result.toString('ascii'); 64 | const logEvents = parsed.split('\n'); 65 | let count = 0; 66 | let time; 67 | 68 | if (logEvents) { 69 | logEvents.forEach((logEntry) => { 70 | if (logEntry) { 71 | // Extract timestamp as 2nd field in log entry 72 | // For more details: http://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-access-logs.html#access-log-entry-format 73 | time = logEntry.split(' ')[1]; 74 | 75 | /* Send log entry to Splunk with optional metadata properties such as time, index, source, sourcetype, and host. 76 | - Set or remove metadata properties as needed. 
For descripion of each property, refer to: 77 | http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ 78 | logger.send({ 79 | message: logEntry, 80 | metadata: { 81 | time: new Date(time).getTime() / 1000, 82 | host: 'serverless', 83 | source: `s3://${bucket}/${key}`, 84 | sourcetype: 'aws:elb:accesslogs', 85 | //index: 'main', 86 | }, 87 | }); 88 | count += 1; 89 | } 90 | }); 91 | console.log(`Processed ${count} log entries`); 92 | } 93 | 94 | logger.flush((err, resp, body) => { 95 | // Request failure or valid response from Splunk with HEC error code 96 | if (err || (body && body.code !== 0)) { 97 | // If failed, error will be handled by pre-configured logger.error() below 98 | } else { 99 | // If succeeded, body will be { text: 'Success', code: 0 } 100 | console.log('Response from Splunk:', body); 101 | console.log(`Successfully forwarded ${count} log entries.`); 102 | callback(null, count); // Return number of log events 103 | } 104 | }); 105 | } 106 | }); 107 | } 108 | }); 109 | }; 110 | 111 | const configureLogger = (context, callback) => { 112 | // Override SplunkLogger default formatter 113 | logger.eventFormatter = (event) => { 114 | // Enrich event only if it is an object 115 | if (typeof event === 'object' && !Object.prototype.hasOwnProperty.call(event, 'awsRequestId')) { 116 | // Add awsRequestId from Lambda context for request tracing 117 | event.awsRequestId = context.awsRequestId; // eslint-disable-line no-param-reassign 118 | } 119 | return event; 120 | }; 121 | 122 | // Set common error handler for logger.send() and logger.flush() 123 | logger.error = (error, payload) => { 124 | console.log('error', error, 'context', payload); 125 | callback(error); 126 | }; 127 | }; 128 | -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/integration-test.js: -------------------------------------------------------------------------------- 1 | var 
handler = require('./index').handler; 2 | 3 | fs=require('fs'); 4 | var event = JSON.parse(fs.readFileSync('sampleEvent.json', 'utf8')); 5 | 6 | handler( 7 | event, 8 | { 9 | functionName: 'splunk-elb-application-access-logs-processor', 10 | awsRequestId: Math.floor(Math.random() * Math.pow(10,10)) 11 | }, 12 | function() { 13 | console.log(Array.prototype.slice.call(arguments)); 14 | } 15 | ); 16 | 17 | console.log("Done"); -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-elb-application-access-logs-processor", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs10.x", 6 | "Description": "Stream Application ELB access logs from S3 to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "triggers": { 17 | "s3": [ 18 | {} 19 | ] 20 | }, 21 | "version": "0.8.1", 22 | "license": "CC0-1.0", 23 | "tags": [ 24 | "nodejs", 25 | "splunk", 26 | "elb", 27 | "s3", 28 | "application-elb", 29 | "cloudtrail" 30 | ], 31 | "authors": [ 32 | "Roy Arsan (https://www.splunk.com)", 33 | "Tarik Makota (https://aws.amazon.com)" 34 | ], 35 | "roleTemplates": [ 36 | "S3" 37 | ] 38 | } 39 | 40 | -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-elb-application-access-logs-processor", 3 | "version": "1.0.0", 4 | "description": "AWS Lambda function blueprint to stream Application ELB access logs from S3 to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc --ignore-path 
../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "postinstall": "node -e \"var s='../lib',d='lib',fs=require('fs');fs.exists(d,function(e){e||fs.symlinkSync(s,d,'dir')});\"", 10 | "pretest": "npm run lint", 11 | "build:zip": "zip -i \\*.js \\*.json -r splunk-elb-application-access-logs-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 12 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 13 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides S3ALBLogsBucketName=\"$npm_config_parm_s3_logs_bucket_name\" S3EventType=\"$npm_config_parm_s3_event_type\" S3Prefix=\"$npm_config_parm_s3_prefix\" S3Suffix=\"$npm_config_parm_s3_suffix\" SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 14 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 15 | "clean:zip": "rm -f splunk-elb-application-access-logs-processor.zip", 16 | "clean:template": "rm -f template.output.yaml", 17 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 18 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 19 | }, 20 | "keywords": [ 21 | "splunk", 22 | "lambda", 23 | "elb", 24 | "application-elb", 25 | "hec" 26 | ], 27 | "authors": [ 28 | "Roy Arsan (https://www.splunk.com)", 29 | "Tarik Makota (https://aws.amazon.com)" 30 | ], 31 | "license": "MIT", 32 | "dependencies": { 33 | "splunk-logging": "^0.9.3" 34 | }, 35 | "devDependencies": { 36 | "aws-sdk": "^2.89.0", 37 | "babel-eslint": "^7.1.0", 38 | "eslint": "^3.12.0", 39 | "eslint-config-airbnb": "^13.0.0", 40 | "eslint-plugin-babel": 
"^4.0.0", 41 | "eslint-plugin-import": "^2.2.0", 42 | "eslint-plugin-jsx-a11y": "^2.2.3", 43 | "eslint-plugin-react": "^6.7.1" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "Records": [ 3 | { 4 | "eventVersion": "2.0", 5 | "eventTime": "1970-01-01T00:00:00.000Z", 6 | "requestParameters": { 7 | "sourceIPAddress": "127.0.0.1" 8 | }, 9 | "s3": { 10 | "configurationId": "testConfigRule", 11 | "object": { 12 | "eTag": "0123456789abcdef0123456789abcdef", 13 | "sequencer": "0A1B2C3D4E5F678901", 14 | "key": "elb-application/1234567890_elasticloadbalancing_us-west-2_app.clust-Splun-17UR9GV42PJ29.6d4bac7deed18b9a_20170510T0650Z_52.5.186.38_11phvfc6.log.gz", 15 | "size": 1024 16 | }, 17 | "bucket": { 18 | "arn": "arn:aws:s3:::lambda-sample-events", 19 | "name": "lambda-sample-events", 20 | "ownerIdentity": { 21 | "principalId": "EXAMPLE" 22 | } 23 | }, 24 | "s3SchemaVersion": "1.0" 25 | }, 26 | "responseElements": { 27 | "x-amz-id-2": "EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH", 28 | "x-amz-request-id": "EXAMPLE123456789" 29 | }, 30 | "awsRegion": "us-east-1", 31 | "eventName": "ObjectCreated:Put", 32 | "userIdentity": { 33 | "principalId": "EXAMPLE" 34 | }, 35 | "eventSource": "aws:s3" 36 | } 37 | ] 38 | } -------------------------------------------------------------------------------- /splunk-elb-application-access-logs-processor/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application to stream access logs of Application ELB from S3 to Splunk HTTP Event Collector (HEC). 
5 | 6 | This SAM template creates the Lambda function & associated policy + IAM role, and new S3 bucket 7 | with enabled Events notification to this Lambda function. Direct your load balancers access logs to this newly created S3 Bucket. 8 | To enable access logs on application load balancers, refer to AWS docs: 9 | http://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-access-logs.html#enable-access-logging 10 | 11 | Last Modified: 29 Nov, 2017 12 | Authors: Roy Arsan , Tarik Makota 13 | 14 | Parameters: 15 | S3ALBLogsBucketName: 16 | Type: "String" 17 | Description: "Name of S3 bucket that will store access logs. This bucket will be created for you. Name must be unique across all existing bucket names in Amazon S3." 18 | 19 | S3Prefix: 20 | Type: "String" 21 | Description: "Optional prefix to limit the notifications to objects with keys that start with matching characters. e.g. uploadedImages/" 22 | Default: "" 23 | 24 | S3Suffix: 25 | Type: "String" 26 | Description: "Optional suffix to limit the notifications to objects with keys that end with matching characters. e.g. 
.jpg" 27 | Default: ".log.gz" 28 | 29 | SplunkHttpEventCollectorURL: 30 | Type: "String" 31 | Description: "URL address of your Splunk HTTP event collector endpoint" 32 | 33 | SplunkHttpEventCollectorToken: 34 | Type: "String" 35 | Description: "Token of your Splunk HTTP event collector endpoint" 36 | 37 | Outputs: 38 | SplunkALBLogsProcessor: 39 | Description: "Splunk S3 Bucket Stream Lambda Function ARN" 40 | Value: !Ref SplunkALBProcessorFunction 41 | 42 | Resources: 43 | SplunkALBProcessorFunction: 44 | Type: 'AWS::Serverless::Function' 45 | Properties: 46 | Description: Stream ALB events from S3 to Splunk HTTP event collector 47 | Handler: index.handler 48 | Runtime: nodejs10.x 49 | CodeUri: ./splunk-elb-application-access-logs-processor.zip 50 | Policies: 51 | - S3CrudPolicy: 52 | BucketName: !Ref S3ALBLogsBucketName 53 | MemorySize: 512 54 | Timeout: 10 55 | Environment: 56 | Variables: 57 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 58 | SPLUNK_HEC_TOKEN: !Ref SplunkHttpEventCollectorToken 59 | Events: 60 | LogUpload: 61 | Type: S3 62 | Properties: 63 | Bucket: !Ref Bucket 64 | Events: s3:ObjectCreated:* 65 | Filter: 66 | S3Key: 67 | Rules: 68 | - 69 | Name: prefix 70 | Value: !Sub "${S3Prefix}" 71 | - 72 | Name: suffix 73 | Value: !Sub "${S3Suffix}" 74 | Bucket: 75 | Type: AWS::S3::Bucket 76 | Properties: 77 | BucketName: !Ref S3ALBLogsBucketName 78 | -------------------------------------------------------------------------------- /splunk-elb-classic-access-logs-processor/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-bucket-name 2 | parm_s3_logs_bucket_name = fake-bucket-name-to-be-created 3 | parm_s3_prefix = 4 | parm_s3_suffix = 5 | parm_hec_url = https://192.168.0.0:8088/services/collector 6 | parm_hec_token = FAKE-FAKE-FAKE 7 | parm_stack_name = fakeStackName 8 | -------------------------------------------------------------------------------- 
/splunk-elb-classic-access-logs-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Forward Classic Load Balancer Access Logs from S3 to Splunk via AWS Lambda 3 | * 4 | * This function streams access logs to Splunk Enterprise using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to log to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 13 | * To create a new token for this Lambda function, refer to Splunk Docs: 14 | * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token 15 | * 16 | * For details about Splunk logging library used below: https://github.com/splunk/splunk-javascript-logging 17 | */ 18 | 19 | 'use strict'; 20 | 21 | const loggerConfig = { 22 | url: process.env.SPLUNK_HEC_URL, 23 | token: process.env.SPLUNK_HEC_TOKEN, 24 | maxBatchCount: 0, // Manually flush events 25 | maxRetries: 3, // Retry 3 times 26 | }; 27 | 28 | const SplunkLogger = require('splunk-logging').Logger; 29 | const aws = require('aws-sdk'); 30 | 31 | const logger = new SplunkLogger(loggerConfig); 32 | const s3 = new aws.S3({ apiVersion: '2006-03-01' }); 33 | 34 | exports.handler = (event, context, callback) => { 35 | console.log('Received event:', JSON.stringify(event, null, 2)); 36 | 37 | // First, configure logger to automatically add Lambda metadata and to hook into Lambda callback 38 | configureLogger(context, callback); // eslint-disable-line no-use-before-define 39 | 40 | // Get the S3 object from the S3 put event 41 | const bucket = event.Records[0].s3.bucket.name; 42 | const key = 
decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' ')); 43 | const params = { 44 | Bucket: bucket, 45 | Key: key, 46 | }; 47 | s3.getObject(params, (error, data) => { 48 | if (error) { 49 | console.log(error); 50 | const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`; 51 | console.log(message); 52 | callback(message); 53 | } else { 54 | console.log(`Retrieved access log: LastModified="${data.LastModified}" ContentLength=${data.ContentLength}`); 55 | const payload = data.Body; 56 | const parsed = payload.toString('ascii'); 57 | const logEntries = parsed.split('\n'); 58 | let count = 0; 59 | let time; 60 | 61 | if (logEntries) { 62 | logEntries.forEach((logEntry) => { 63 | if (logEntry) { 64 | // Extract timestamp as 1st field in log entry 65 | // For more details: http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/access-log-collection.html#access-log-entry-format 66 | time = logEntry.split(' ')[0]; 67 | 68 | /* Send log entry to Splunk with optional metadata properties such as time, index, source, sourcetype, and host. 69 | - Set or remove metadata properties as needed. 
For descripion of each property, refer to: 70 | http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ 71 | logger.send({ 72 | message: logEntry, 73 | metadata: { 74 | time: new Date(time).getTime() / 1000, 75 | host: 'serverless', 76 | source: `s3://${bucket}/${key}`, 77 | sourcetype: 'aws:elb:accesslogs', 78 | //index: 'main', 79 | }, 80 | }); 81 | count += 1; 82 | } 83 | }); 84 | console.log(`Processed ${count} log entries`); 85 | } 86 | 87 | logger.flush((err, resp, body) => { 88 | // Request failure or valid response from Splunk with HEC error code 89 | if (err || (body && body.code !== 0)) { 90 | // If failed, error will be handled by pre-configured logger.error() below 91 | } else { 92 | // If succeeded, body will be { text: 'Success', code: 0 } 93 | console.log('Response from Splunk:', body); 94 | console.log(`Successfully forwarded ${count} log entries.`); 95 | callback(null, count); // Return number of log events 96 | } 97 | }); 98 | } 99 | }); 100 | }; 101 | 102 | const configureLogger = (context, callback) => { 103 | // Override SplunkLogger default formatter 104 | logger.eventFormatter = (event) => { 105 | // Enrich event only if it is an object 106 | if (typeof event === 'object' && !Object.prototype.hasOwnProperty.call(event, 'awsRequestId')) { 107 | // Add awsRequestId from Lambda context for request tracing 108 | event.awsRequestId = context.awsRequestId; // eslint-disable-line no-param-reassign 109 | } 110 | return event; 111 | }; 112 | 113 | // Set common error handler for logger.send() and logger.flush() 114 | logger.error = (error, payload) => { 115 | console.log('error', error, 'context', payload); 116 | callback(error); 117 | }; 118 | }; 119 | -------------------------------------------------------------------------------- /splunk-elb-classic-access-logs-processor/integration-test.js: -------------------------------------------------------------------------------- 1 | var handler = 
require('./index').handler; 2 | 3 | fs=require('fs'); 4 | var event = JSON.parse(fs.readFileSync('sampleEvent.json', 'utf8')); 5 | 6 | handler( 7 | event, 8 | { 9 | functionName: 'splunk-elb-classic-access-logs-processor', 10 | awsRequestId: Math.floor(Math.random() * Math.pow(10,10)) 11 | }, 12 | function() { 13 | console.log(Array.prototype.slice.call(arguments)); 14 | } 15 | ); 16 | 17 | console.log("Done"); -------------------------------------------------------------------------------- /splunk-elb-classic-access-logs-processor/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-elb-classic-access-logs-processor", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs10.x", 6 | "Description": "Stream Classic ELB access logs from S3 to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "triggers": { 17 | "s3": [ 18 | {} 19 | ] 20 | }, 21 | "version": "0.8.1", 22 | "license": "CC0-1.0", 23 | "tags": [ 24 | "nodejs", 25 | "splunk", 26 | "elb", 27 | "s3", 28 | "classic-elb" 29 | ], 30 | "authors": [ 31 | "Roy Arsan (https://www.splunk.com)" 32 | ], 33 | "roleTemplates": [ 34 | "S3" 35 | ] 36 | } 37 | 38 | -------------------------------------------------------------------------------- /splunk-elb-classic-access-logs-processor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-elb-classic-access-logs-processor", 3 | "version": "1.0.0", 4 | "description": "AWS Lambda function blueprint to stream Classic ELB access logs from S3 to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc --ignore-path ../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "postinstall": "node -e \"var 
s='../lib',d='lib',fs=require('fs');fs.exists(d,function(e){e||fs.symlinkSync(s,d,'dir')});\"", 10 | "pretest": "npm run lint", 11 | "build:zip": "zip -i \\*.js \\*.json -r splunk-elb-classic-access-logs-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 12 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 13 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides S3CLBLogsBucketName=\"$npm_config_parm_s3_logs_bucket_name\" S3EventType=\"$npm_config_parm_s3_event_type\" S3Prefix=\"$npm_config_parm_s3_prefix\" S3Suffix=\"$npm_config_parm_s3_suffix\" SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 14 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 15 | "clean:zip": "rm -f splunk-elb-classic-access-logs-processor.zip", 16 | "clean:template": "rm -f template.output.yaml", 17 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 18 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 19 | }, 20 | "keywords": [ 21 | "splunk", 22 | "lambda", 23 | "elb", 24 | "classic-elb", 25 | "hec" 26 | ], 27 | "authors": [ 28 | "Roy Arsan (https://www.splunk.com)", 29 | "Tarik Makota (https://aws.amazon.com)" 30 | ], 31 | "license": "MIT", 32 | "dependencies": { 33 | "splunk-logging": "^0.9.3" 34 | }, 35 | "devDependencies": { 36 | "aws-sdk": "^2.89.0", 37 | "babel-eslint": "^7.1.0", 38 | "eslint": "^3.12.0", 39 | "eslint-config-airbnb": "^13.0.0", 40 | "eslint-plugin-babel": "^4.0.0", 41 | "eslint-plugin-import": "^2.2.0", 42 | "eslint-plugin-jsx-a11y": "^2.2.3", 43 | 
"eslint-plugin-react": "^6.7.1" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /splunk-elb-classic-access-logs-processor/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "Records": [ 3 | { 4 | "eventVersion": "2.0", 5 | "eventTime": "1970-01-01T00:00:00.000Z", 6 | "requestParameters": { 7 | "sourceIPAddress": "127.0.0.1" 8 | }, 9 | "s3": { 10 | "configurationId": "testConfigRule", 11 | "object": { 12 | "eTag": "0123456789abcdef0123456789abcdef", 13 | "sequencer": "0A1B2C3D4E5F678901", 14 | "key": "elb-classic/1234567890_elasticloadbalancing_us-west-2_Splunk-En-SplunkSH-CBJD72EA9E5K_20170525T2025Z_52.43.6.3_4rw05n16.log", 15 | "size": 1024 16 | }, 17 | "bucket": { 18 | "arn": "arn:aws:s3:::lambda-sample-events", 19 | "name": "lambda-sample-events", 20 | "ownerIdentity": { 21 | "principalId": "EXAMPLE" 22 | } 23 | }, 24 | "s3SchemaVersion": "1.0" 25 | }, 26 | "responseElements": { 27 | "x-amz-id-2": "EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH", 28 | "x-amz-request-id": "EXAMPLE123456789" 29 | }, 30 | "awsRegion": "us-east-1", 31 | "eventName": "ObjectCreated:Put", 32 | "userIdentity": { 33 | "principalId": "EXAMPLE" 34 | }, 35 | "eventSource": "aws:s3" 36 | } 37 | ] 38 | } -------------------------------------------------------------------------------- /splunk-elb-classic-access-logs-processor/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application to stream access logs of Classic ELB from S3 to Splunk HTTP Event Collector (HEC). 5 | 6 | This SAM template creates the Lambda function & associated policy + IAM role, and new S3 bucket 7 | with enabled Events notification to this Lambda function. Direct your load balancers access logs to this newly created S3 Bucket. 
8 | To enable access logs on classic load balancers, refer to AWS docs: 9 | http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html 10 | 11 | Last Modified: 29 Nov, 2017 12 | Authors: Roy Arsan , Tarik Makota 13 | 14 | Parameters: 15 | S3CLBLogsBucketName: 16 | Type: "String" 17 | Description: "Name of S3 bucket that will store access logs. This bucket will be created for you. Name must be unique across all existing bucket names in Amazon S3." 18 | 19 | S3Prefix: 20 | Type: "String" 21 | Description: "Optional prefix to limit the notifications to objects with keys that start with matching characters. e.g. uploadedImages/" 22 | 23 | S3Suffix: 24 | Type: "String" 25 | Description: "Optional suffix to limit the notifications to objects with keys that end with matching characters.. e.g. .jpg" 26 | 27 | SplunkHttpEventCollectorURL: 28 | Type: "String" 29 | Description: "URL address of your Splunk HTTP event collector endpoint" 30 | 31 | SplunkHttpEventCollectorToken: 32 | Type: "String" 33 | Description: "Token of your Splunk HTTP event collector endpoint" 34 | 35 | Outputs: 36 | SplunkCLBLogsProcessor: 37 | Description: "Splunk S3 Bucket Stream Lambda Function ARN" 38 | Value: !Ref SplunkCLBProcessorFunction 39 | 40 | Resources: 41 | SplunkCLBProcessorFunction: 42 | Type: 'AWS::Serverless::Function' 43 | Properties: 44 | Description: Stream CLB events from S3 to Splunk HTTP event collector 45 | Handler: index.handler 46 | Runtime: nodejs10.x 47 | CodeUri: ./splunk-elb-classic-access-logs-processor.zip 48 | Policies: 49 | - S3CrudPolicy: 50 | BucketName: !Ref S3CLBLogsBucketName 51 | MemorySize: 512 52 | Timeout: 10 53 | Environment: 54 | Variables: 55 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 56 | SPLUNK_HEC_TOKEN: !Ref SplunkHttpEventCollectorToken 57 | Events: 58 | LogUpload: 59 | Type: S3 60 | Properties: 61 | Bucket: !Ref Bucket 62 | Events: s3:ObjectCreated:* 63 | Filter: 64 | S3Key: 65 | Rules: 66 | - 67 | Name: prefix 68 | 
Value: !Sub "${S3Prefix}" 69 | - 70 | Name: suffix 71 | Value: !Sub "${S3Suffix}" 72 | Bucket: 73 | Type: AWS::S3::Bucket 74 | Properties: 75 | BucketName: !Ref S3CLBLogsBucketName 76 | -------------------------------------------------------------------------------- /splunk-guardduty-processor/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-s3-bucket 2 | parm_hec_url = https://192.168.0.0:8088/services/collector 3 | parm_hec_token = FAKE-HEC-TOKEN 4 | parm_stack_name = FakeGuardDutyProcessorStack 5 | -------------------------------------------------------------------------------- /splunk-guardduty-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Splunk logging for AWS Lambda 3 | * 4 | * This function logs to Splunk Enterprise using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to log to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 
13 | * To create a new token for this Lambda function, refer to Splunk Docs: 14 | * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token 15 | * 16 | * For details about Splunk logging library used below: https://github.com/splunk/splunk-javascript-logging 17 | */ 18 | 19 | 'use strict'; 20 | 21 | const loggerConfig = { 22 | url: process.env.SPLUNK_HEC_URL, 23 | token: process.env.SPLUNK_HEC_TOKEN, 24 | maxBatchCount: 0, // Manually flush events 25 | maxRetries: 3, // Retry 3 times 26 | }; 27 | 28 | const SplunkLogger = require('splunk-logging').Logger; 29 | 30 | const logger = new SplunkLogger(loggerConfig); 31 | 32 | exports.handler = (event, context, callback) => { 33 | console.log('Received event:', JSON.stringify(event, null, 2)); 34 | 35 | // First, configure logger to automatically add Lambda metadata and to hook into Lambda callback 36 | configureLogger(context, callback); // eslint-disable-line no-use-before-define 37 | 38 | // Log object or string with optional metadata parameters - useful to set input settings per event vs HEC token-level 39 | // For descripion of each metadata parameter, refer to: 40 | // http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ 41 | logger.send({ 42 | message: event, 43 | metadata: { 44 | time: Date.now(), 45 | host: 'sam', 46 | source: 'aws_cloudwatchevents_guardduty', 47 | sourcetype: 'aws:cloudwatch:guardduty', 48 | //index: 'main', 49 | }, 50 | }); 51 | 52 | // Send all the events in a single batch to Splunk 53 | logger.flush((err, resp, body) => { 54 | // Request failure or valid response from Splunk with HEC error code 55 | if (err || (body && body.code !== 0)) { 56 | // If failed, error will be handled by pre-configured logger.error() below 57 | } else { 58 | // If succeeded, body will be { text: 'Success', code: 0 } 59 | console.log('Response from Splunk:', body); 60 | callback(null, event.key1); // Echo back the first 
key value 61 | } 62 | }); 63 | }; 64 | 65 | const configureLogger = (context, callback) => { 66 | // Override SplunkLogger default formatter 67 | logger.eventFormatter = (event) => { 68 | // Enrich event here if needed but be aware it may break Splunk side parsing 69 | // It is best to enrich the actual GuardDuty finding object 70 | return event; 71 | }; 72 | 73 | // Set common error handler for logger.send() and logger.flush() 74 | logger.error = (error, payload) => { 75 | console.log('error', error, 'context', payload); 76 | callback(error); 77 | }; 78 | }; 79 | -------------------------------------------------------------------------------- /splunk-guardduty-processor/integration-test.js: -------------------------------------------------------------------------------- 1 | var handler = require('./index').handler; 2 | 3 | fs=require('fs'); 4 | var event = JSON.parse(fs.readFileSync('sampleEvent.json', 'utf8')); 5 | 6 | handler( 7 | event, 8 | { 9 | functionName: 'splunk-guardduty-processor', 10 | awsRequestId: Math.floor(Math.random() * Math.pow(10,10)) 11 | }, 12 | function() { 13 | console.log(Array.prototype.slice.call(arguments)); 14 | } 15 | ); 16 | 17 | console.log("Done"); -------------------------------------------------------------------------------- /splunk-guardduty-processor/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-guardduty-processor", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs10.x", 6 | "Description": "Stream findings from GuardDuty to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "version": "0.8.1", 17 | "license": "CC0-1.0", 18 | "tags": [ 19 | "nodejs", 20 | "splunk", 21 | "logging", 22 | "guardduty" 23 | ], 24 | "authors": [ 25 | "Nicolas Stone (https://www.splunk.com)" 26 | ], 27 | 
"roleTemplates": [ 28 | ] 29 | } 30 | 31 | -------------------------------------------------------------------------------- /splunk-guardduty-processor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-guardduty-processor", 3 | "version": "1.0.0", 4 | "description": "Stream findings from GuardDuty to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc --ignore-path ../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "pretest": "npm run lint", 10 | "build:zip": "zip -i \\*.js \\*.json -r splunk-guardduty-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 11 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 12 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 13 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 14 | "clean:zip": "rm -f splunk-guardduty-processor.zip", 15 | "clean:template": "rm -f template.output.yaml", 16 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 17 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 18 | }, 19 | "keywords": [ 20 | "splunk", 21 | "lambda", 22 | "logging", 23 | "guardduty", 24 | "hec" 25 | ], 26 | "authors": [ 27 | "Nicolas Stone (https://www.splunk.com)" 28 | ], 29 | "license": "MIT", 30 | "dependencies": { 31 | "splunk-logging": "^0.9.3" 32 | }, 33 | "devDependencies": { 34 | "babel-eslint": "^7.1.0", 35 | "eslint": "^3.12.0", 
36 | "eslint-config-airbnb": "^13.0.0", 37 | "eslint-plugin-babel": "^4.0.0", 38 | "eslint-plugin-import": "^2.2.0", 39 | "eslint-plugin-jsx-a11y": "^2.2.3", 40 | "eslint-plugin-react": "^6.7.1" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /splunk-guardduty-processor/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "key1": "a", 3 | "key2": "b", 4 | "key3": "c" 5 | } 6 | -------------------------------------------------------------------------------- /splunk-guardduty-processor/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application example that streams findings from GuardDuty to Splunk's HTTP event collector. 5 | 6 | This SAM template creates a Lambda function with its IAM execution role. The created Lambda 7 | function includes the JavaScript client library for logging to Splunk HEC: 8 | https://github.com/splunk/splunk-javascript-logging 9 | 10 | Last Modified: 24 Sep, 2018 11 | Authors: Nicolas Stone 12 | 13 | Parameters: 14 | SplunkHttpEventCollectorURL: 15 | Type: "String" 16 | Description: "URL address of your Splunk HTTP event collector endpoint" 17 | 18 | SplunkHttpEventCollectorToken: 19 | Type: "String" 20 | Description: "Token of your Splunk HTTP event collector endpoint" 21 | 22 | Outputs: 23 | SplunkLoggingFunction: 24 | Description: "Stream findings from GuardDuty to Splunk's HTTP event collector" 25 | Value: !GetAtt SplunkGuardDutyProcessorFunction.Arn 26 | 27 | Resources: 28 | SplunkGuardDutyProcessorFunction: 29 | Type: 'AWS::Serverless::Function' 30 | Properties: 31 | Handler: index.handler 32 | Runtime: nodejs10.x 33 | CodeUri: ./splunk-guardduty-processor.zip 34 | Description: Stream findings from GuardDuty to Splunk's HTTP event collector 35 | MemorySize: 512 36 
| Timeout: 30 37 | Environment: 38 | Variables: 39 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 40 | SPLUNK_HEC_TOKEN: !Ref SplunkHttpEventCollectorToken 41 | 42 | -------------------------------------------------------------------------------- /splunk-iot-processor/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-s3-bucket-name 2 | parm_iot_topic_filter = "fakeTopic/#" 3 | parm_iot_sql_version = 2016-03-23 4 | parm_hec_url = https://192.168.0.1:8088/services/collector 5 | parm_hec_token = FAKE-FAKE-FAKE 6 | parm_stack_name = splunk-iot-stack 7 | -------------------------------------------------------------------------------- /splunk-iot-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Stream events from AWS IoT to Splunk 3 | * 4 | * This function streams AWS IoT events to Splunk using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to stream events to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 13 | * To create a new token for this Lambda function, refer to Splunk Docs: 14 | * http://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector#Create_an_Event_Collector_token 15 | * 16 | * Once Lambda function created, add an AWS IoT Rule with Lambda action set to this function name. 
17 | * For more details, including adding permissions to AWS IoT to invoke Lambda, refer to AWS Docs: 18 | * http://docs.aws.amazon.com/iot/latest/developerguide/iot-lambda-rule.html#iot-create-lambda-rule 19 | * http://docs.aws.amazon.com/iot/latest/developerguide/lambda-rule.html 20 | * 21 | * For details about Splunk logging library used below: https://github.com/splunk/splunk-javascript-logging 22 | */ 23 | 24 | 'use strict'; 25 | 26 | const loggerConfig = { 27 | url: process.env.SPLUNK_HEC_URL, 28 | token: process.env.SPLUNK_HEC_TOKEN, 29 | maxBatchCount: 0, // Manually flush events 30 | maxRetries: 3, // Retry 3 times 31 | }; 32 | 33 | const SplunkLogger = require('splunk-logging').Logger; 34 | 35 | const logger = new SplunkLogger(loggerConfig); 36 | 37 | exports.handler = (event, context, callback) => { 38 | console.log('Received event:', JSON.stringify(event, null, 2)); 39 | 40 | // First, configure logger to automatically add Lambda metadata and to hook into Lambda callback 41 | configureLogger(context, callback); // eslint-disable-line no-use-before-define 42 | 43 | /* Send item to Splunk with optional metadata properties such as time, index, source, sourcetype, and host. 44 | - Set time value below if you want to explicitly set event timestamp. 45 | - Set or remove other metadata properties as needed. 
For descripion of each property, refer to: 46 | http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector */ 47 | logger.send({ 48 | message: event, 49 | metadata: { 50 | host: 'serverless', 51 | source: `lambda:${context.functionName}`, 52 | sourcetype: 'httpevent', 53 | //time: Date.now(), 54 | //index: 'main', 55 | }, 56 | }); 57 | 58 | // Flush event to Splunk 59 | logger.flush((err, resp, body) => { 60 | // Request failure or valid response from Splunk with HEC error code 61 | if (err || (body && body.code !== 0)) { 62 | // If failed, error will be handled by pre-configured logger.error() below 63 | } else { 64 | // If succeeded, body will be { text: 'Success', code: 0 } 65 | console.log('Response from Splunk:', body); 66 | callback(null, event); // Echo back event itself 67 | } 68 | }); 69 | }; 70 | 71 | const configureLogger = (context, callback) => { 72 | // Override SplunkLogger default formatter 73 | logger.eventFormatter = (event) => { 74 | // Enrich event only if it is an object 75 | if (typeof event === 'object' && !Object.prototype.hasOwnProperty.call(event, 'awsRequestId')) { 76 | // Add awsRequestId from Lambda context for request tracing 77 | event.awsRequestId = context.awsRequestId; // eslint-disable-line no-param-reassign 78 | } 79 | return event; 80 | }; 81 | 82 | // Set common error handler for logger.send() and logger.flush() 83 | logger.error = (error, payload) => { 84 | console.log('error', error, 'context', payload); 85 | callback(error); 86 | }; 87 | }; 88 | -------------------------------------------------------------------------------- /splunk-iot-processor/integration-test.js: -------------------------------------------------------------------------------- 1 | var handler = require('./index').handler; 2 | 3 | fs=require('fs'); 4 | var event = JSON.parse(fs.readFileSync('sampleEvent.json', 'utf8')); 5 | 6 | handler( 7 | event, 8 | { 9 | functionName: 'splunk-iot-processor', 10 | awsRequestId: 
Math.floor(Math.random() * Math.pow(10,10)) 11 | }, 12 | function() { 13 | console.log(Array.prototype.slice.call(arguments)); 14 | } 15 | ); 16 | 17 | console.log("Done"); -------------------------------------------------------------------------------- /splunk-iot-processor/lambda.json: -------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-iot-processor", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs10.x", 6 | "Description": "Stream events from AWS IoT to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "triggers": { 17 | "iot": [ 18 | {} 19 | ] 20 | }, 21 | "version": "0.8.1", 22 | "license": "CC0-1.0", 23 | "tags": [ 24 | "nodejs", 25 | "splunk", 26 | "iot" 27 | ], 28 | "authors": [ 29 | "Roy Arsan (https://www.splunk.com)", 30 | "Glenn Block (https://www.splunk.com)" 31 | ], 32 | "roleTemplates": [ 33 | ] 34 | } 35 | 36 | -------------------------------------------------------------------------------- /splunk-iot-processor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-iot-processor", 3 | "version": "1.0.0", 4 | "description": "AWS Lambda function blueprint to stream events from AWS IoT to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc --ignore-path ../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "pretest": "npm run lint", 10 | "build:zip": "zip -i \\*.js \\*.json -r splunk-iot-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 11 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 12 |
"build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides IoTTopicFilter=\"$npm_config_parm_iot_topic_filter\" IoTSqlVersion=\"$npm_config_parm_iot_sql_version\" SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 13 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 14 | "clean:zip": "rm -f splunk-iot-processor.zip", 15 | "clean:template": "rm -f template.output.yaml", 16 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 17 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 18 | }, 19 | "keywords": [ 20 | "splunk", 21 | "lambda", 22 | "iot", 23 | "hec" 24 | ], 25 | "authors": [ 26 | "Roy Arsan (https://www.splunk.com)", 27 | "Glenn Block (https://www.splunk.com)", 28 | "Bill Bartlett (https://www.splunk.com)", 29 | "Tarik Makota (https://aws.amazon.com)" 30 | ], 31 | "license": "MIT", 32 | "dependencies": { 33 | "splunk-logging": "^0.9.3" 34 | }, 35 | "devDependencies": { 36 | "babel-eslint": "^7.1.0", 37 | "eslint": "^3.12.0", 38 | "eslint-config-airbnb": "^13.0.0", 39 | "eslint-plugin-babel": "^4.0.0", 40 | "eslint-plugin-import": "^2.2.0", 41 | "eslint-plugin-jsx-a11y": "^2.2.3", 42 | "eslint-plugin-react": "^6.7.1" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /splunk-iot-processor/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "serialNumber": "G030JF055364XVRB", 3 | "batteryVoltage": "25mV", 4 | "clickType": "DOUBLE" 5 | } -------------------------------------------------------------------------------- /splunk-iot-processor/template.yaml: -------------------------------------------------------------------------------- 1 | 
AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application to respond to events from IoT and send to Splunk HTTP Event Collector (HEC). 5 | 6 | This SAM template creates the Lambda function with its IAM execution role, and IoT topic rule, 7 | along with a Lambda permission to grant AWS IoT permission to invoke this function. 8 | 9 | Last Modified: 29 Nov, 2017 10 | Authors: Roy Arsan , Tarik Makota 11 | 12 | Parameters: 13 | IoTTopicFilter: 14 | Type: "String" 15 | Description: "IoT topic filter. To learn more about topics and topic filters, see http://docs.aws.amazon.com/iot/latest/developerguide/topics.html" 16 | 17 | IoTSqlVersion: 18 | Type: "String" 19 | Description: "Version of the SQL rule engine. Allowed values are '2016-03-23' or '2015-10-08'" 20 | Default: "2016-03-23" 21 | 22 | SplunkHttpEventCollectorURL: 23 | Type: "String" 24 | Description: "URL address of your Splunk HTTP event collector endpoint" 25 | 26 | SplunkHttpEventCollectorToken: 27 | Type: "String" 28 | Description: "Token of your Splunk HTTP event collector endpoint" 29 | 30 | Outputs: 31 | SplunkIoTProcessorFunction: 32 | Description: "Splunk IoT Serverless Function" 33 | Value: !GetAtt IoTFunction.Arn 34 | 35 | Resources: 36 | IoTFunction: 37 | Type: AWS::Serverless::Function 38 | Properties: 39 | Handler: index.handler 40 | Runtime: nodejs10.x 41 | CodeUri: ./splunk-iot-processor.zip 42 | Description: Capture events from IOT topic and send to Splunk HTTP event collector 43 | MemorySize: 512 44 | Timeout: 10 45 | Environment: 46 | Variables: 47 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 48 | SPLUNK_HEC_TOKEN: !Ref SplunkHttpEventCollectorToken 49 | Events: 50 | IoT: 51 | Type: IoTRule 52 | Properties: 53 | AwsIotSqlVersion: !Ref IoTSqlVersion 54 | Sql: !Join [ "", [ "SELECT * FROM '", !Ref IoTTopicFilter, "'"] ] -------------------------------------------------------------------------------- 
/splunk-kinesis-stream-processor/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-s3-bucket-name 2 | parm_stream_name = fake-stream-name 3 | parm_stream_start_position = LATEST 4 | parm_hec_url = https://192.168.0.0:8088/services/collector 5 | parm_hec_token = FAKE-FAKE-FAKE 6 | parm_stack_name = fakeStackName -------------------------------------------------------------------------------- /splunk-kinesis-stream-processor/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Stream events from AWS Kinesis to Splunk 3 | * 4 | * This function streams AWS Kinesis events to Splunk using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to stream events to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 
41 | const data = Buffer.from(record.kinesis.data, 'base64').toString('ascii');
For description of each property, refer to:
26 |     "kinesis"
27 |   ],
"SPLUNK_HEC_URL=$npm_config_parm_hec_url SPLUNK_HEC_TOKEN=$npm_config_parm_hec_token node integration-test.js", 9 | "pretest": "npm run lint", 10 | "build:zip": "zip -i \\*.js \\*.json -r splunk-kinesis-stream-processor.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 11 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 12 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides KinesisStreamName=\"$npm_config_parm_stream_name\" KinesisStreamStartingPosition=\"$npm_config_parm_stream_start_position\" SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 13 | "build": "npm run build:zip && npm run build:template && npm run build:deployment", 14 | "clean:zip": "rm -f splunk-kinesis-stream-processor.zip", 15 | "clean:template": "rm -f template.output.yaml", 16 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 17 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment", 18 | "kinesis:put": "aws kinesis put-record --stream-name \"$npm_config_parm_stream_name\" --data \"{'message': 'Hello Test from npm'}\" --partition-key 123" 19 | }, 20 | "keywords": [ 21 | "splunk", 22 | "lambda", 23 | "kinesis", 24 | "hec" 25 | ], 26 | "authors": [ 27 | "Roy Arsan (https://www.splunk.com)", 28 | "Glenn Block (https://www.splunk.com)", 29 | "Bill Bartlett (https://www.splunk.com)", 30 | "Tarik Makota (https://aws.amazon.com)" 31 | ], 32 | "license": "MIT", 33 | "dependencies": { 34 | "splunk-logging": "^0.9.3" 35 | }, 36 | "devDependencies": { 37 | "babel-eslint": "^7.1.0", 38 | "eslint": "^3.12.0", 39 | "eslint-config-airbnb": 
"^13.0.0", 40 | "eslint-plugin-babel": "^4.0.0", 41 | "eslint-plugin-import": "^2.2.0", 42 | "eslint-plugin-jsx-a11y": "^2.2.3", 43 | "eslint-plugin-react": "^6.7.1" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /splunk-kinesis-stream-processor/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "Records": [ 3 | { 4 | "eventID": "shardId-000000000000:49545115243490985018280067714973144582180062593244200961", 5 | "eventVersion": "1.0", 6 | "kinesis": { 7 | "approximateArrivalTimestamp": 1428537600, 8 | "partitionKey": "partitionKey-3", 9 | "data": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0IDEyMy4=", 10 | "kinesisSchemaVersion": "1.0", 11 | "sequenceNumber": "49545115243490985018280067714973144582180062593244200961" 12 | }, 13 | "invokeIdentityArn": "arn:aws:iam::EXAMPLE", 14 | "eventName": "aws:kinesis:record", 15 | "eventSourceARN": "arn:aws:kinesis:EXAMPLE", 16 | "eventSource": "aws:kinesis", 17 | "awsRegion": "us-east-1" 18 | } 19 | ] 20 | } -------------------------------------------------------------------------------- /splunk-kinesis-stream-processor/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application to stream events from AWS Kinesis to Splunk HTTP Event Collector (HEC). 5 | 6 | This SAM template creates the Lambda function with its IAM execution role and the event source mapping to Kinesis stream. 7 | Kinesis Stream must be in same region as the region where this stack is created. 8 | 9 | Last Modified: 29 Nov, 2017 10 | Authors: Roy Arsan , Tarik Makota 11 | 12 | Parameters: 13 | KinesisStreamName: 14 | Type: "String" 15 | Description: "Name of a Kinesis stream (must be in the same region). The Lambda function will be invoked whenever this stream is updated." 
45 |       Runtime: nodejs10.x
-------------------------------------------------------------------------------- /splunk-logging/.npmrc.sample: -------------------------------------------------------------------------------- 1 | sam_s3_bucket_name = fake-s3-bucket 2 | parm_hec_url = https://192.168.0.0:8088/services/collector 3 | parm_hec_token = FAKE-HEC-TOKEN 4 | parm_stack_name = FakeLoggingStack 5 | -------------------------------------------------------------------------------- /splunk-logging/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Splunk logging for AWS Lambda 3 | * 4 | * This function logs to Splunk Enterprise using Splunk's HTTP event collector API. 5 | * 6 | * Define the following Environment Variables in the console below to configure 7 | * this function to log to your Splunk host: 8 | * 9 | * 1. SPLUNK_HEC_URL: URL address for your Splunk HTTP event collector endpoint. 10 | * Default port for event collector is 8088. Example: https://host.com:8088/services/collector 11 | * 12 | * 2. SPLUNK_HEC_TOKEN: Token for your Splunk HTTP event collector. 
55 |     // For description of each metadata parameter, refer to:
56 |     // http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector
3 | var fs = require('fs');
-------------------------------------------------------------------------------- 1 | { 2 | "lambda": { 3 | "FunctionName": "splunk-logging", 4 | "Handler": "index.handler", 5 | "Runtime": "nodejs10.x", 6 | "Description": "Demonstrates logging from AWS Lambda code to Splunk's HTTP event collector", 7 | "Environment": { 8 | "Variables": { 9 | "SPLUNK_HEC_URL": "", 10 | "SPLUNK_HEC_TOKEN": "" 11 | } 12 | }, 13 | "MemorySize": 512, 14 | "Timeout": 10 15 | }, 16 | "version": "0.8.1", 17 | "license": "CC0-1.0", 18 | "tags": [ 19 | "nodejs", 20 | "splunk", 21 | "logging" 22 | ], 23 | "authors": [ 24 | "Roy Arsan (https://www.splunk.com)", 25 | "Glenn Block (https://www.splunk.com)" 26 | ], 27 | "roleTemplates": [ 28 | ] 29 | } 30 | 31 | -------------------------------------------------------------------------------- /splunk-logging/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "splunk-logging", 3 | "version": "1.0.0", 4 | "description": "AWS Lambda function blueprint to demonstrate logging from AWS Lambda code to Splunk's HTTP event collector", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint -c ../.eslintrc --ignore-path ../.eslintignore .", 8 | "test": "node integration-test.js", 9 | "pretest": "npm run lint", 10 | "build:zip": "zip -i \\*.js \\*.json -r splunk-logging.zip index.js $(npm list --prod --parseable | sed -nE 's/.*\\/(node_modules\\/.*)/\\1/p' | awk '{printf \"%s \", $0;}')", 11 | "build:template": "aws cloudformation package --template template.yaml --s3-bucket $npm_config_sam_s3_bucket_name --output-template-file template.output.yaml", 12 | "build:deployment": "aws cloudformation deploy --template-file $(pwd)/template.output.yaml --parameter-overrides SplunkHttpEventCollectorURL=\"$npm_config_parm_hec_url\" SplunkHttpEventCollectorToken=\"$npm_config_parm_hec_token\" --stack-name $npm_config_parm_stack_name --capabilities \"CAPABILITY_IAM\"", 13 | "build": "npm run build:zip && 
npm run build:template && npm run build:deployment", 14 | "clean:zip": "rm -f splunk-logging.zip", 15 | "clean:template": "rm -f template.output.yaml", 16 | "clean:deployment": "aws cloudformation delete-stack --stack-name $npm_config_parm_stack_name", 17 | "clean": "npm run clean:zip && npm run clean:template && npm run clean:deployment" 18 | }, 19 | "keywords": [ 20 | "splunk", 21 | "lambda", 22 | "logging", 23 | "hec" 24 | ], 25 | "authors": [ 26 | "Roy Arsan (https://www.splunk.com)", 27 | "Glenn Block (https://www.splunk.com)", 28 | "Tarik Makota (https://aws.amazon.com)" 29 | ], 30 | "license": "MIT", 31 | "dependencies": { 32 | "splunk-logging": "^0.9.3" 33 | }, 34 | "devDependencies": { 35 | "babel-eslint": "^7.1.0", 36 | "eslint": "^3.12.0", 37 | "eslint-config-airbnb": "^13.0.0", 38 | "eslint-plugin-babel": "^4.0.0", 39 | "eslint-plugin-import": "^2.2.0", 40 | "eslint-plugin-jsx-a11y": "^2.2.3", 41 | "eslint-plugin-react": "^6.7.1" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /splunk-logging/sampleEvent.json: -------------------------------------------------------------------------------- 1 | { 2 | "key1": "a", 3 | "key2": "b", 4 | "key3": "c" 5 | } 6 | -------------------------------------------------------------------------------- /splunk-logging/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: 'AWS::Serverless-2016-10-31' 3 | Description: > 4 | Serverless application example that logs events from Lambda to Splunk HTTP Event Collector (HEC). 5 | 6 | This SAM template creates a Lambda function with its IAM execution role. 
The created Lambda 7 | function includes the JavaScript client library for logging to Splunk HEC: 8 | https://github.com/splunk/splunk-javascript-logging 9 | 10 | Last Modified: 06 Oct, 2017 11 | Authors: Roy Arsan , Tarik Makota 12 | 13 | Parameters: 14 | SplunkHttpEventCollectorURL: 15 | Type: "String" 16 | Description: "URL address of your Splunk HTTP event collector endpoint" 17 | 18 | SplunkHttpEventCollectorToken: 19 | Type: "String" 20 | Description: "Token of your Splunk HTTP event collector endpoint" 21 | 22 | Outputs: 23 | SplunkLoggingFunction: 24 | Description: "Splunk Logging Lambda Function ARN" 25 | Value: !GetAtt SplunkLoggingFunction.Arn 26 | 27 | Resources: 28 | SplunkLoggingFunction: 29 | Type: 'AWS::Serverless::Function' 30 | Properties: 31 | Handler: index.handler 32 | Runtime: nodejs10.x 33 | CodeUri: ./splunk-logging.zip 34 | Description: Demonstrates logging from AWS Lambda code to Splunk HTTP event collector 35 | MemorySize: 512 36 | Timeout: 30 37 | Environment: 38 | Variables: 39 | SPLUNK_HEC_URL: !Ref SplunkHttpEventCollectorURL 40 | SPLUNK_HEC_TOKEN: !Ref SplunkHttpEventCollectorToken 41 | 42 | --------------------------------------------------------------------------------