├── .gitignore ├── LICENSE ├── README.md ├── configuration └── cloudformation │ ├── README.md │ ├── cwl_kinesis.template │ └── cwl_kinesis_custom_vpc.template ├── pom.xml ├── src ├── main │ ├── java │ │ └── com │ │ │ └── sumologic │ │ │ ├── client │ │ │ ├── CloudWatchMessageModelSumologicTransformer.java │ │ │ ├── DefaultKinesisMessageModelSumologicTransformer.java │ │ │ ├── KinesisConnectorForSumologicConfiguration.java │ │ │ ├── SumologicExecutor.java │ │ │ ├── SumologicKinesisUtils.java │ │ │ ├── SumologicMessageModelPipeline.java │ │ │ ├── SumologicSender.java │ │ │ ├── implementations │ │ │ │ ├── SumologicEmitter.java │ │ │ │ └── SumologicTransformer.java │ │ │ └── model │ │ │ │ ├── CloudWatchLogsMessageModel.java │ │ │ │ ├── LogEvent.java │ │ │ │ └── SimpleKinesisMessageModel.java │ │ │ └── kinesis │ │ │ ├── BatchedStreamSource.java │ │ │ ├── KinesisConnectorExecutor.java │ │ │ ├── KinesisConnectorExecutorBase.java │ │ │ ├── KinesisConnectorMetricsExecutor.java │ │ │ ├── KinesisConnectorRecordProcessor.java │ │ │ ├── KinesisConnectorRecordProcessorFactory.java │ │ │ ├── StreamSource.java │ │ │ └── utils │ │ │ └── KinesisUtils.java │ └── resources │ │ ├── SumologicConnector.properties.stub │ │ └── log4j.properties └── test │ └── java │ └── com │ └── sumologic │ └── client │ ├── CloudWatchMessageModelSumologicTransformerTest.java │ ├── SumologicKinesisUtilsTest.java │ ├── SumologicSenderTest.java │ └── implementations │ └── SumologicEmitterTest.java └── users.txt /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | 3 | # Mobile Tools for Java (J2ME) 4 | .mtj.tmp/ 5 | 6 | # Package Files # 7 | *.jar 8 | *.war 9 | *.ear 10 | 11 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 12 | hs_err_pid* 13 | 14 | *.properties 15 | !log4j.properties 16 | .idea/ 17 | *.iml 18 | .scratch/ 19 | target/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | Copyright 2015, Sumo Logic, Inc. 179 | 180 | Licensed under the Apache License, Version 2.0 (the "License"); 181 | you may not use this file except in compliance with the License. 182 | You may obtain a copy of the License at 183 | 184 | http://www.apache.org/licenses/LICENSE-2.0 185 | 186 | Unless required by applicable law or agreed to in writing, software 187 | distributed under the License is distributed on an "AS IS" BASIS, 188 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 189 | See the License for the specific language governing permissions and 190 | limitations under the License. 191 | 192 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kinesis-Sumologic Connector 2 | 3 | The **Kinesis-Sumologic Connector** is a Java connector that acts as a pipeline between an [Amazon Kinesis] stream and a [Sumo Logic] Collection. Data gets fetched from the Kinesis Stream, transformed into a POJO and then sent to the Sumo Logic Collection as JSON. End-user setup instructions can be found in [here](https://help.sumologic.com/03Send-Data/Collect-from-Other-Data-Sources/Amazon-CloudWatch-Logs/Collect-Amazon-CloudWatch-Logs-using-Amazon-Kinesis). 
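The last hop of this pipeline is an HTTP POST of the transformed JSON to a Sumo Logic HTTP source (the `sumologicUrl` property described below). As an optional sanity check that the source URL accepts data before you wire up the stream, you can POST a test message with `curl`; the URL below is a placeholder, not a real endpoint:

```
# Placeholder URL -- substitute the unique URL generated for your HTTP source
curl -X POST \
  -d '{"message": "kinesis-sumologic-connector smoke test"}' \
  https://collectors.sumologic.com/receiver/v1/http/XXXXXXXXXX
```

A `200 OK` response indicates the source is ready to receive data from the connector.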
4 | 
5 | ## Requirements
6 | 
7 | + **Java JDK 1.8**: This connector has been built with Java version 1.8.
8 | + **Maven**: A pom.xml file has been provided to build the connector with Maven.
9 | + **AWS Kinesis Account**: An AWS account with Amazon Kinesis to use as a source of data.
10 | + **Sumo Logic Account**: A Sumo Logic account to use as a destination.
11 | 
12 | ## Overview
13 | 
14 | Incoming records from one (or many) shards of an AWS Kinesis stream will be read using the [Kinesis Client Library]. Records will be:
15 | 
16 | + **Transformed**: Raw records will be transformed into a POJO using a Kinesis model class and then serialized. The transformer to use is specified in the properties file.
17 | + **Filtered**: A filter may be applied to the records. The default filter lets all records pass.
18 | + **Buffered**: A custom buffer may be used to define thresholds that, when crossed, will flush all records into the emitter.
19 | + **Emitted**: The records will be sent to the Sumo Logic Collector.
20 | 
21 | ## Installation
22 | 
23 | The connector can be added to your project from Maven Central by adding the following dependency to a POM file:
24 | 
25 | ```
26 | <dependency>
27 |     <groupId>com.sumologic</groupId>
28 |     <artifactId>kinesis-sumologic-connector</artifactId>
29 |     <version>0.2</version>
30 | </dependency>
31 | ```
32 | 
33 | ## Configuration
34 | 
35 | A sample properties file is provided, which should be modified to use your AWS credentials (**accessKey** and **secretKey**), Kinesis stream (**kinesisInputStream**), Sumo Logic HTTP source (**sumologicUrl**), app name (**appName**) and transformer class (**transformerClass**). Reading from multiple Kinesis streams is also supported (see PR14): specify one config file per stream to launch multiple SumologicExecutors, like so:
36 | ```
37 | mvn clean compile exec:java -Dexec.args="app1.properties app2.properties"
38 | ```
39 | The SumologicConnector.properties file must still be present in the working directory, as it is hardcoded into the application as the file from which AWS credentials are read. If no .properties files are passed as arguments, SumologicConnector.properties is assumed to configure the only SumologicExecutor.
40 | 
41 | ## Running the Connector with Maven
42 | 
43 | After modifying the .properties file, run the connector using Maven:
44 | ```
45 | mvn clean compile exec:java -Dargs="SumologicConnector.properties"
46 | ```
47 | 
48 | ## Building the JAR
49 | After modifying the .properties file, build the JAR file using Maven:
50 | ```
51 | mvn clean install
52 | ```
53 | 
54 | ## Running the JAR
55 | ```
56 | java -jar target/kinesis-sumologic-connector-0.2.jar SumologicConnector.properties
57 | ```
58 | 
59 | 
60 | ## Related sources
61 | 
62 | [Amazon Kinesis](http://aws.amazon.com/kinesis/)
63 | 
64 | [Sumologic](https://www.sumologic.com/)
65 | 
66 | [Open JDK 8](http://openjdk.java.net/projects/jdk8)
67 | 
68 | [Maven](http://maven.apache.org/)
69 | 
70 | [AWS Kinesis Account](http://aws.amazon.com/account/)
71 | 
72 | [Sumologic Account](https://www.sumologic.com/pricing/)
73 | 
74 | [Kinesis Client Library](https://github.com/awslabs/amazon-kinesis-client/)
75 | 
-------------------------------------------------------------------------------- /configuration/cloudformation/README.md: --------------------------------------------------------------------------------
1 | # CloudFormation templates for Kinesis-Sumologic Connector
2 | 
3 | This folder contains the CloudFormation templates to set up a **Kinesis-Sumologic Connector** for Amazon VPC Flow Logs.
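The templates can be launched from the AWS console or the CLI. As a rough sketch (the stack name and all parameter values below are placeholders, and only a subset of the parameters is shown), a stack could be created from `cwl_kinesis.template` like this; the `CAPABILITY_IAM` acknowledgement is needed because the templates create IAM users and roles:

```
# Placeholder values -- substitute your own key pair, HTTP source URL and log group
aws cloudformation create-stack \
  --stack-name sumologic-vpc-flowlog-connector \
  --template-body file://cwl_kinesis.template \
  --capabilities CAPABILITY_IAM \
  --parameters \
      ParameterKey=KeyName,ParameterValue=my-key-pair \
      ParameterKey=SumoLogicHttpCollectorURL,ParameterValue=https://collectors.sumologic.com/receiver/v1/http/XXXX \
      ParameterKey=LogGroupName,ParameterValue=my-vpc-flow-log-group
```

`cwl_kinesis_custom_vpc.template` additionally expects the `HostingVPC`, `HostingVPCSubnet` and `HostingVPCSecurityGroup` parameters identifying the existing VPC that will host the connector instance.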
4 | 
5 | ## Overview
6 | 
7 | The following templates are included:
8 | + **cwl_kinesis.template**: This template will create a new VPC for the new EC2 instance that hosts the connector.
9 | + **cwl_kinesis_custom_vpc.template**: This template will use an existing VPC and security group for the new EC2 instance that hosts the connector.
10 | 
11 | In either case, the template will create a new Kinesis stream and subscribe the user-specified log group to this stream.
12 | 
13 | ## Special Notes
14 | + It is strongly recommended to use a separate log group for each VPC to differentiate their logs.
15 | + By default, each template (and hence the corresponding stack) runs in the region the AWS user is using at run time, so you will need to create one separate stack per region. Within the same region, when there are multiple CloudWatch log groups, you can create multiple stacks, or create one stack and use the Kinesis stream created by that stack for multiple log groups. When using multiple stacks in the *same* region, make sure to use different values for the template parameter "KinesisConnectorAppName".
16 | 
17 | 
18 | 
19 | 
20 | 
-------------------------------------------------------------------------------- /configuration/cloudformation/cwl_kinesis.template: --------------------------------------------------------------------------------
1 | {
2 |   "AWSTemplateFormatVersion" : "2010-09-09",
3 | 
4 |   "Description" : "A sample SumoLogic/Kinesis stack that hooks up with real-time data from CloudWatch Logs using a Subscription Filter. **NOTE** This template creates one or more Amazon EC2 instances, an Amazon Kinesis stream. You will be billed for the AWS resources used if you create a stack from this template. This template requires setting the \"Create IAM resources\" parameter to True.",
5 | 
6 |   "Parameters" : {
7 | 
8 |     "KeyName": {
9 |       "Description" : "The name of an existing key pair to enable SSH access to the EC2 instances",
10 |       "Type": "AWS::EC2::KeyPair::KeyName"
11 |     },
12 | 
13 |     "InstanceType" : {
14 |       "Description" : "EC2 instance type for the SumoLogic nodes",
15 |       "Type" : "String",
16 |       "Default" : "t2.small",
17 |       "AllowedValues" : [
18 |         "t2.micro",
19 |         "t2.small",
20 |         "m3.medium",
21 |         "m3.large",
22 |         "m3.xlarge",
23 |         "m3.2xlarge",
24 |         "c3.large",
25 |         "c3.xlarge",
26 |         "c3.2xlarge",
27 |         "c3.4xlarge",
28 |         "c3.8xlarge",
29 |         "c4.large",
30 |         "c4.xlarge",
31 |         "c4.2xlarge",
32 |         "c4.4xlarge",
33 |         "c4.8xlarge",
34 |         "r3.large",
35 |         "r3.xlarge",
36 |         "r3.2xlarge",
37 |         "r3.4xlarge",
38 |         "r3.8xlarge",
39 |         "i2.xlarge",
40 |         "i2.2xlarge",
41 |         "i2.4xlarge",
42 |         "i2.8xlarge",
43 |         "d2.xlarge",
44 |         "d2.2xlarge",
45 |         "d2.4xlarge",
46 |         "d2.8xlarge",
47 |         "hi1.4xlarge",
48 |         "hs1.8xlarge",
49 |         "cr1.8xlarge",
50 |         "cc2.8xlarge"
51 |       ],
52 |       "ConstraintDescription" : "Must be a valid EC2 instance type"
53 |     },
54 | 
55 |     "SumoLogicHttpCollectorURL" : {
56 |       "Description" : "The URL of a SumoLogic Http Collector source",
57 |       "Type": "String"
58 |     },
59 | 
60 |     "AllowedIpSource" : {
61 |       "Description" : "The IP address range that can be used to access the EC2 instances via SSH",
62 |       "Type": "String",
63 |       "MinLength": "9",
64 |       "MaxLength": "18",
65 |       "Default": "0.0.0.0/0",
66 |       "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})",
67 |       "ConstraintDescription": "Must be a valid IP CIDR range of the form x.x.x.x/x"
68 |     },
69 | 
70 |     "LogGroupName": {
71 |       "Description": "The CloudWatch log group containing the VPC flow logs",
72 |       "Type": "String",
73 | 
"Default": "" 74 | }, 75 | 76 | "MonitorStack" : { 77 | "Description": "Push logs from KCL and CloudFormation to CloudWatch Logs", 78 | "Type": "String", 79 | "Default" : "false", 80 | "AllowedValues" : ["true", "false"] 81 | }, 82 | 83 | "KinesisConnectorAppName" : { 84 | "Description": "This is the application name in Kinesis Connector's context. No two live/active stacks should be using the same application name.", 85 | "Type": "String", 86 | "Default" : "SumoLogic-VPC-FlowLog-Connector" 87 | }, 88 | 89 | "KinesisShards": { 90 | "Description": "Number of shards to create for the Kinesis subscription stream", 91 | "Type": "Number", 92 | "Default" : "1", 93 | "MinValue" : "1" 94 | } 95 | }, 96 | 97 | "Mappings" : { 98 | 99 | "Constants" : { 100 | "DownloadPath" : { "Value": "sumologic-kinesis-connector" }, 101 | "DownloadMainPropertyFile" : { "Value": "SumologicConnector.properties" }, 102 | "DownloadLog4JPropertyFile" : { "Value": "log4j.properties" }, 103 | "DownloadWrapperScript" : { "Value": "SumoVPCKinesis.bash" }, 104 | "DownloadJarFile" : { "Value": "kinesis-sumologic-connector-0.1.jar" } 105 | }, 106 | 107 | "AWSInstanceType2Arch" : { 108 | "t2.micro" : { "Arch" : "HVM64" }, 109 | "t2.small" : { "Arch" : "HVM64" }, 110 | "t2.medium" : { "Arch" : "HVM64" }, 111 | "m3.medium" : { "Arch" : "HVM64" }, 112 | "m3.large" : { "Arch" : "HVM64" }, 113 | "m3.xlarge" : { "Arch" : "HVM64" }, 114 | "m3.2xlarge" : { "Arch" : "HVM64" }, 115 | "c3.large" : { "Arch" : "HVM64" }, 116 | "c3.xlarge" : { "Arch" : "HVM64" }, 117 | "c3.2xlarge" : { "Arch" : "HVM64" }, 118 | "c3.4xlarge" : { "Arch" : "HVM64" }, 119 | "c3.8xlarge" : { "Arch" : "HVM64" }, 120 | "c4.large" : { "Arch" : "HVM64" }, 121 | "c4.xlarge" : { "Arch" : "HVM64" }, 122 | "c4.2xlarge" : { "Arch" : "HVM64" }, 123 | "c4.4xlarge" : { "Arch" : "HVM64" }, 124 | "c4.8xlarge" : { "Arch" : "HVM64" }, 125 | "r3.large" : { "Arch" : "HVM64" }, 126 | "r3.xlarge" : { "Arch" : "HVM64" }, 127 | "r3.2xlarge" : { "Arch" : "HVM64" }, 128 | "r3.4xlarge" : { "Arch" : "HVM64" }, 129 | "r3.8xlarge" : { "Arch" : "HVM64" }, 130 | "i2.xlarge" : { "Arch" : "HVM64" }, 131 | "i2.2xlarge" : { "Arch" : "HVM64" }, 132 | "i2.4xlarge" : { "Arch" : "HVM64" }, 133 | "i2.8xlarge" : { "Arch" : "HVM64" }, 134 | "d2.xlarge" : { "Arch" : "HVM64" }, 135 | "d2.2xlarge" : { "Arch" : "HVM64" }, 136 | "d2.4xlarge" : { "Arch" : "HVM64" }, 137 | "d2.8xlarge" : { "Arch" : "HVM64" }, 138 | "hi1.4xlarge" : { "Arch" : "HVM64" }, 139 | "hs1.8xlarge" : { "Arch" : "HVM64" }, 140 | "cr1.8xlarge" : { "Arch" : "HVM64" }, 141 | "cc2.8xlarge" : { "Arch" : "HVM64" } 142 | }, 143 | 144 | "AWSRegionArch2AMI" : { 145 | "us-east-1" : {"PV64" : "ami-1ccae774", "HVM64" : "ami-1ecae776", "HVMG2" : "ami-8c6b40e4"}, 146 | "us-west-2" : {"PV64" : "ami-ff527ecf", "HVM64" : "ami-e7527ed7", "HVMG2" : "ami-abbe919b"}, 147 | "us-west-1" : {"PV64" : "ami-d514f291", "HVM64" : "ami-d114f295", "HVMG2" : "ami-f31ffeb7"}, 148 | "eu-west-1" : {"PV64" : "ami-bf0897c8", "HVM64" : "ami-a10897d6", "HVMG2" : "ami-d5bc24a2"}, 149 | "eu-central-1" : {"PV64" : "ami-ac221fb1", "HVM64" : "ami-a8221fb5", "HVMG2" : "ami-7cd2ef61"}, 150 | "ap-northeast-1" : {"PV64" : "ami-27f90e27", "HVM64" : "ami-cbf90ecb", "HVMG2" : "ami-6318e863"}, 151 | "ap-southeast-1" : {"PV64" : "ami-acd9e8fe", "HVM64" : "ami-68d8e93a", "HVMG2" : "ami-3807376a"}, 152 | "ap-southeast-2" : {"PV64" : "ami-ff9cecc5", "HVM64" : "ami-fd9cecc7", "HVMG2" : "ami-89790ab3"}, 153 | "sa-east-1" : {"PV64" : "ami-bb2890a6", "HVM64" : "ami-b52890a8", "HVMG2" : 
"NOT_SUPPORTED"}, 154 | "cn-north-1" : {"PV64" : "ami-fa39abc3", "HVM64" : "ami-f239abcb", "HVMG2" : "NOT_SUPPORTED"} 155 | } 156 | }, 157 | 158 | "Conditions" : { 159 | "CreateCWLForStack" : {"Fn::Equals" : [{"Ref" : "MonitorStack"}, "true"]}, 160 | "NoKeySpecified" : {"Fn::Equals" : [{"Ref" : "KeyName"}, ""]} 161 | }, 162 | 163 | "Resources" : { 164 | 165 | "KinesisSubscriptionStream": { 166 | "Type": "AWS::Kinesis::Stream", 167 | "Properties" : { 168 | "ShardCount": { "Ref": "KinesisShards" } 169 | } 170 | }, 171 | 172 | "SumoLogicVPC" : { 173 | "Type" : "AWS::EC2::VPC", 174 | "Properties" : { 175 | "CidrBlock" : "10.0.0.0/16", 176 | "Tags" : [ {"Key" : "Application", "Value" : { "Ref" : "AWS::StackId"} }, { "Key": "Name", "Value" : "SumoLogic_VPC" } ] 177 | } 178 | }, 179 | 180 | "Subnet" : { 181 | "Type" : "AWS::EC2::Subnet", 182 | "Properties" : { 183 | "VpcId" : { "Ref" : "SumoLogicVPC" }, 184 | "CidrBlock" : "10.0.0.0/24", 185 | "Tags" : [ {"Key" : "Application", "Value" : { "Ref" : "AWS::StackId"} } ] 186 | } 187 | }, 188 | 189 | "InternetGateway" : { 190 | "Type" : "AWS::EC2::InternetGateway", 191 | "Properties" : { 192 | "Tags" : [ {"Key" : "Application", "Value" : { "Ref" : "AWS::StackId"} } ] 193 | } 194 | }, 195 | 196 | "AttachGateway" : { 197 | "Type" : "AWS::EC2::VPCGatewayAttachment", 198 | "Properties" : { 199 | "VpcId" : { "Ref" : "SumoLogicVPC" }, 200 | "InternetGatewayId" : { "Ref" : "InternetGateway" } 201 | } 202 | }, 203 | 204 | "RouteTable" : { 205 | "Type" : "AWS::EC2::RouteTable", 206 | "Properties" : { 207 | "VpcId" : {"Ref" : "SumoLogicVPC"}, 208 | "Tags" : [ {"Key" : "Application", "Value" : { "Ref" : "AWS::StackId"} } ] 209 | } 210 | }, 211 | 212 | "Route" : { 213 | "Type" : "AWS::EC2::Route", 214 | "DependsOn" : "AttachGateway", 215 | "Properties" : { 216 | "RouteTableId" : { "Ref" : "RouteTable" }, 217 | "DestinationCidrBlock" : "0.0.0.0/0", 218 | "GatewayId" : { "Ref" : "InternetGateway" } 219 | } 220 | }, 221 | 222 | "SubnetRouteTableAssociation" : { 223 | "Type" : "AWS::EC2::SubnetRouteTableAssociation", 224 | "Properties" : { 225 | "SubnetId" : { "Ref" : "Subnet" }, 226 | "RouteTableId" : { "Ref" : "RouteTable" } 227 | } 228 | }, 229 | 230 | "NetworkAcl" : { 231 | "Type" : "AWS::EC2::NetworkAcl", 232 | "Properties" : { 233 | "VpcId" : {"Ref" : "SumoLogicVPC"}, 234 | "Tags" : [ {"Key" : "Application", "Value" : { "Ref" : "AWS::StackId"} } ] 235 | } 236 | }, 237 | 238 | "InboundSSHNetworkAclEntry" : { 239 | "Type" : "AWS::EC2::NetworkAclEntry", 240 | "Properties" : { 241 | "NetworkAclId" : {"Ref" : "NetworkAcl"}, 242 | "RuleNumber" : "101", 243 | "Protocol" : "6", 244 | "RuleAction" : "allow", 245 | "Egress" : "false", 246 | "CidrBlock" : "0.0.0.0/0", 247 | "PortRange" : {"From" : "22", "To" : "22"} 248 | } 249 | }, 250 | 251 | "InboundResponsePortsNetworkAclEntry" : { 252 | "Type" : "AWS::EC2::NetworkAclEntry", 253 | "Properties" : { 254 | "NetworkAclId" : {"Ref" : "NetworkAcl"}, 255 | "RuleNumber" : "102", 256 | "Protocol" : "6", 257 | "RuleAction" : "allow", 258 | "Egress" : "false", 259 | "CidrBlock" : "0.0.0.0/0", 260 | "PortRange" : {"From" : "1024", "To" : "65535"} 261 | } 262 | }, 263 | 264 | "OutBoundNetworkAclEntry" : { 265 | "Type" : "AWS::EC2::NetworkAclEntry", 266 | "Properties" : { 267 | "NetworkAclId" : {"Ref" : "NetworkAcl"}, 268 | "RuleNumber" : "100", 269 | "Protocol" : "-1", 270 | "RuleAction" : "allow", 271 | "Egress" : "true", 272 | "CidrBlock" : "0.0.0.0/0" 273 | } 274 | }, 275 | 276 | "OutBoundResponsePortsNetworkAclEntry" : { 277 | 
"Type" : "AWS::EC2::NetworkAclEntry", 278 | "Properties" : { 279 | "NetworkAclId" : {"Ref" : "NetworkAcl"}, 280 | "RuleNumber" : "102", 281 | "Protocol" : "6", 282 | "RuleAction" : "allow", 283 | "Egress" : "true", 284 | "CidrBlock" : "0.0.0.0/0", 285 | "PortRange" : {"From" : "1024", "To" : "65535"} 286 | } 287 | }, 288 | 289 | "SubnetNetworkAclAssociation" : { 290 | "Type" : "AWS::EC2::SubnetNetworkAclAssociation", 291 | "Properties" : { 292 | "SubnetId" : { "Ref" : "Subnet" }, 293 | "NetworkAclId" : { "Ref" : "NetworkAcl" } 294 | } 295 | }, 296 | 297 | "IPAddress" : { 298 | "Type" : "AWS::EC2::EIP", 299 | "DependsOn" : "AttachGateway", 300 | "Properties" : { 301 | "Domain" : "vpc", 302 | "InstanceId" : { "Ref" : "SumoLogicCWLKinesisNode" } 303 | } 304 | }, 305 | 306 | 307 | "SumoLogicInstanceSecurityGroup" : { 308 | "Type" : "AWS::EC2::SecurityGroup", 309 | "Properties" : { 310 | "VpcId" : { "Ref" : "SumoLogicVPC" }, 311 | "GroupDescription" : "Enable SSH access via port 22", 312 | "SecurityGroupIngress" : [ { 313 | "IpProtocol" : "tcp", 314 | "FromPort" : "22", 315 | "ToPort" : "22", 316 | "CidrIp" : { "Ref" : "AllowedIpSource"} 317 | } ] 318 | } 319 | }, 320 | 321 | 322 | "SumoLogicCWLKinesisNode":{ 323 | "Type" : "AWS::EC2::Instance", 324 | "Metadata" : { 325 | "Comment" : "Install SumoLogic Kinesiss application", 326 | "AWS::CloudFormation::Init" : { 327 | "configSets" : { 328 | "Install" : [ "config" ] 329 | }, 330 | 331 | "config" : { 332 | "packages": { 333 | "yum": { 334 | "awslogs": [] 335 | } 336 | }, 337 | 338 | "commands" : { 339 | 340 | "00_CopyCredFile": { 341 | "command": { "Fn::Join": [ "", ["mkdir /root/.aws ; ", "cp /home/ec2-user/.aws/credentials /root/.aws/" ] ] } , 342 | "cwd":"/root", 343 | "ignoreErrors":"true" 344 | }, 345 | 346 | "03_describeSubscriptionFilter": { 347 | "command": { "Fn::Join" : ["", [ 348 | "/usr/bin/aws logs describe-subscription-filters ", 349 | "--log-group-name \"", { "Ref": "LogGroupName" }, "\" ", 350 | "--region \"", { "Ref" : "AWS::Region" }, "\" ", 351 | "--filter-name-prefix \"cwl-cfn-es-\" " 352 | ]]} 353 | }, 354 | 355 | "04_deleteSubscriptionFilter": { 356 | "command": { "Fn::Join" : ["", [ 357 | "/usr/bin/aws logs delete-subscription-filter ", 358 | "--log-group-name \"", { "Ref": "LogGroupName" }, "\" ", 359 | "--region \"", { "Ref" : "AWS::Region" }, "\" ", 360 | "--filter-name $(aws logs describe-subscription-filters ", 361 | "--log-group-name ", { "Ref": "LogGroupName" }, " ", 362 | "--region ", { "Ref": "AWS::Region" }, " ", 363 | "--filter-name-prefix \"cwl-cfn-es-\" ", 364 | "| grep filterName | awk -F \\\" '{ print $4 };' )" 365 | ]]}, 366 | "ignoreErrors":"true" 367 | }, 368 | 369 | "05_putSubscriptionFilter": { 370 | "command": { "Fn::Join": ["", [ 371 | "/usr/bin/aws logs put-subscription-filter ", 372 | "--log-group-name \"", { "Ref": "LogGroupName" }, "\" ", 373 | "--filter-name \"cwl-cfn-es-", { "Ref": "KinesisSubscriptionStream" }, "\" ", 374 | "--filter-pattern \"\" ", 375 | "--region \"", { "Ref" : "AWS::Region" }, "\" ", 376 | "--destination-arn \"", 377 | "arn:aws:kinesis:", { "Ref": "AWS::Region" }, 378 | ":", { "Ref": "AWS::AccountId" }, 379 | ":stream/", { "Ref": "KinesisSubscriptionStream"} ,"\" ", 380 | "--role-arn \"", 381 | "arn:aws:iam::", { "Ref": "AWS::AccountId" }, 382 | ":role/", { "Ref": "SumoLogicCWL2KinesisRole" },"\"" 383 | ]]} 384 | }, 385 | 386 | "06_CWLStateDir" : { 387 | "command" : "mkdir -p /var/awslogs/state" 388 | } 389 | }, 390 | 391 | "files" : { 392 | 
"/etc/logrotate.d/cloudwatch-logs-subscription-consumer": { 393 | "content": { "Fn::Join" : ["", [ 394 | "/home/ec2-user/nohup.out\n", 395 | "{\n", 396 | " daily\n", 397 | " rotate 5\n", 398 | " copytruncate\n", 399 | " dateext\n", 400 | " compress\n", 401 | "}\n" 402 | ]]} 403 | }, 404 | 405 | "/home/ec2-user/.aws/credentials" : { 406 | "content" : { "Fn::Join" : ["", [ 407 | "[default]\n", 408 | "# http://aws.amazon.com/security-credentials\n", 409 | "aws_access_key_id = ",{"Ref": "SumoLogicCWLKinesisUserAccessKey"}, "\n", 410 | "aws_secret_access_key = ",{"Fn::GetAtt": [ "SumoLogicCWLKinesisUserAccessKey", "SecretAccessKey" ]}, "\n"]] }, 411 | "mode" : "000600", 412 | "owner" : "ec2-user", 413 | "group" : "ec2-user" 414 | }, 415 | 416 | "/home/ec2-user/SumologicConnector.properties" : { 417 | "content" : { "Fn::Join" : ["", [ 418 | "# Fill in your AWS Access Key ID and Secret Access Key\n", 419 | "# http://aws.amazon.com/security-credentials\n", 420 | "accessKey = ",{"Ref": "SumoLogicCWLKinesisUserAccessKey"}, "\n", 421 | "secretKey = ",{"Fn::GetAtt": [ "SumoLogicCWLKinesisUserAccessKey", "SecretAccessKey" ]}, "\n", 422 | "\n", 423 | 424 | "# KinesisConnector Application Settings\n", 425 | "# Since Kinesis Creates a DynamoDB table for each APP,\n", 426 | "# each appName must be unique for different kinesisInputStreams and connectorDestinations\n", 427 | "appName = ",{"Fn::Join": ["", [{"Ref": "KinesisConnectorAppName"}]]},"\n", 428 | "\n", 429 | 430 | "# By specifying the region name, the connector will connect from the Amazon Kinesis stream in this region\n", 431 | "# unless the endpoint for Amazon Kinesis is explicitly specified. The Amazon DynamoDB lease table and Amazon CloudWatch\n", 432 | "# metrics for connector will be created in this region. All resources in outgoing destination will not be affected by this region name.\n", 433 | "regionName = ",{"Ref": "AWS::Region"},"\n", 434 | "retryLimit = 3\n", 435 | "backoffInterval = 50000\n", 436 | "bufferRecordCountLimit = 100\n", 437 | "bufferMillisecondsLimit = 10000\n", 438 | 439 | "# Amazon Kinesis parameters for KinesisConnector\n\n", 440 | 441 | "# Uncomment the following property if you would like to explicitly configure the Amazon Kinesis endpoint.\n", 442 | "# This property will configure the connector's Amazon Kinesis client to read from this specific endpoint,\n", 443 | "# overwriting the regionName property for ONLY the Amazon Kinesis client. 
The lease table and Amazon CloudWatch\n", 444 | "# metrics will still use the regionName property.\n", 445 | "# kinesisEndpoint = https://kinesis.us-west-2.amazonaws.com\n\n", 446 | 447 | "# Kinesis Stream where data will be grabbed from\n", 448 | "kinesisInputStream = ",{"Ref": "KinesisSubscriptionStream"},"\n\n", 449 | 450 | "# Optional Amazon Kinesis parameters for automatically creating the stream\n", 451 | "createKinesisInputStream = false\n", 452 | "createKinesisOutputStream = false\n", 453 | "kinesisInputStreamShardCount = 2\n", 454 | "kinesisOutputStreamShardCount = 2\n\n", 455 | 456 | "# Transformer class that will be used to handle records\n", 457 | "transformerClass = CloudWatchMessageModelSumologicTransformer\n\n", 458 | 459 | "# Specifies the input file from which the StreamSource will read records\n", 460 | "createStreamSource = false\n", 461 | "inputStreamFile = users.txt\n\n", 462 | 463 | "# Connector name to be appendend to the UserAgent\n", 464 | "connectorDestination = sumologic\n\n", 465 | 466 | "# Sumologic HTTP Collector URL\n", 467 | "sumologicUrl = ",{"Ref": "SumoLogicHttpCollectorURL"},"\n" 468 | ]]}, 469 | "mode" : "000600", 470 | "owner" : "ec2-user", 471 | "group" : "ec2-user" 472 | }, 473 | 474 | "/etc/cfn/cfn-hup.conf" : { 475 | "content" : { "Fn::Join" : ["", [ 476 | "[main]\n", 477 | "stack=", { "Ref" : "AWS::StackId" }, "\n", 478 | "region=", { "Ref" : "AWS::Region" }, "\n" 479 | ]]}, 480 | "mode" : "000400", 481 | "owner" : "root", 482 | "group" : "root" 483 | }, 484 | 485 | "/etc/awslogs/awscli.conf": { 486 | "content": { "Fn::Join" : ["", [ 487 | "[plugins]\n", 488 | "cwlogs = cwlogs\n", 489 | "[default]\n", 490 | "region = ", { "Ref": "AWS::Region"} ,"\n" 491 | ]]} 492 | }, 493 | 494 | "/etc/awslogs/awslogs.conf": { 495 | "Fn::If": [ "CreateCWLForStack", 496 | { 497 | "content": { "Fn::Join": [ "", [ 498 | "[general]\n", 499 | "state_file= /var/awslogs/state/agent-state\n", 500 | 501 | "[/var/log/cfn-init.log]\n", 502 | "file = /var/log/cfn-init.log\n", 503 | "log_group_name = ", { "Ref": "CloudFormationLogs" }, "\n", 504 | "log_stream_name = {instance_id}\n", 505 | 506 | "[/var/log/cloud-init-output.log]\n", 507 | "file = /var/log/cloud-init-output.log\n", 508 | "log_group_name = ", { "Ref": "CWEC2Logs" }, "\n", 509 | "log_stream_name = {instance_id}\n", 510 | 511 | "[/var/log/cloud-init.log]\n", 512 | "file = /var/log/cloud-init.log\n", 513 | "log_group_name = ", { "Ref": "CWEC2Logs" }, "\n", 514 | "log_stream_name = {instance_id}\n", 515 | 516 | "[cloudwatch-logs-subscription-consumer]\n", 517 | "file = /home/ec2-user/nohup.out\n", 518 | "log_group_name = ", { "Ref": "KCLLogs" }, "\n", 519 | "log_stream_name = {instance_id}\n" 520 | ]]}, 521 | "mode": "000444", 522 | "owner": "root", 523 | "group": "root" 524 | }, 525 | { 526 | "content": "# Find original defaults in .bak file" 527 | } 528 | ] 529 | }, 530 | 531 | "/etc/cfn/hooks.d/cfn-auto-reloader.conf" : { 532 | "content": { "Fn::Join" : ["", [ 533 | "[cfn-auto-reloader-hook]\n", 534 | "triggers=post.update\n", 535 | "path=Resources.SumoLogicCWLKinesisNode.Metadata.AWS::CloudFormation::Init\n", 536 | "action=/opt/aws/bin/cfn-init -s ", { "Ref" : "AWS::StackId" }, " -r SumoLogicCWLKinesisNode", 537 | " --region ", { "Ref" : "AWS::Region" }, "\n", 538 | "runas=root\n" 539 | ]]} 540 | } 541 | 542 | }, 543 | 544 | "services": { 545 | "sysvinit": { 546 | "awslogs": { 547 | "Fn::If": [ 548 | "CreateCWLForStack", 549 | { 550 | "enabled" : "true", 551 | "ensureRunning" : "true", 552 | "files" : [ 
"/etc/awslogs/awslogs.conf" ] 553 | }, 554 | "AWS::NoValue" 555 | ] 556 | } 557 | } 558 | } 559 | } 560 | } 561 | }, 562 | 563 | 564 | "Properties" : { 565 | "DisableApiTermination" : "false", 566 | "ImageId" : { "Fn::FindInMap" : [ "AWSRegionArch2AMI", { "Ref" : "AWS::Region"}, { "Fn::FindInMap" : [ "AWSInstanceType2Arch", { "Ref" :"InstanceType" } , "Arch" ]}]}, 567 | "InstanceInitiatedShutdownBehavior" : "stop", 568 | "SecurityGroupIds" : [ { "Ref" : "SumoLogicInstanceSecurityGroup" } ], 569 | "SubnetId" : { "Ref" : "Subnet" }, 570 | "InstanceType" : { "Ref" : "InstanceType" }, 571 | "KeyName" : { "Ref" : "KeyName"}, 572 | "IamInstanceProfile" : { "Ref" : "SumoLogicInstanceProfile" }, 573 | "Monitoring" : "false", 574 | "Tags" : [ {"Key": "Deployment", "Value" : "Kinesis"}, {"Key":"Name","Value":"SumoLogic_Kinesis"} ], 575 | "UserData" : { "Fn::Base64" : { "Fn::Join" : ["", [ 576 | "#!/bin/bash -xe\n", 577 | 578 | "# Install the files and packages from the metadata\n ", 579 | "yum update -y aws-cfn-bootstrap\n", 580 | "yum update -y aws-cli\n", 581 | 582 | "# Download files \n", 583 | "cd /home/ec2-user","\n", 584 | "wget https://s3.amazonaws.com/", { "Fn::FindInMap" : [ "Constants", "DownloadPath", "Value" ]}, "/", { "Fn::FindInMap" : [ "Constants", "DownloadJarFile", "Value" ]}, "\n", 585 | "chown ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadJarFile", "Value" ]}, "\n", 586 | "chgrp ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadJarFile", "Value" ]}, "\n", 587 | "wget https://s3.amazonaws.com/", { "Fn::FindInMap" : [ "Constants", "DownloadPath", "Value" ]}, "/", { "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 588 | "chown ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 589 | "chgrp ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 590 | "chmod 755 ",{ "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 591 | 592 | "wget https://s3.amazonaws.com/", { "Fn::FindInMap" : [ "Constants", "DownloadPath", "Value" ]}, "/", { "Fn::FindInMap" : [ "Constants", "DownloadLog4JPropertyFile", "Value" ]}, "\n", 593 | "chown ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadLog4JPropertyFile", "Value" ]}, "\n", 594 | "chgrp ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadLog4JPropertyFile", "Value" ]}, "\n", 595 | 596 | "# Config\n", 597 | "/opt/aws/bin/cfn-init -v ", 598 | " --stack ", { "Ref" : "AWS::StackName" }, 599 | " --resource SumoLogicCWLKinesisNode", 600 | " --configsets Install ", 601 | " --region ", { "Ref" : "AWS::Region" }, "\n", 602 | 603 | "/opt/aws/bin/cfn-signal -e $? 
", 604 | " --stack ", { "Ref" : "AWS::StackName" }, 605 | " --resource SumoLogicCWLKinesisNode", 606 | " --region ", { "Ref" : "AWS::Region" }, "\n" 607 | 608 | ]]}} 609 | 610 | } 611 | }, 612 | 613 | "SumoLogicCWLKinesisUser" : { 614 | "Type": "AWS::IAM::User", 615 | "Properties": { 616 | "Path": "/" 617 | } 618 | }, 619 | 620 | "SumoLogicCWLKinesisUserAccessKey" : { 621 | "Type" : "AWS::IAM::AccessKey", 622 | "Properties" : { 623 | "UserName" : { "Ref" : "SumoLogicCWLKinesisUser" }, 624 | "Status" : "Active" 625 | } 626 | }, 627 | 628 | 629 | "SumoLogicCWL2KinesisRole" : { 630 | "Type": "AWS::IAM::Role", 631 | "Properties": { 632 | "AssumeRolePolicyDocument" : { 633 | "Version": "2012-10-17", 634 | "Statement": [{ 635 | "Effect": "Allow", 636 | "Principal": { 637 | "Service": { "Fn::Join": ["", ["logs.", { "Ref": "AWS::Region" } ,".amazonaws.com" ]]} 638 | }, 639 | "Action": "sts:AssumeRole" 640 | }] 641 | } 642 | } 643 | }, 644 | 645 | "SumoLogicCWLKinesisPolicy": { 646 | "Type": "AWS::IAM::Policy", 647 | "Properties": { 648 | "PolicyName": "SumoLogicCWLKinesisPolicy", 649 | "Roles": [ { "Ref": "SumoLogicCWL2KinesisRole" } ], 650 | "PolicyDocument": { 651 | "Version": "2012-10-17", 652 | "Statement": [ 653 | { 654 | "Effect": "Allow", 655 | "Action": "kinesis:PutRecord", 656 | "Resource": { "Fn::Join" : ["", ["arn:aws:kinesis:", { "Ref": "AWS::Region" }, ":", { "Ref": "AWS::AccountId" }, ":stream/", { "Ref": "KinesisSubscriptionStream"} ]] } 657 | }, 658 | { 659 | "Effect": "Allow", 660 | "Action": "iam:PassRole", 661 | "Resource": { "Fn::Join" : ["", ["arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/", { "Ref": "SumoLogicCWL2KinesisRole" } ]] } 662 | } 663 | ] 664 | } 665 | } 666 | }, 667 | 668 | "SumoLogicNodeRole" : { 669 | "Type" : "AWS::IAM::Role", 670 | "Properties" : { 671 | "AssumeRolePolicyDocument" : { 672 | "Version" : "2012-10-17", 673 | "Statement" : [ { 674 | "Effect" : "Allow", 675 | "Principal" : { 676 | "Service" : [ "ec2.amazonaws.com" ] 677 | }, 678 | "Action" : [ "sts:AssumeRole" ] 679 | } ] 680 | }, 681 | "Path" : "/" 682 | } 683 | }, 684 | 685 | "SumoLogicCWLKinesisNodePolicies" : { 686 | "Type" : "AWS::IAM::Policy", 687 | "Properties" : { 688 | "PolicyName" : "sumologic-cwl-kinesis-node-policy", 689 | "PolicyDocument": { 690 | "Version" : "2012-10-17", 691 | "Statement": [ 692 | { 693 | "Effect": "Allow", 694 | "Action": [ 695 | "logs:*" 696 | ], 697 | "Resource": "*" 698 | }, 699 | { 700 | "Effect": "Allow", 701 | "Action": "iam:PassRole", 702 | "Resource": { "Fn::Join" : ["", ["arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/", { "Ref": "SumoLogicCWL2KinesisRole" } ]] } 703 | } 704 | ] 705 | }, 706 | "Roles": [ { "Ref": "SumoLogicNodeRole" } ] 707 | } 708 | }, 709 | 710 | "SumoLogicCWLKinesisUserPolicies" : { 711 | "Type" : "AWS::IAM::Policy", 712 | "Properties" : { 713 | "PolicyName" : "sumologic-cwl-kinesis-user-policy", 714 | "PolicyDocument": { 715 | "Version" : "2012-10-17", 716 | "Statement": [ { 717 | "Effect": "Allow", 718 | "Action": "dynamodb:*", 719 | "Resource": { "Fn::Join": ["", ["arn:aws:dynamodb:",{ "Ref": "AWS::Region" },":", {"Ref": "AWS::AccountId"},":table/", { "Ref": "KinesisConnectorAppName" } ]]} 720 | }, 721 | { 722 | "Effect": "Allow", 723 | "Action": [ 724 | "kinesis:Get*", 725 | "kinesis:List*", 726 | "kinesis:Describe*" 727 | ], 728 | 729 | "Resource": { "Fn::Join": ["", ["arn:aws:kinesis:", { "Ref": "AWS::Region" }, ":",{"Ref": "AWS::AccountId"},":stream/", { "Ref": "KinesisSubscriptionStream" }]]} 730 | }, 731 | { 732 | 
"Effect": "Allow", 733 | "Action": [ 734 | "logs:*" 735 | ], 736 | "Resource": "*" 737 | }, 738 | { 739 | "Effect": "Allow", 740 | "Action": "iam:PassRole", 741 | "Resource": { "Fn::Join" : ["", ["arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/", { "Ref": "SumoLogicCWL2KinesisRole" } ]] } 742 | } 743 | ] 744 | }, 745 | "Users": [ { "Ref": "SumoLogicCWLKinesisUser" } ] 746 | } 747 | }, 748 | 749 | "SumoLogicInstanceProfile" : { 750 | "Type": "AWS::IAM::InstanceProfile", 751 | "Properties": { 752 | "Path": "/", 753 | "Roles": [ { "Ref": "SumoLogicNodeRole" } ] 754 | } 755 | }, 756 | 757 | "CWEC2Logs": { 758 | "Type": "AWS::Logs::LogGroup", 759 | "Condition": "CreateCWLForStack" 760 | }, 761 | 762 | "CloudFormationLogs": { 763 | "Type": "AWS::Logs::LogGroup", 764 | "Condition": "CreateCWLForStack" 765 | }, 766 | 767 | "KCLLogs": { 768 | "Type": "AWS::Logs::LogGroup", 769 | "Condition": "CreateCWLForStack" 770 | } 771 | }, 772 | 773 | "Outputs" : { 774 | "InstanceId" : { 775 | "Description" : "InstanceId of the newly created SumoLogic EC2 VPC instance", 776 | "Value" : { "Ref" : "SumoLogicCWLKinesisNode" } 777 | }, 778 | "AZ" : { 779 | "Description" : "Availability Zone of the newly created SumoLogic EC2 instance", 780 | "Value" : { "Fn::GetAtt" : [ "SumoLogicCWLKinesisNode", "AvailabilityZone" ] } 781 | }, 782 | 783 | "PublicDNS" : { 784 | "Description" : "Public DNSName of the newly created EC2 SumoLogic instance", 785 | "Value" : { "Fn::GetAtt" : [ "SumoLogicCWLKinesisNode", "PublicDnsName" ] } 786 | }, 787 | 788 | "PublicIP" : { 789 | "Description" : "Public IP address of the newly created EC2 SumoLogic instance", 790 | "Value" : { "Fn::GetAtt" : [ "SumoLogicCWLKinesisNode", "PublicIp" ] } 791 | }, 792 | 793 | "CWLtoKinesisRoleArn" : { 794 | "Description" : "Arn of CloudWatchLogs to Kinesis Role, if need to use manually ", 795 | "Value" : {"Fn::Join" : ["", [ "arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/",{ "Ref" : "SumoLogicCWL2KinesisRole" } ]]} 796 | }, 797 | 798 | "KinesisStream" : { 799 | "Description" : "Kinesis Stream for VPC flow logs ", 800 | "Value" : {"Fn::Join" : ["", [ "arn:aws:kinesis:", {"Ref":"AWS::Region"},":",{ "Ref": "AWS::AccountId" }, ":stream/",{ "Ref" : "KinesisSubscriptionStream" } ]]} 801 | }, 802 | 803 | "SumoLogicCWLKinesisUserAccessKeyValue" : { 804 | "Value" : { "Ref" : "SumoLogicCWLKinesisUserAccessKey"} 805 | }, 806 | 807 | "SumoLogicCWLKinesisUserSecretKeyValue" : { 808 | "Value" : { 809 | "Fn::GetAtt" : [ "SumoLogicCWLKinesisUserAccessKey", "SecretAccessKey" ] 810 | } 811 | } 812 | } 813 | } 814 | -------------------------------------------------------------------------------- /configuration/cloudformation/cwl_kinesis_custom_vpc.template: -------------------------------------------------------------------------------- 1 | { 2 | "AWSTemplateFormatVersion" : "2010-09-09", 3 | 4 | "Description" : "A sample SumoLogic/Kinesis stack that hooks up with real-time data from CloudWatch Logs using a Subscription Filter. **NOTE** This template creates one or more Amazon EC2 instances, an Amazon Kinesis stream. You will be billed for the AWS resources used if you create a stack from this template. 
This template requires setting the \"Create IAM resources\" parameter to True.", 5 | 6 | "Parameters" : { 7 | 8 | "KeyName": { 9 | "Description" : "The name of an existing key pair to enable SSH access to the EC2 instances", 10 | "Type": "AWS::EC2::KeyPair::KeyName" 11 | }, 12 | 13 | "HostingVPC": { 14 | "Description" : "The name of an existing VPC to host the new SumoLogic EC2 instance", 15 | "Type": "AWS::EC2::VPC::Id" 16 | }, 17 | 18 | "HostingVPCSubnet": { 19 | "Description" : "The subnet of an existing VPC to host the new SumoLogic EC2 instance", 20 | "Type": "AWS::EC2::Subnet::Id" 21 | }, 22 | 23 | "HostingVPCSecurityGroup": { 24 | "Description" : "The security group of an existing VPC to host the new SumoLogic EC2 instance", 25 | "Type": "AWS::EC2::SecurityGroup::Id" 26 | }, 27 | 28 | 29 | "InstanceType" : { 30 | "Description" : "EC2 instance type for the SumoLogic nodes", 31 | "Type" : "String", 32 | "Default" : "t2.small", 33 | "AllowedValues" : [ 34 | "t2.micro", 35 | "t2.small", 36 | "m3.medium", 37 | "m3.large", 38 | "m3.xlarge", 39 | "m3.2xlarge", 40 | "c3.large", 41 | "c3.xlarge", 42 | "c3.2xlarge", 43 | "c3.4xlarge", 44 | "c3.8xlarge", 45 | "c4.large", 46 | "c4.xlarge", 47 | "c4.2xlarge", 48 | "c4.4xlarge", 49 | "c4.8xlarge", 50 | "r3.large", 51 | "r3.xlarge", 52 | "r3.2xlarge", 53 | "r3.4xlarge", 54 | "r3.8xlarge", 55 | "i2.xlarge", 56 | "i2.2xlarge", 57 | "i2.4xlarge", 58 | "i2.8xlarge", 59 | "d2.xlarge", 60 | "d2.2xlarge", 61 | "d2.4xlarge", 62 | "d2.8xlarge", 63 | "hi1.4xlarge", 64 | "hs1.8xlarge", 65 | "cr1.8xlarge", 66 | "cc2.8xlarge" 67 | ], 68 | "ConstraintDescription" : "Must be a valid EC2 instance type" 69 | }, 70 | 71 | "SumoLogicHttpCollectorURL" : { 72 | "Description" : "The URL of a SumoLogic Http Collector source", 73 | "Type": "String" 74 | }, 75 | 76 | "AllowedIpSource" : { 77 | "Description" : "The IP address range that can be used to access the EC2 instances via SSH", 78 | "Type": "String", 79 | "MinLength": "9", 80 | "MaxLength": "18", 81 | "Default": "0.0.0.0/0", 82 | "AllowedPattern": "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,2})", 83 | "ConstraintDescription": "Must be a valid IP CIDR range of the form x.x.x.x/x" 84 | }, 85 | 86 | "LogGroupName": { 87 | "Description": "The CloudWatch log group containing the VPC flow logs", 88 | "Type": "String", 89 | "Default": "" 90 | }, 91 | 92 | "MonitorStack" : { 93 | "Description": "Push logs from KCL and CloudFormation to CloudWatch Logs", 94 | "Type": "String", 95 | "Default" : "false", 96 | "AllowedValues" : ["true", "false"] 97 | }, 98 | 99 | "KinesisConnectorAppName" : { 100 | "Description": "This is the application name in Kinesis Connector's context. 
No two live/active stacks should be using the same application name.", 101 | "Type": "String", 102 | "Default" : "SumoLogic-VPC-FlowLog-Connector" 103 | }, 104 | 105 | "KinesisShards": { 106 | "Description": "Number of shards to create for the Kinesis subscription stream", 107 | "Type": "Number", 108 | "Default" : "1", 109 | "MinValue" : "1" 110 | } 111 | }, 112 | 113 | "Mappings" : { 114 | 115 | "Constants" : { 116 | "DownloadPath" : { "Value": "sumologic-kinesis-connector" }, 117 | "DownloadMainPropertyFile" : { "Value": "SumologicConnector.properties" }, 118 | "DownloadLog4JPropertyFile" : { "Value": "log4j.properties" }, 119 | "DownloadWrapperScript" : { "Value": "SumoVPCKinesis.bash" }, 120 | "DownloadJarFile" : { "Value": "kinesis-sumologic-connector-0.1.jar" } 121 | }, 122 | 123 | "AWSInstanceType2Arch" : { 124 | "t2.micro" : { "Arch" : "HVM64" }, 125 | "t2.small" : { "Arch" : "HVM64" }, 126 | "t2.medium" : { "Arch" : "HVM64" }, 127 | "m3.medium" : { "Arch" : "HVM64" }, 128 | "m3.large" : { "Arch" : "HVM64" }, 129 | "m3.xlarge" : { "Arch" : "HVM64" }, 130 | "m3.2xlarge" : { "Arch" : "HVM64" }, 131 | "c3.large" : { "Arch" : "HVM64" }, 132 | "c3.xlarge" : { "Arch" : "HVM64" }, 133 | "c3.2xlarge" : { "Arch" : "HVM64" }, 134 | "c3.4xlarge" : { "Arch" : "HVM64" }, 135 | "c3.8xlarge" : { "Arch" : "HVM64" }, 136 | "c4.large" : { "Arch" : "HVM64" }, 137 | "c4.xlarge" : { "Arch" : "HVM64" }, 138 | "c4.2xlarge" : { "Arch" : "HVM64" }, 139 | "c4.4xlarge" : { "Arch" : "HVM64" }, 140 | "c4.8xlarge" : { "Arch" : "HVM64" }, 141 | "r3.large" : { "Arch" : "HVM64" }, 142 | "r3.xlarge" : { "Arch" : "HVM64" }, 143 | "r3.2xlarge" : { "Arch" : "HVM64" }, 144 | "r3.4xlarge" : { "Arch" : "HVM64" }, 145 | "r3.8xlarge" : { "Arch" : "HVM64" }, 146 | "i2.xlarge" : { "Arch" : "HVM64" }, 147 | "i2.2xlarge" : { "Arch" : "HVM64" }, 148 | "i2.4xlarge" : { "Arch" : "HVM64" }, 149 | "i2.8xlarge" : { "Arch" : "HVM64" }, 150 | "d2.xlarge" : { "Arch" : "HVM64" }, 151 | "d2.2xlarge" : { "Arch" : "HVM64" }, 152 | "d2.4xlarge" : { "Arch" : "HVM64" }, 153 | "d2.8xlarge" : { "Arch" : "HVM64" }, 154 | "hi1.4xlarge" : { "Arch" : "HVM64" }, 155 | "hs1.8xlarge" : { "Arch" : "HVM64" }, 156 | "cr1.8xlarge" : { "Arch" : "HVM64" }, 157 | "cc2.8xlarge" : { "Arch" : "HVM64" } 158 | }, 159 | 160 | "AWSRegionArch2AMI" : { 161 | "us-east-1" : {"PV64" : "ami-1ccae774", "HVM64" : "ami-1ecae776", "HVMG2" : "ami-8c6b40e4"}, 162 | "us-west-2" : {"PV64" : "ami-ff527ecf", "HVM64" : "ami-e7527ed7", "HVMG2" : "ami-abbe919b"}, 163 | "us-west-1" : {"PV64" : "ami-d514f291", "HVM64" : "ami-d114f295", "HVMG2" : "ami-f31ffeb7"}, 164 | "eu-west-1" : {"PV64" : "ami-bf0897c8", "HVM64" : "ami-a10897d6", "HVMG2" : "ami-d5bc24a2"}, 165 | "eu-central-1" : {"PV64" : "ami-ac221fb1", "HVM64" : "ami-a8221fb5", "HVMG2" : "ami-7cd2ef61"}, 166 | "ap-northeast-1" : {"PV64" : "ami-27f90e27", "HVM64" : "ami-cbf90ecb", "HVMG2" : "ami-6318e863"}, 167 | "ap-southeast-1" : {"PV64" : "ami-acd9e8fe", "HVM64" : "ami-68d8e93a", "HVMG2" : "ami-3807376a"}, 168 | "ap-southeast-2" : {"PV64" : "ami-ff9cecc5", "HVM64" : "ami-fd9cecc7", "HVMG2" : "ami-89790ab3"}, 169 | "sa-east-1" : {"PV64" : "ami-bb2890a6", "HVM64" : "ami-b52890a8", "HVMG2" : "NOT_SUPPORTED"}, 170 | "cn-north-1" : {"PV64" : "ami-fa39abc3", "HVM64" : "ami-f239abcb", "HVMG2" : "NOT_SUPPORTED"} 171 | } 172 | }, 173 | 174 | "Conditions" : { 175 | "CreateCWLForStack" : {"Fn::Equals" : [{"Ref" : "MonitorStack"}, "true"]}, 176 | "NoKeySpecified" : {"Fn::Equals" : [{"Ref" : "KeyName"}, ""]} 177 | }, 178 | 179 | "Resources" 
: { 180 | 181 | "KinesisSubscriptionStream": { 182 | "Type": "AWS::Kinesis::Stream", 183 | "Properties" : { 184 | "ShardCount": { "Ref": "KinesisShards" } 185 | } 186 | }, 187 | 188 | "SumoLogicCWLKinesisNode":{ 189 | "Type" : "AWS::EC2::Instance", 190 | "Metadata" : { 191 | "Comment" : "Install SumoLogic Kinesiss application", 192 | "AWS::CloudFormation::Init" : { 193 | "configSets" : { 194 | "Install" : [ "config" ] 195 | }, 196 | 197 | "config" : { 198 | "packages": { 199 | "yum": { 200 | "awslogs": [] 201 | } 202 | }, 203 | 204 | "commands" : { 205 | 206 | "00_CopyCredFile": { 207 | "command": { "Fn::Join": [ "", ["mkdir /root/.aws ; ", "cp /home/ec2-user/.aws/credentials /root/.aws/" ] ] } , 208 | "cwd":"/root", 209 | "ignoreErrors":"true" 210 | }, 211 | 212 | "03_describeSubscriptionFilter": { 213 | "command": { "Fn::Join" : ["", [ 214 | "/usr/bin/aws logs describe-subscription-filters ", 215 | "--log-group-name \"", { "Ref": "LogGroupName" }, "\" ", 216 | "--region \"", { "Ref" : "AWS::Region" }, "\" ", 217 | "--filter-name-prefix \"cwl-cfn-es-\" " 218 | ]]} 219 | }, 220 | 221 | "04_deleteSubscriptionFilter": { 222 | "command": { "Fn::Join" : ["", [ 223 | "/usr/bin/aws logs delete-subscription-filter ", 224 | "--log-group-name \"", { "Ref": "LogGroupName" }, "\" ", 225 | "--region \"", { "Ref" : "AWS::Region" }, "\" ", 226 | "--filter-name $(aws logs describe-subscription-filters ", 227 | "--log-group-name ", { "Ref": "LogGroupName" }, " ", 228 | "--region ", { "Ref": "AWS::Region" }, " ", 229 | "--filter-name-prefix \"cwl-cfn-es-\" ", 230 | "| grep filterName | awk -F \\\" '{ print $4 };' )" 231 | ]]}, 232 | "ignoreErrors":"true" 233 | }, 234 | 235 | "05_putSubscriptionFilter": { 236 | "command": { "Fn::Join": ["", [ 237 | "/usr/bin/aws logs put-subscription-filter ", 238 | "--log-group-name \"", { "Ref": "LogGroupName" }, "\" ", 239 | "--filter-name \"cwl-cfn-es-", { "Ref": "KinesisSubscriptionStream" }, "\" ", 240 | "--filter-pattern \"\" ", 241 | "--region \"", { "Ref" : "AWS::Region" }, "\" ", 242 | "--destination-arn \"", 243 | "arn:aws:kinesis:", { "Ref": "AWS::Region" }, 244 | ":", { "Ref": "AWS::AccountId" }, 245 | ":stream/", { "Ref": "KinesisSubscriptionStream"} ,"\" ", 246 | "--role-arn \"", 247 | "arn:aws:iam::", { "Ref": "AWS::AccountId" }, 248 | ":role/", { "Ref": "SumoLogicCWL2KinesisRole" },"\"" 249 | ]]} 250 | }, 251 | 252 | "06_CWLStateDir" : { 253 | "command" : "mkdir -p /var/awslogs/state" 254 | } 255 | }, 256 | 257 | "files" : { 258 | "/etc/logrotate.d/cloudwatch-logs-subscription-consumer": { 259 | "content": { "Fn::Join" : ["", [ 260 | "/home/ec2-user/nohup.out\n", 261 | "{\n", 262 | " daily\n", 263 | " rotate 5\n", 264 | " copytruncate\n", 265 | " dateext\n", 266 | " compress\n", 267 | "}\n" 268 | ]]} 269 | }, 270 | 271 | "/home/ec2-user/.aws/credentials" : { 272 | "content" : { "Fn::Join" : ["", [ 273 | "[default]\n", 274 | "# http://aws.amazon.com/security-credentials\n", 275 | "aws_access_key_id = ",{"Ref": "SumoLogicCWLKinesisUserAccessKey"}, "\n", 276 | "aws_secret_access_key = ",{"Fn::GetAtt": [ "SumoLogicCWLKinesisUserAccessKey", "SecretAccessKey" ]}, "\n"]] }, 277 | "mode" : "000600", 278 | "owner" : "ec2-user", 279 | "group" : "ec2-user" 280 | }, 281 | 282 | "/home/ec2-user/SumologicConnector.properties" : { 283 | "content" : { "Fn::Join" : ["", [ 284 | "# Fill in your AWS Access Key ID and Secret Access Key\n", 285 | "# http://aws.amazon.com/security-credentials\n", 286 | "accessKey = ",{"Ref": "SumoLogicCWLKinesisUserAccessKey"}, "\n", 287 | 
"secretKey = ",{"Fn::GetAtt": [ "SumoLogicCWLKinesisUserAccessKey", "SecretAccessKey" ]}, "\n", 288 | "\n", 289 | 290 | "# KinesisConnector Application Settings\n", 291 | "# Since Kinesis Creates a DynamoDB table for each APP,\n", 292 | "# each appName must be unique for different kinesisInputStreams and connectorDestinations\n", 293 | "appName = ",{"Fn::Join": ["", [{"Ref": "KinesisConnectorAppName"}]]},"\n", 294 | "\n", 295 | 296 | "# By specifying the region name, the connector will connect from the Amazon Kinesis stream in this region\n", 297 | "# unless the endpoint for Amazon Kinesis is explicitly specified. The Amazon DynamoDB lease table and Amazon CloudWatch\n", 298 | "# metrics for connector will be created in this region. All resources in outgoing destination will not be affected by this region name.\n", 299 | "regionName = ",{"Ref": "AWS::Region"},"\n", 300 | "retryLimit = 3\n", 301 | "backoffInterval = 50000\n", 302 | "bufferRecordCountLimit = 100\n", 303 | "bufferMillisecondsLimit = 10000\n", 304 | 305 | "# Amazon Kinesis parameters for KinesisConnector\n\n", 306 | 307 | "# Uncomment the following property if you would like to explicitly configure the Amazon Kinesis endpoint.\n", 308 | "# This property will configure the connector's Amazon Kinesis client to read from this specific endpoint,\n", 309 | "# overwriting the regionName property for ONLY the Amazon Kinesis client. The lease table and Amazon CloudWatch\n", 310 | "# metrics will still use the regionName property.\n", 311 | "# kinesisEndpoint = https://kinesis.us-west-2.amazonaws.com\n\n", 312 | 313 | "# Kinesis Stream where data will be grabbed from\n", 314 | "kinesisInputStream = ",{"Ref": "KinesisSubscriptionStream"},"\n\n", 315 | 316 | "# Optional Amazon Kinesis parameters for automatically creating the stream\n", 317 | "createKinesisInputStream = false\n", 318 | "createKinesisOutputStream = false\n", 319 | "kinesisInputStreamShardCount = 2\n", 320 | "kinesisOutputStreamShardCount = 2\n\n", 321 | 322 | "# Transformer class that will be used to handle records\n", 323 | "transformerClass = CloudWatchMessageModelSumologicTransformer\n\n", 324 | 325 | "# Specifies the input file from which the StreamSource will read records\n", 326 | "createStreamSource = false\n", 327 | "inputStreamFile = users.txt\n\n", 328 | 329 | "# Connector name to be appendend to the UserAgent\n", 330 | "connectorDestination = sumologic\n\n", 331 | 332 | "# Sumologic HTTP Collector URL\n", 333 | "sumologicUrl = ",{"Ref": "SumoLogicHttpCollectorURL"},"\n" 334 | ]]}, 335 | "mode" : "000600", 336 | "owner" : "ec2-user", 337 | "group" : "ec2-user" 338 | }, 339 | 340 | "/etc/cfn/cfn-hup.conf" : { 341 | "content" : { "Fn::Join" : ["", [ 342 | "[main]\n", 343 | "stack=", { "Ref" : "AWS::StackId" }, "\n", 344 | "region=", { "Ref" : "AWS::Region" }, "\n" 345 | ]]}, 346 | "mode" : "000400", 347 | "owner" : "root", 348 | "group" : "root" 349 | }, 350 | 351 | "/etc/awslogs/awscli.conf": { 352 | "content": { "Fn::Join" : ["", [ 353 | "[plugins]\n", 354 | "cwlogs = cwlogs\n", 355 | "[default]\n", 356 | "region = ", { "Ref": "AWS::Region"} ,"\n" 357 | ]]} 358 | }, 359 | 360 | "/etc/awslogs/awslogs.conf": { 361 | "Fn::If": [ "CreateCWLForStack", 362 | { 363 | "content": { "Fn::Join": [ "", [ 364 | "[general]\n", 365 | "state_file= /var/awslogs/state/agent-state\n", 366 | 367 | "[/var/log/cfn-init.log]\n", 368 | "file = /var/log/cfn-init.log\n", 369 | "log_group_name = ", { "Ref": "CloudFormationLogs" }, "\n", 370 | "log_stream_name = {instance_id}\n", 371 | 372 
| "[/var/log/cloud-init-output.log]\n", 373 | "file = /var/log/cloud-init-output.log\n", 374 | "log_group_name = ", { "Ref": "CWEC2Logs" }, "\n", 375 | "log_stream_name = {instance_id}\n", 376 | 377 | "[/var/log/cloud-init.log]\n", 378 | "file = /var/log/cloud-init.log\n", 379 | "log_group_name = ", { "Ref": "CWEC2Logs" }, "\n", 380 | "log_stream_name = {instance_id}\n", 381 | 382 | "[cloudwatch-logs-subscription-consumer]\n", 383 | "file = /home/ec2-user/nohup.out\n", 384 | "log_group_name = ", { "Ref": "KCLLogs" }, "\n", 385 | "log_stream_name = {instance_id}\n" 386 | ]]}, 387 | "mode": "000444", 388 | "owner": "root", 389 | "group": "root" 390 | }, 391 | { 392 | "content": "# Find original defaults in .bak file" 393 | } 394 | ] 395 | }, 396 | 397 | "/etc/cfn/hooks.d/cfn-auto-reloader.conf" : { 398 | "content": { "Fn::Join" : ["", [ 399 | "[cfn-auto-reloader-hook]\n", 400 | "triggers=post.update\n", 401 | "path=Resources.SumoLogicCWLKinesisNode.Metadata.AWS::CloudFormation::Init\n", 402 | "action=/opt/aws/bin/cfn-init -s ", { "Ref" : "AWS::StackId" }, " -r SumoLogicCWLKinesisNode", 403 | " --region ", { "Ref" : "AWS::Region" }, "\n", 404 | "runas=root\n" 405 | ]]} 406 | } 407 | 408 | }, 409 | 410 | "services": { 411 | "sysvinit": { 412 | "awslogs": { 413 | "Fn::If": [ 414 | "CreateCWLForStack", 415 | { 416 | "enabled" : "true", 417 | "ensureRunning" : "true", 418 | "files" : [ "/etc/awslogs/awslogs.conf" ] 419 | }, 420 | "AWS::NoValue" 421 | ] 422 | } 423 | } 424 | } 425 | } 426 | } 427 | }, 428 | 429 | 430 | "Properties" : { 431 | "DisableApiTermination" : "false", 432 | "ImageId" : { "Fn::FindInMap" : [ "AWSRegionArch2AMI", { "Ref" : "AWS::Region"}, { "Fn::FindInMap" : [ "AWSInstanceType2Arch", { "Ref" :"InstanceType" } , "Arch" ]}]}, 433 | "InstanceInitiatedShutdownBehavior" : "stop", 434 | "InstanceType" : { "Ref" : "InstanceType" }, 435 | "KeyName" : { "Ref" : "KeyName"}, 436 | "IamInstanceProfile" : { "Ref" : "SumoLogicInstanceProfile" }, 437 | "NetworkInterfaces": [ 438 | { 439 | "AssociatePublicIpAddress": true, 440 | "DeleteOnTermination": true, 441 | "DeviceIndex": "0", 442 | "GroupSet": [ { "Ref": "HostingVPCSecurityGroup" } ], 443 | "SubnetId": { "Ref": "HostingVPCSubnet" } 444 | } 445 | ], 446 | "Monitoring" : "false", 447 | "Tags" : [ {"Key": "Deployment", "Value" : "Kinesis"}, {"Key":"Name","Value":"SumoLogic_Kinesis_CustomVPC"} ], 448 | "UserData" : { "Fn::Base64" : { "Fn::Join" : ["", [ 449 | "#!/bin/bash -xe\n", 450 | 451 | "# Install the files and packages from the metadata\n ", 452 | "yum update -y aws-cfn-bootstrap\n", 453 | "yum update -y aws-cli\n", 454 | 455 | "# Download files \n", 456 | "cd /home/ec2-user","\n", 457 | "wget https://s3.amazonaws.com/", { "Fn::FindInMap" : [ "Constants", "DownloadPath", "Value" ]}, "/", { "Fn::FindInMap" : [ "Constants", "DownloadJarFile", "Value" ]}, "\n", 458 | "chown ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadJarFile", "Value" ]}, "\n", 459 | "chgrp ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadJarFile", "Value" ]}, "\n", 460 | "wget https://s3.amazonaws.com/", { "Fn::FindInMap" : [ "Constants", "DownloadPath", "Value" ]}, "/", { "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 461 | "chown ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 462 | "chgrp ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 463 | "chmod 755 ",{ "Fn::FindInMap" : [ "Constants", "DownloadWrapperScript", "Value" ]}, "\n", 464 | 
465 | "wget https://s3.amazonaws.com/", { "Fn::FindInMap" : [ "Constants", "DownloadPath", "Value" ]}, "/", { "Fn::FindInMap" : [ "Constants", "DownloadLog4JPropertyFile", "Value" ]}, "\n", 466 | "chown ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadLog4JPropertyFile", "Value" ]}, "\n", 467 | "chgrp ec2-user ",{ "Fn::FindInMap" : [ "Constants", "DownloadLog4JPropertyFile", "Value" ]}, "\n", 468 | 469 | "# Config\n", 470 | "/opt/aws/bin/cfn-init -v ", 471 | " --stack ", { "Ref" : "AWS::StackName" }, 472 | " --resource SumoLogicCWLKinesisNode", 473 | " --configsets Install ", 474 | " --region ", { "Ref" : "AWS::Region" }, "\n", 475 | 476 | "/opt/aws/bin/cfn-signal -e $? ", 477 | " --stack ", { "Ref" : "AWS::StackName" }, 478 | " --resource SumoLogicCWLKinesisNode", 479 | " --region ", { "Ref" : "AWS::Region" }, "\n" 480 | 481 | ]]}} 482 | 483 | } 484 | }, 485 | 486 | "SumoLogicCWLKinesisUser" : { 487 | "Type": "AWS::IAM::User", 488 | "Properties": { 489 | "Path": "/" 490 | } 491 | }, 492 | 493 | "SumoLogicCWLKinesisUserAccessKey" : { 494 | "Type" : "AWS::IAM::AccessKey", 495 | "Properties" : { 496 | "UserName" : { "Ref" : "SumoLogicCWLKinesisUser" }, 497 | "Status" : "Active" 498 | } 499 | }, 500 | 501 | 502 | "SumoLogicCWL2KinesisRole" : { 503 | "Type": "AWS::IAM::Role", 504 | "Properties": { 505 | "AssumeRolePolicyDocument" : { 506 | "Version": "2012-10-17", 507 | "Statement": [{ 508 | "Effect": "Allow", 509 | "Principal": { 510 | "Service": { "Fn::Join": ["", ["logs.", { "Ref": "AWS::Region" } ,".amazonaws.com" ]]} 511 | }, 512 | "Action": "sts:AssumeRole" 513 | }] 514 | } 515 | } 516 | }, 517 | 518 | "SumoLogicCWLKinesisPolicy": { 519 | "Type": "AWS::IAM::Policy", 520 | "Properties": { 521 | "PolicyName": "SumoLogicCWLKinesisPolicy", 522 | "Roles": [ { "Ref": "SumoLogicCWL2KinesisRole" } ], 523 | "PolicyDocument": { 524 | "Version": "2012-10-17", 525 | "Statement": [ 526 | { 527 | "Effect": "Allow", 528 | "Action": "kinesis:PutRecord", 529 | "Resource": { "Fn::Join" : ["", ["arn:aws:kinesis:", { "Ref": "AWS::Region" }, ":", { "Ref": "AWS::AccountId" }, ":stream/", { "Ref": "KinesisSubscriptionStream"} ]] } 530 | }, 531 | { 532 | "Effect": "Allow", 533 | "Action": "iam:PassRole", 534 | "Resource": { "Fn::Join" : ["", ["arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/", { "Ref": "SumoLogicCWL2KinesisRole" } ]] } 535 | } 536 | ] 537 | } 538 | } 539 | }, 540 | 541 | "SumoLogicNodeRole" : { 542 | "Type" : "AWS::IAM::Role", 543 | "Properties" : { 544 | "AssumeRolePolicyDocument" : { 545 | "Version" : "2012-10-17", 546 | "Statement" : [ { 547 | "Effect" : "Allow", 548 | "Principal" : { 549 | "Service" : [ "ec2.amazonaws.com" ] 550 | }, 551 | "Action" : [ "sts:AssumeRole" ] 552 | } ] 553 | }, 554 | "Path" : "/" 555 | } 556 | }, 557 | 558 | "SumoLogicCWLKinesisNodePolicies" : { 559 | "Type" : "AWS::IAM::Policy", 560 | "Properties" : { 561 | "PolicyName" : "sumologic-cwl-kinesis-node-policy", 562 | "PolicyDocument": { 563 | "Version" : "2012-10-17", 564 | "Statement": [ 565 | { 566 | "Effect": "Allow", 567 | "Action": [ 568 | "logs:*" 569 | ], 570 | "Resource": "*" 571 | }, 572 | { 573 | "Effect": "Allow", 574 | "Action": "iam:PassRole", 575 | "Resource": { "Fn::Join" : ["", ["arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/", { "Ref": "SumoLogicCWL2KinesisRole" } ]] } 576 | } 577 | ] 578 | }, 579 | "Roles": [ { "Ref": "SumoLogicNodeRole" } ] 580 | } 581 | }, 582 | 583 | "SumoLogicCWLKinesisUserPolicies" : { 584 | "Type" : "AWS::IAM::Policy", 585 | "Properties" : { 586 | 
"PolicyName" : "sumologic-cwl-kinesis-user-policy", 587 | "PolicyDocument": { 588 | "Version" : "2012-10-17", 589 | "Statement": [ { 590 | "Effect": "Allow", 591 | "Action": "dynamodb:*", 592 | "Resource": { "Fn::Join": ["", ["arn:aws:dynamodb:",{ "Ref": "AWS::Region" },":", {"Ref": "AWS::AccountId"},":table/", { "Ref": "KinesisConnectorAppName" } ]]} 593 | }, 594 | { 595 | "Effect": "Allow", 596 | "Action": [ 597 | "kinesis:Get*", 598 | "kinesis:List*", 599 | "kinesis:Describe*" 600 | ], 601 | 602 | "Resource": { "Fn::Join": ["", ["arn:aws:kinesis:", { "Ref": "AWS::Region" }, ":",{"Ref": "AWS::AccountId"},":stream/", { "Ref": "KinesisSubscriptionStream" }]]} 603 | }, 604 | { 605 | "Effect": "Allow", 606 | "Action": [ 607 | "logs:*" 608 | ], 609 | "Resource": "*" 610 | }, 611 | { 612 | "Effect": "Allow", 613 | "Action": "iam:PassRole", 614 | "Resource": { "Fn::Join" : ["", ["arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/", { "Ref": "SumoLogicCWL2KinesisRole" } ]] } 615 | } 616 | ] 617 | }, 618 | "Users": [ { "Ref": "SumoLogicCWLKinesisUser" } ] 619 | } 620 | }, 621 | 622 | "SumoLogicInstanceProfile" : { 623 | "Type": "AWS::IAM::InstanceProfile", 624 | "Properties": { 625 | "Path": "/", 626 | "Roles": [ { "Ref": "SumoLogicNodeRole" } ] 627 | } 628 | }, 629 | 630 | "CWEC2Logs": { 631 | "Type": "AWS::Logs::LogGroup", 632 | "Condition": "CreateCWLForStack" 633 | }, 634 | 635 | "CloudFormationLogs": { 636 | "Type": "AWS::Logs::LogGroup", 637 | "Condition": "CreateCWLForStack" 638 | }, 639 | 640 | "KCLLogs": { 641 | "Type": "AWS::Logs::LogGroup", 642 | "Condition": "CreateCWLForStack" 643 | } 644 | }, 645 | 646 | "Outputs" : { 647 | "InstanceId" : { 648 | "Description" : "InstanceId of the newly created SumoLogic EC2 VPC instance", 649 | "Value" : { "Ref" : "SumoLogicCWLKinesisNode" } 650 | }, 651 | "AZ" : { 652 | "Description" : "Availability Zone of the newly created SumoLogic EC2 instance", 653 | "Value" : { "Fn::GetAtt" : [ "SumoLogicCWLKinesisNode", "AvailabilityZone" ] } 654 | }, 655 | 656 | "PublicDNS" : { 657 | "Description" : "Public DNSName of the newly created EC2 SumoLogic instance", 658 | "Value" : { "Fn::GetAtt" : [ "SumoLogicCWLKinesisNode", "PublicDnsName" ] } 659 | }, 660 | 661 | "PublicIP" : { 662 | "Description" : "Public IP address of the newly created EC2 SumoLogic instance", 663 | "Value" : { "Fn::GetAtt" : [ "SumoLogicCWLKinesisNode", "PublicIp" ] } 664 | }, 665 | 666 | "CWLtoKinesisRoleArn" : { 667 | "Description" : "Arn of CloudWatchLogs to Kinesis Role, if need to use manually ", 668 | "Value" : {"Fn::Join" : ["", [ "arn:aws:iam::", { "Ref": "AWS::AccountId" }, ":role/",{ "Ref" : "SumoLogicCWL2KinesisRole" } ]]} 669 | }, 670 | 671 | "KinesisStream" : { 672 | "Description" : "Kinesis Stream for VPC flow logs ", 673 | "Value" : {"Fn::Join" : ["", [ "arn:aws:kinesis:", {"Ref":"AWS::Region"},":",{ "Ref": "AWS::AccountId" }, ":stream/",{ "Ref" : "KinesisSubscriptionStream" } ]]} 674 | }, 675 | 676 | "SumoLogicCWLKinesisUserAccessKeyValue" : { 677 | "Value" : { "Ref" : "SumoLogicCWLKinesisUserAccessKey"} 678 | }, 679 | 680 | "SumoLogicCWLKinesisUserSecretKeyValue" : { 681 | "Value" : { 682 | "Fn::GetAtt" : [ "SumoLogicCWLKinesisUserAccessKey", "SecretAccessKey" ] 683 | } 684 | } 685 | } 686 | } 687 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | com.sumologic 4 | kinesis-sumologic-connector 5 | 0.2 6 | jar 7 | 
kinesis-sumologic-connector 8 | Connector for feeding Amazon Kinesis data into Sumologic 9 | http://maven.apache.org 10 | 11 | 12 | Apache License, Version 2.0 13 | http://www.apache.org/licenses/LICENSE-2.0.txt 14 | repo 15 | 16 | 17 | 18 | https://github.com/SumoLogic/sumologic-kinesis-connector 19 | scm:git:git://github.com/SumoLogic/sumologic-kinesis-connector.git 20 | scm:git:git@github.com/SumoLogic/sumologic-kinesis-connector.git 21 | 22 | 23 | 24 | jpdiazvaz 25 | Juan Pablo Diaz-Vaz 26 | jpdiazvaz@mcplusa.com 27 | MC+A 28 | http://www.mcplusa.com 29 | 30 | developer 31 | 32 | 33 | 34 | duchatran 35 | Duc Ha 36 | duc@sumologic.com 37 | Sumo Logic 38 | https://www.sumologic.com 39 | 40 | developer 41 | 42 | 43 | 44 | 45 | UTF-8 46 | 1.8 47 | 1.8 48 | 49 | 50 | 51 | ossrh 52 | https://oss.sonatype.org/content/repositories/snapshots 53 | 54 | 55 | ossrh 56 | https://oss.sonatype.org/service/local/staging/deploy/maven2/ 57 | 58 | 59 | 60 | 61 | 62 | src/main/resources 63 | 64 | 65 | 66 | 67 | 68 | org.sonatype.plugins 69 | nexus-staging-maven-plugin 70 | 1.6.3 71 | true 72 | 73 | ossrh 74 | https://oss.sonatype.org/ 75 | true 76 | 77 | 78 | 79 | 105 | 106 | 120 | 121 | 122 | org.apache.maven.plugins 123 | maven-shade-plugin 124 | 3.2.1 125 | 126 | 127 | package 128 | 129 | shade 130 | 131 | 132 | 133 | 134 | com.amazonaws:aws-java-sdk-textract 135 | com.amazonaws:aws-java-sdk-worklink 136 | com.amazonaws:aws-java-sdk-backup 137 | com.amazonaws:aws-java-sdk-docdb 138 | com.amazonaws:aws-java-sdk-apigatewayv2 139 | com.amazonaws:aws-java-sdk-apigatewaymanagementapi 140 | com.amazonaws:aws-java-sdk-kafka 141 | com.amazonaws:aws-java-sdk-appmesh 142 | com.amazonaws:aws-java-sdk-licensemanager 143 | com.amazonaws:aws-java-sdk-securityhub 144 | com.amazonaws:aws-java-sdk-fsx 145 | com.amazonaws:aws-java-sdk-mediaconnect 146 | com.amazonaws:aws-java-sdk-kinesisanalyticsv2 147 | com.amazonaws:aws-java-sdk-comprehendmedical 148 | com.amazonaws:aws-java-sdk-globalaccelerator 149 | com.amazonaws:aws-java-sdk-transfer 150 | com.amazonaws:aws-java-sdk-datasync 151 | com.amazonaws:aws-java-sdk-robomaker 152 | com.amazonaws:aws-java-sdk-amplify 153 | com.amazonaws:aws-java-sdk-quicksight 154 | com.amazonaws:aws-java-sdk-rdsdata 155 | com.amazonaws:aws-java-sdk-route53resolver 156 | com.amazonaws:aws-java-sdk-ram 157 | com.amazonaws:aws-java-sdk-s3control 158 | com.amazonaws:aws-java-sdk-pinpointsmsvoice 159 | com.amazonaws:aws-java-sdk-pinpointemail 160 | com.amazonaws:aws-java-sdk-chime 161 | com.amazonaws:aws-java-sdk-signer 162 | com.amazonaws:aws-java-sdk-dlm 163 | com.amazonaws:aws-java-sdk-macie 164 | com.amazonaws:aws-java-sdk-eks 165 | com.amazonaws:aws-java-sdk-mediatailor 166 | com.amazonaws:aws-java-sdk-neptune 167 | com.amazonaws:aws-java-sdk-pi 168 | com.amazonaws:aws-java-sdk-iot1clickprojects 169 | com.amazonaws:aws-java-sdk-iot1clickdevices 170 | com.amazonaws:aws-java-sdk-iotanalytics 171 | com.amazonaws:aws-java-sdk-acmpca 172 | com.amazonaws:aws-java-sdk-secretsmanager 173 | com.amazonaws:aws-java-sdk-fms 174 | com.amazonaws:aws-java-sdk-connect 175 | com.amazonaws:aws-java-sdk-transcribe 176 | com.amazonaws:aws-java-sdk-autoscalingplans 177 | com.amazonaws:aws-java-sdk-workmail 178 | com.amazonaws:aws-java-sdk-servicediscovery 179 | com.amazonaws:aws-java-sdk-cloud9 180 | com.amazonaws:aws-java-sdk-serverlessapplicationrepository 181 | com.amazonaws:aws-java-sdk-alexaforbusiness 182 | com.amazonaws:aws-java-sdk-resourcegroups 183 | com.amazonaws:aws-java-sdk-comprehend 184 | 
com.amazonaws:aws-java-sdk-translate 185 | com.amazonaws:aws-java-sdk-sagemaker 186 | com.amazonaws:aws-java-sdk-iotjobsdataplane 187 | com.amazonaws:aws-java-sdk-sagemakerruntime 188 | com.amazonaws:aws-java-sdk-kinesisvideo 189 | io.netty:netty-codec-http 190 | io.netty:netty-codec 191 | io.netty:netty-handler 192 | io.netty:netty-buffer 193 | io.netty:netty-common 194 | io.netty:netty-transport 195 | io.netty:netty-resolver 196 | com.amazonaws:aws-java-sdk-appsync 197 | com.amazonaws:aws-java-sdk-guardduty 198 | com.amazonaws:aws-java-sdk-mq 199 | com.amazonaws:aws-java-sdk-mediaconvert 200 | com.amazonaws:aws-java-sdk-mediastore 201 | com.amazonaws:aws-java-sdk-mediastoredata 202 | com.amazonaws:aws-java-sdk-medialive 203 | com.amazonaws:aws-java-sdk-mediapackage 204 | com.amazonaws:aws-java-sdk-costexplorer 205 | com.amazonaws:aws-java-sdk-pricing 206 | com.amazonaws:aws-java-sdk-mobile 207 | com.amazonaws:aws-java-sdk-cloudhsmv2 208 | com.amazonaws:aws-java-sdk-glue 209 | com.amazonaws:aws-java-sdk-migrationhub 210 | com.amazonaws:aws-java-sdk-dax 211 | com.amazonaws:aws-java-sdk-greengrass 212 | com.amazonaws:aws-java-sdk-athena 213 | com.amazonaws:aws-java-sdk-marketplaceentitlement 214 | com.amazonaws:aws-java-sdk-codestar 215 | com.amazonaws:aws-java-sdk-lexmodelbuilding 216 | com.amazonaws:aws-java-sdk-resourcegroupstaggingapi 217 | com.amazonaws:aws-java-sdk-pinpoint 218 | com.amazonaws:aws-java-sdk-xray 219 | com.amazonaws:aws-java-sdk-opsworkscm 220 | com.amazonaws:aws-java-sdk-support 221 | com.amazonaws:aws-java-sdk-simpledb 222 | com.amazonaws:aws-java-sdk-servicecatalog 223 | com.amazonaws:aws-java-sdk-servermigration 224 | com.amazonaws:aws-java-sdk-simpleworkflow 225 | com.amazonaws:aws-java-sdk-storagegateway 226 | com.amazonaws:aws-java-sdk-route53 227 | com.amazonaws:aws-java-sdk-s3 228 | com.amazonaws:aws-java-sdk-importexport 229 | com.amazonaws:aws-java-sdk-sts 230 | com.amazonaws:aws-java-sdk-sqs 231 | com.amazonaws:aws-java-sdk-rds 232 | com.amazonaws:aws-java-sdk-redshift 233 | com.amazonaws:aws-java-sdk-elasticbeanstalk 234 | com.amazonaws:aws-java-sdk-glacier 235 | com.amazonaws:aws-java-sdk-iam 236 | com.amazonaws:aws-java-sdk-datapipeline 237 | com.amazonaws:aws-java-sdk-elasticloadbalancing 238 | com.amazonaws:aws-java-sdk-elasticloadbalancingv2 239 | com.amazonaws:aws-java-sdk-emr 240 | com.amazonaws:aws-java-sdk-elasticache 241 | com.amazonaws:aws-java-sdk-elastictranscoder 242 | com.amazonaws:aws-java-sdk-ec2 243 | com.amazonaws:aws-java-sdk-sns 244 | com.amazonaws:aws-java-sdk-budgets 245 | com.amazonaws:aws-java-sdk-cloudtrail 246 | com.amazonaws:aws-java-sdk-logs 247 | com.amazonaws:aws-java-sdk-events 248 | com.amazonaws:aws-java-sdk-cognitoidentity 249 | com.amazonaws:aws-java-sdk-cognitosync 250 | com.amazonaws:aws-java-sdk-directconnect 251 | com.amazonaws:aws-java-sdk-cloudformation 252 | com.amazonaws:aws-java-sdk-cloudfront 253 | com.amazonaws:aws-java-sdk-clouddirectory 254 | com.amazonaws:aws-java-sdk-opsworks 255 | com.amazonaws:aws-java-sdk-ses 256 | com.amazonaws:aws-java-sdk-autoscaling 257 | com.amazonaws:aws-java-sdk-cloudsearch 258 | com.amazonaws:aws-java-sdk-cloudwatchmetrics 259 | com.amazonaws:aws-java-sdk-codedeploy 260 | com.amazonaws:aws-java-sdk-codepipeline 261 | com.amazonaws:aws-java-sdk-kms 262 | com.amazonaws:aws-java-sdk-config 263 | com.amazonaws:aws-java-sdk-lambda 264 | com.amazonaws:aws-java-sdk-ecs 265 | com.amazonaws:aws-java-sdk-ecr 266 | com.amazonaws:aws-java-sdk-cloudhsm 267 | com.amazonaws:aws-java-sdk-ssm 268 
| com.amazonaws:aws-java-sdk-workspaces 269 | com.amazonaws:aws-java-sdk-machinelearning 270 | com.amazonaws:aws-java-sdk-directory 271 | com.amazonaws:aws-java-sdk-efs 272 | com.amazonaws:aws-java-sdk-codecommit 273 | com.amazonaws:aws-java-sdk-devicefarm 274 | com.amazonaws:aws-java-sdk-elasticsearch 275 | com.amazonaws:aws-java-sdk-waf 276 | com.amazonaws:aws-java-sdk-marketplacecommerceanalytics 277 | com.amazonaws:aws-java-sdk-inspector 278 | com.amazonaws:aws-java-sdk-iot 279 | com.amazonaws:aws-java-sdk-api-gateway 280 | com.amazonaws:aws-java-sdk-acm 281 | com.amazonaws:aws-java-sdk-gamelift 282 | com.amazonaws:aws-java-sdk-dms 283 | com.amazonaws:aws-java-sdk-marketplacemeteringservice 284 | com.amazonaws:aws-java-sdk-cognitoidp 285 | com.amazonaws:aws-java-sdk-discovery 286 | com.amazonaws:aws-java-sdk-applicationautoscaling 287 | com.amazonaws:aws-java-sdk-snowball 288 | com.amazonaws:aws-java-sdk-rekognition 289 | com.amazonaws:aws-java-sdk-polly 290 | com.amazonaws:aws-java-sdk-lightsail 291 | com.amazonaws:aws-java-sdk-stepfunctions 292 | com.amazonaws:aws-java-sdk-health 293 | com.amazonaws:aws-java-sdk-costandusagereport 294 | com.amazonaws:aws-java-sdk-codebuild 295 | com.amazonaws:aws-java-sdk-appstream 296 | com.amazonaws:aws-java-sdk-shield 297 | com.amazonaws:aws-java-sdk-batch 298 | com.amazonaws:aws-java-sdk-lex 299 | com.amazonaws:aws-java-sdk-mechanicalturkrequester 300 | com.amazonaws:aws-java-sdk-organizations 301 | com.amazonaws:aws-java-sdk-workdocs 302 | com.amazonaws:aws-java-sdk-models 303 | com.amazonaws:aws-java-sdk-swf-libraries 304 | software.amazon.ion:ion-java 305 | com.amazonaws:jmespath-java 306 | 307 | 308 | false 309 | 310 | 311 | com.sumologic.client.SumologicExecutor 312 | 313 | 314 | 315 | 316 | 317 | 318 | 319 | 320 | org.codehaus.mojo 321 | exec-maven-plugin 322 | 1.6.0 323 | 324 | com.sumologic.client.SumologicExecutor 325 | true 326 | 327 | 328 | 329 | 330 | 331 | 332 | 333 | com.amazonaws 334 | amazon-kinesis-client 335 | 1.9.3 336 | 337 | 338 | com.amazonaws 339 | amazon-kinesis-connectors 340 | 1.3.0 341 | 342 | 343 | com.google.protobuf 344 | protobuf-java 345 | 2.6.1 346 | 347 | 348 | commons-lang 349 | commons-lang 350 | 2.6 351 | 352 | 353 | org.apache.commons 354 | commons-lang3 355 | 3.8.1 356 | 357 | 358 | org.apache.lucene 359 | lucene-core 360 | 7.1.0 361 | 362 | 363 | commons-codec 364 | commons-codec 365 | 1.10 366 | 367 | 368 | com.ning 369 | async-http-client 370 | 1.9.30 371 | 372 | 373 | com.google.code.gson 374 | gson 375 | 2.3.1 376 | 377 | 378 | log4j 379 | log4j 380 | 1.2.17 381 | 382 | 383 | org.slf4j 384 | slf4j-simple 385 | 1.7.12 386 | 387 | 388 | org.slf4j 389 | slf4j-api 390 | 1.7.12 391 | 392 | 393 | org.hamcrest 394 | hamcrest-core 395 | 1.3 396 | 397 | 398 | com.fasterxml.jackson.core 399 | jackson-core 400 | 2.9.8 401 | 402 | 403 | com.fasterxml.jackson.core 404 | jackson-databind 405 | 2.9.8 406 | 407 | 408 | com.fasterxml.jackson.core 409 | jackson-annotations 410 | 2.9.8 411 | 412 | 413 | com.fasterxml.jackson.dataformat 414 | jackson-dataformat-cbor 415 | 2.9.8 416 | 417 | 418 | commons-logging 419 | commons-logging 420 | 1.1.3 421 | 422 | 423 | com.amazonaws 424 | aws-java-sdk 425 | 1.11.534 426 | 427 | 428 | com.amazonaws 429 | aws-java-sdk-core 430 | 1.11.534 431 | 432 | 433 | com.amazonaws 434 | aws-java-sdk-kinesis 435 | 1.11.534 436 | 437 | 438 | com.amazonaws 439 | aws-java-sdk-cloudwatch 440 | 1.11.534 441 | 442 | 443 | com.amazonaws 444 | aws-java-sdk-dynamodb 445 | 1.11.534 446 | 447 | 448 | 
org.json 449 | json 450 | 20180813 451 | 452 | 453 | org.apache.httpcomponents 454 | httpclient 455 | 4.5.8 456 | 457 | 458 | org.apache.httpcomponents 459 | httpcore 460 | 4.4.11 461 | 462 | 463 | joda-time 464 | joda-time 465 | 2.10.1 466 | 467 | 468 | junit 469 | junit 470 | 4.12 471 | test 472 | 473 | 474 | org.mortbay.jetty 475 | jetty 476 | 6.1.26 477 | test 478 | 479 | 480 | org.mortbay.jetty 481 | jetty-util 482 | 6.1.26 483 | test 484 | 485 | 486 | com.google.guava 487 | guava 488 | 18.0 489 | 490 | 491 | ch.qos.logback 492 | logback-core 493 | 1.1.3 494 | test 495 | 496 | 497 | com.jayway.jsonpath 498 | json-path 499 | 2.0.0 500 | test 501 | 502 | 503 | net.sf.jopt-simple 504 | jopt-simple 505 | 4.9 506 | test 507 | 508 | 509 | xmlunit 510 | xmlunit 511 | 1.6 512 | test 513 | 514 | 515 | org.skyscreamer 516 | jsonassert 517 | 1.2.3 518 | test 519 | 520 | 521 | javax.servlet 522 | javax.servlet-api 523 | 3.1.0 524 | test 525 | 526 | 527 | com.github.tomakehurst 528 | wiremock 529 | 1.56 530 | test 531 | 532 | 533 | 534 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/CloudWatchMessageModelSumologicTransformer.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import java.io.IOException; 4 | 5 | import com.amazonaws.services.kinesis.model.Record; 6 | import org.json.JSONArray; 7 | import org.json.JSONException; 8 | import org.json.JSONObject; 9 | import com.sumologic.client.implementations.SumologicTransformer; 10 | import com.sumologic.client.model.CloudWatchLogsMessageModel; 11 | import com.sumologic.client.model.LogEvent; 12 | 13 | import org.apache.log4j.Logger; 14 | 15 | import java.nio.ByteBuffer; 16 | import java.nio.CharBuffer; 17 | import java.nio.charset.CharacterCodingException; 18 | import java.nio.charset.Charset; 19 | import java.nio.charset.CharsetEncoder; 20 | import java.util.List; 21 | 22 | import com.fasterxml.jackson.databind.ObjectMapper; 23 | 24 | /** 25 | * A custom transfomer for {@link CloudWatchLogsMessageModel} records in JSON format. The output is in a format 26 | * usable for insertions to Sumologic. 27 | */ 28 | public class CloudWatchMessageModelSumologicTransformer 29 | implements SumologicTransformer { 30 | private static final Logger LOG = Logger.getLogger(CloudWatchMessageModelSumologicTransformer.class.getName()); 31 | 32 | private static CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder(); 33 | 34 | /** 35 | * Creates a new KinesisMessageModelSumologicTransformer. 36 | */ 37 | public CloudWatchMessageModelSumologicTransformer() { 38 | super(); 39 | } 40 | 41 | @Override 42 | public String fromClass(CloudWatchLogsMessageModel message) { 43 | String jsonMessage = ""; 44 | JSONObject outputObject; 45 | int recordsInMessageCount = 0; 46 | 47 | List logEvents = message.getLogEvents(); 48 | int logEventsSize = logEvents.size(); 49 | for (int i=0;i { 15 | /** 16 | * Creates a new KinesisMessageModelSumologicTransformer. 
17 | */ 18 | public DefaultKinesisMessageModelSumologicTransformer() { 19 | super(); 20 | } 21 | 22 | @Override 23 | public String fromClass(SimpleKinesisMessageModel message) { 24 | return message.toString(); 25 | } 26 | 27 | @Override 28 | public SimpleKinesisMessageModel toClass(Record record) throws IOException { 29 | byte[] decodedRecord = record.getData().array(); 30 | String stringifiedRecord = new String(decodedRecord); 31 | 32 | return new SimpleKinesisMessageModel(stringifiedRecord); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/KinesisConnectorForSumologicConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import java.util.Properties; 4 | 5 | import com.amazonaws.auth.AWSCredentialsProvider; 6 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 7 | 8 | /** 9 | * This class contains constants used to configure AWS Services in Amazon Kinesis Connectors. The user 10 | * should use System properties to set their proper configuration. An instance of 11 | * KinesisConnectorConfiguration is created with System properties and an AWSCredentialsProvider. 12 | */ 13 | public class KinesisConnectorForSumologicConfiguration extends KinesisConnectorConfiguration { 14 | // Properties added for Sumologic 15 | public static final String PROP_SUMOLOGIC_URL = "sumologicUrl"; 16 | public static final String PROP_TRANSFORMER_CLASS = "transformerClass"; 17 | 18 | private static final String DEFAULT_SUMOLOGIC_URL = null; 19 | private static final String DEFAULT_TRANSFORMER_CLASS = null; 20 | 21 | public final String SUMOLOGIC_URL; 22 | public final String TRANSFORMER_CLASS; 23 | 24 | /** 25 | * Configure the connector application with any set of properties that are unique to the application. Any 26 | * unspecified property will be set to a default value. 27 | */ 28 | public KinesisConnectorForSumologicConfiguration(Properties properties, AWSCredentialsProvider credentialsProvider) { 29 | super(properties, credentialsProvider); 30 | 31 | SUMOLOGIC_URL = properties.getProperty(PROP_SUMOLOGIC_URL, DEFAULT_SUMOLOGIC_URL); 32 | TRANSFORMER_CLASS = properties.getProperty(PROP_TRANSFORMER_CLASS, DEFAULT_TRANSFORMER_CLASS); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/SumologicExecutor.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import java.util.ArrayList; 4 | 5 | import com.sumologic.kinesis.KinesisConnectorRecordProcessorFactory; 6 | import com.sumologic.kinesis.KinesisConnectorExecutor; 7 | import com.sumologic.client.SumologicMessageModelPipeline; 8 | import com.sumologic.client.model.SimpleKinesisMessageModel; 9 | 10 | public class SumologicExecutor extends 11 | KinesisConnectorExecutor { 12 | private static String defaultConfigFile = "SumologicConnector.properties"; 13 | 14 | public SumologicExecutor() { 15 | super(); 16 | } 17 | 18 | /** 19 | * SumologicExecutor constructor. 
20 | * 21 | * @param configFile 22 | * Properties for the connector 23 | */ 24 | public SumologicExecutor(String configFile) { 25 | super(configFile); 26 | } 27 | 28 | @Override 29 | public KinesisConnectorRecordProcessorFactory getKinesisConnectorRecordProcessorFactory() { 30 | return new KinesisConnectorRecordProcessorFactory( 31 | new SumologicMessageModelPipeline(), config); 32 | } 33 | 34 | /** 35 | * Main method starts and runs the SumologicExecutor. 36 | * 37 | * @param args 38 | * @throws InterruptedException 39 | */ 40 | public static void main(String[] args) throws InterruptedException { 41 | ArrayList configFiles = new ArrayList(); 42 | ArrayList executorThreads = new ArrayList(); 43 | 44 | Boolean use_env = false; 45 | 46 | for (String arg : args) { 47 | if (arg.endsWith(".properties")) { 48 | configFiles.add(arg); 49 | } else if (arg.equals("use-env")) { 50 | use_env = true; 51 | } 52 | } 53 | 54 | if (use_env) { 55 | KinesisConnectorExecutor sumologicExecutor = new SumologicExecutor(); 56 | 57 | Thread executorThread = new Thread(sumologicExecutor); 58 | executorThreads.add(executorThread); 59 | executorThread.start(); 60 | 61 | } else { 62 | // if none of the arguments contained a config file, try the default 63 | // file name 64 | if (configFiles.size() == 0) { 65 | configFiles.add(defaultConfigFile); 66 | } 67 | 68 | for (String configFile : configFiles) { 69 | KinesisConnectorExecutor sumologicExecutor = new SumologicExecutor( 70 | configFile); 71 | 72 | Thread executorThread = new Thread(sumologicExecutor); 73 | executorThreads.add(executorThread); 74 | executorThread.start(); 75 | } 76 | } 77 | 78 | for (Thread executorThread : executorThreads) { 79 | executorThread.join(); 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/SumologicKinesisUtils.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.ByteArrayInputStream; 5 | import java.io.ByteArrayOutputStream; 6 | import java.io.IOException; 7 | import java.io.InputStreamReader; 8 | import java.nio.ByteBuffer; 9 | import java.nio.charset.Charset; 10 | import java.nio.charset.CharsetDecoder; 11 | import java.util.zip.GZIPInputStream; 12 | import java.util.zip.GZIPOutputStream; 13 | 14 | import org.apache.log4j.Logger; 15 | 16 | import com.google.gson.Gson; 17 | 18 | public class SumologicKinesisUtils { 19 | private static final Logger LOG = Logger.getLogger(SumologicKinesisUtils.class.getName()); 20 | 21 | public static byte[] compressGzip(String data) { 22 | if (data == null || data.length() == 0) { 23 | return null; 24 | } 25 | 26 | ByteArrayOutputStream outputStream=new ByteArrayOutputStream(); 27 | GZIPOutputStream gzip; 28 | try { 29 | gzip = new GZIPOutputStream(outputStream); 30 | } catch (IOException e) { 31 | LOG.error("Cannot compress into GZIP "+e.getMessage()); 32 | return null; 33 | } 34 | 35 | // Put data into the GZIP buffer 36 | try { 37 | gzip.write(data.getBytes("UTF-8")); 38 | gzip.close(); 39 | } catch (IOException e) { 40 | e.printStackTrace(); 41 | } 42 | 43 | return outputStream.toByteArray(); 44 | } 45 | 46 | public static String decompressGzip(byte[] compressedData) { 47 | try { 48 | GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressedData)); 49 | BufferedReader bf = new BufferedReader(new InputStreamReader(gis, "UTF-8")); 50 | 51 | String outStr = ""; 52 | 
String line; 53 | while ((line=bf.readLine())!=null) { 54 | outStr += line; 55 | } 56 | return outStr; 57 | } catch (IOException exc) { 58 | LOG.warn("Exception during decompression of data: " + exc.getMessage()); 59 | return null; 60 | } 61 | } 62 | 63 | public static String byteBufferToString(ByteBuffer buffer){ 64 | String data = ""; 65 | CharsetDecoder decoder = Charset.forName("UTF-8").newDecoder(); 66 | try{ 67 | int old_position = buffer.position(); 68 | data = decoder.decode(buffer).toString(); 69 | buffer.position(old_position); 70 | }catch (Exception e){ 71 | e.printStackTrace(); 72 | return ""; 73 | } 74 | return data; 75 | } 76 | 77 | private static final Gson gson = new Gson(); 78 | public static boolean verifyJSON(String json) { 79 | try { 80 | gson.fromJson(json, Object.class); 81 | return true; 82 | } catch(com.google.gson.JsonSyntaxException ex) { 83 | return false; 84 | } 85 | } 86 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/SumologicMessageModelPipeline.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import org.apache.log4j.Logger; 4 | 5 | import com.sumologic.client.implementations.SumologicEmitter; 6 | import com.sumologic.client.model.SimpleKinesisMessageModel; 7 | import com.amazonaws.services.kinesis.connectors.interfaces.IKinesisConnectorPipeline; 8 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 9 | import com.amazonaws.services.kinesis.connectors.impl.BasicMemoryBuffer; 10 | import com.amazonaws.services.kinesis.connectors.impl.AllPassFilter; 11 | import com.amazonaws.services.kinesis.connectors.interfaces.IEmitter; 12 | import com.amazonaws.services.kinesis.connectors.interfaces.IBuffer; 13 | import com.amazonaws.services.kinesis.connectors.interfaces.ITransformer; 14 | import com.amazonaws.services.kinesis.connectors.interfaces.IFilter; 15 | 16 | 17 | /** 18 | * The Pipeline used by the Sumologic. Processes KinesisMessageModel records in JSON String 19 | * format. Uses: 20 | *
21 |  * <li>{@link SumologicEmitter}</li>
22 |  * <li>{@link BasicMemoryBuffer}</li>
23 |  * <li>{@link CloudWatchMessageModelSumologicTransformer}</li>
24 |  * <li>{@link AllPassFilter}</li>
25 |  * </ul>
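 *
 * <p>Illustrative note (not part of the original source): the concrete transformer is chosen at
 * runtime from the {@code transformerClass} property, resolved against the
 * {@code com.sumologic.client} package and loaded by reflection in {@code getTransformer()}; if
 * loading fails, the pipeline falls back to {@link DefaultKinesisMessageModelSumologicTransformer}.
 * A hypothetical SumologicConnector.properties entry would be:
 * <pre>{@code
 * # class name is resolved relative to the com.sumologic.client package
 * transformerClass = CloudWatchMessageModelSumologicTransformer
 * }</pre>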
26 | */ 27 | public class SumologicMessageModelPipeline implements 28 | IKinesisConnectorPipeline { 29 | private static final Logger LOG = Logger.getLogger(SumologicMessageModelPipeline.class.getName()); 30 | 31 | @Override 32 | public IEmitter getEmitter(KinesisConnectorConfiguration configuration) { 33 | return new SumologicEmitter(configuration); 34 | } 35 | 36 | @Override 37 | public IBuffer getBuffer(KinesisConnectorConfiguration configuration) { 38 | return new BasicMemoryBuffer(configuration); 39 | } 40 | 41 | @Override 42 | public ITransformer 43 | getTransformer(KinesisConnectorConfiguration configuration) { 44 | 45 | // Load specified class 46 | String argClass = ((KinesisConnectorForSumologicConfiguration)configuration).TRANSFORMER_CLASS; 47 | String className = "com.sumologic.client."+argClass; 48 | ClassLoader classLoader = SumologicMessageModelPipeline.class.getClassLoader(); 49 | Class ModelClass = null; 50 | try { 51 | ModelClass = classLoader.loadClass(className); 52 | ITransformer ITransformerObject = (ITransformer)ModelClass.newInstance(); 53 | LOG.info("Using transformer: "+ITransformerObject.getClass().getName()); 54 | return ITransformerObject; 55 | } catch (ClassNotFoundException e) { 56 | LOG.error("Class not found: "+className+" error: "+e.getMessage()); 57 | } catch (InstantiationException e) { 58 | LOG.error("Class not found: "+className+" error: "+e.getMessage()); 59 | } catch (IllegalAccessException e) { 60 | LOG.error("Class not found: "+className+" error: "+e.getMessage()); 61 | } 62 | 63 | return new DefaultKinesisMessageModelSumologicTransformer(); 64 | } 65 | 66 | @Override 67 | public IFilter getFilter(KinesisConnectorConfiguration configuration) { 68 | return new AllPassFilter(); 69 | } 70 | 71 | } 72 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/SumologicSender.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import java.io.IOException; 4 | import java.util.concurrent.ExecutionException; 5 | 6 | import org.apache.commons.logging.Log; 7 | import org.apache.commons.logging.LogFactory; 8 | 9 | import com.ning.http.client.AsyncHttpClient; 10 | import com.ning.http.client.AsyncHttpClientConfig; 11 | import com.ning.http.client.AsyncHttpClient.BoundRequestBuilder; 12 | import com.ning.http.client.AsyncHttpClientConfig.Builder; 13 | import com.ning.http.client.Response; 14 | 15 | public class SumologicSender { 16 | private static final Log LOG = LogFactory.getLog(SumologicSender.class); 17 | 18 | private String url = null; 19 | private AsyncHttpClient client = null; 20 | 21 | public SumologicSender(String url) { 22 | this.url = url; 23 | 24 | Builder builder = new AsyncHttpClientConfig.Builder(); 25 | this.client = new AsyncHttpClient(builder.build()); 26 | } 27 | 28 | private BoundRequestBuilder clientPreparePost(String url){ 29 | if (this.client.isClosed()){ 30 | Builder builder = new AsyncHttpClientConfig.Builder(); 31 | this.client = new AsyncHttpClient(builder.build()); 32 | } 33 | return this.client.preparePost(url); 34 | } 35 | 36 | public boolean sendToSumologic(String data) throws IOException{ 37 | int statusCode = -1; 38 | 39 | BoundRequestBuilder builder = null; 40 | builder = this.clientPreparePost(url); 41 | 42 | byte[] compressedData = SumologicKinesisUtils.compressGzip(data); 43 | if (compressedData == null) { 44 | LOG.error("Unable to compress data to send: "+data); 45 | return false; 46 | } 47 
| 48 | LOG.info("HTTP POST body of size " + compressedData.length + " bytes"); 49 | 50 | builder.setHeader("Content-Encoding", "gzip"); 51 | builder.setBody(compressedData); 52 | 53 | Response response = null; 54 | try { 55 | response = builder.execute().get(); 56 | statusCode = response.getStatusCode(); 57 | } catch (InterruptedException e) { 58 | LOG.error("Can't send POST to Sumologic "+e.getMessage()); 59 | } catch (ExecutionException e) { 60 | LOG.error("Can't send POST to Sumologic "+e.getMessage()); 61 | } 62 | 63 | // Check if the request was successful; 64 | if (statusCode != 200) { 65 | LOG.warn(String.format("Received HTTP error from Sumo Service: %d", statusCode)); 66 | return false; 67 | } 68 | else{ 69 | return true; 70 | } 71 | } 72 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/implementations/SumologicEmitter.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client.implementations; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.LinkedList; 6 | import java.util.List; 7 | import java.util.Queue; 8 | 9 | import org.apache.log4j.Logger; 10 | 11 | import com.sumologic.client.SumologicSender; 12 | import com.sumologic.client.KinesisConnectorForSumologicConfiguration; 13 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 14 | import com.amazonaws.services.kinesis.connectors.UnmodifiableBuffer; 15 | import com.amazonaws.services.kinesis.connectors.interfaces.IEmitter; 16 | 17 | /** 18 | * This class is used to store records from a stream to Sumologic log files. It requires the use of a 19 | * SumologicTransformer, which is able to transform records into a format that can be sent to 20 | * Sumologic. 
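 *
 * <p>A minimal usage sketch (illustrative only; the collector URL below is a placeholder, and in
 * the connector it normally comes from the {@code sumologicUrl} property):
 * <pre>{@code
 * SumologicEmitter emitter = new SumologicEmitter("https://collectors.sumologic.com/receiver/v1/http/XXXX");
 * List<String> failed = emitter.sendBatchConcatenating(
 *         Arrays.asList("first log line", "second log line"));
 * // records that could not be delivered are returned so the caller can retry or fail them
 * }</pre>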
21 | */ 22 | public class SumologicEmitter implements IEmitter { 23 | private static final Logger LOG = Logger.getLogger(SumologicEmitter.class.getName()); 24 | 25 | private SumologicSender sender; 26 | private KinesisConnectorForSumologicConfiguration config; 27 | private static final boolean SEND_RECORDS_IN_BATCHES = true; 28 | private long batchSize = 1000L; 29 | 30 | public SumologicEmitter(KinesisConnectorConfiguration configuration) { 31 | this.config = (KinesisConnectorForSumologicConfiguration) configuration; 32 | sender = new SumologicSender(this.config.SUMOLOGIC_URL); 33 | batchSize = this.config.BUFFER_RECORD_COUNT_LIMIT; 34 | } 35 | 36 | public SumologicEmitter(String url) { 37 | sender = new SumologicSender(url); 38 | } 39 | 40 | @Override 41 | public List emit(final UnmodifiableBuffer buffer) 42 | throws IOException { 43 | List records = buffer.getRecords(); 44 | if (SEND_RECORDS_IN_BATCHES) { 45 | return sendBatchConcatenating(records); 46 | } else { 47 | return sendRecordsOneByOne(records); 48 | } 49 | } 50 | 51 | public List sendBatchConcatenating(List records) { 52 | boolean success = false; 53 | List failedRecords = new ArrayList(); 54 | List currentBatch = new ArrayList(); 55 | Queue unprocessedRecords = new LinkedList(records); 56 | 57 | String message = ""; 58 | int recordCount = 0; 59 | for(String record: records) { 60 | currentBatch.add(record); 61 | unprocessedRecords.poll(); 62 | message += record; 63 | message += "\n"; 64 | recordCount++; 65 | if (recordCount >= batchSize) { 66 | try { 67 | LOG.info("Sending batch of: "+recordCount+" records"); 68 | success = sender.sendToSumologic(message); 69 | } catch (IOException e) { 70 | LOG.warn("Couldn't send batch of " + recordCount 71 | + " record to Sumologic: "+e.getMessage()); 72 | success = false; 73 | } 74 | if (!success) { 75 | failedRecords.addAll(currentBatch); 76 | failedRecords.addAll(unprocessedRecords); 77 | return failedRecords; 78 | } 79 | currentBatch = new ArrayList(); 80 | recordCount = 0; 81 | message = ""; 82 | } 83 | } 84 | try { 85 | LOG.info("Sending batch of: "+recordCount+" records"); 86 | success = sender.sendToSumologic(message); 87 | } catch (IOException e) { 88 | LOG.warn("Couldn't send record to Sumologic: "+e.getMessage()); 89 | success = false; 90 | } 91 | if (!success) { 92 | failedRecords.addAll(currentBatch); 93 | failedRecords.addAll(unprocessedRecords); 94 | return failedRecords; 95 | } 96 | 97 | return failedRecords; 98 | } 99 | 100 | public List sendRecordsOneByOne (List records) { 101 | ArrayList failedRecords = new ArrayList(); 102 | for (String record: records) { 103 | try { 104 | if (!sender.sendToSumologic(record)) { 105 | failedRecords.add(record); 106 | } 107 | } catch (IOException e) { 108 | LOG.warn("Couldn't send record: "+record); 109 | } 110 | } 111 | LOG.info("Sent records: "+(records.size()-failedRecords.size())+" failed: "+failedRecords.size()); 112 | return failedRecords; 113 | } 114 | 115 | @Override 116 | public void fail(List records) { 117 | for (String record : records) { 118 | LOG.error("Could not emit record: " + record); 119 | } 120 | } 121 | 122 | @Override 123 | public void shutdown() { 124 | } 125 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/implementations/SumologicTransformer.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client.implementations; 2 | 3 | import 
com.amazonaws.services.kinesis.connectors.interfaces.ITransformer; 4 | 5 | /** 6 | * This interface defines an ITransformer that can transform an object of any type if necesary. 7 | * 8 | * @param 9 | */ 10 | public interface SumologicTransformer extends ITransformer { 11 | 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/model/CloudWatchLogsMessageModel.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client.model; 2 | 3 | import java.util.ArrayList; 4 | import java.util.HashMap; 5 | import java.util.List; 6 | import java.util.Map; 7 | 8 | import org.apache.commons.lang.builder.ToStringBuilder; 9 | 10 | import com.fasterxml.jackson.annotation.JsonAnyGetter; 11 | import com.fasterxml.jackson.annotation.JsonAnySetter; 12 | import com.fasterxml.jackson.annotation.JsonIgnore; 13 | import com.fasterxml.jackson.annotation.JsonInclude; 14 | import com.fasterxml.jackson.annotation.JsonProperty; 15 | import com.fasterxml.jackson.annotation.JsonPropertyOrder; 16 | 17 | @JsonInclude(JsonInclude.Include.NON_NULL) 18 | @JsonPropertyOrder({ 19 | "logEvents", 20 | "logGroup", 21 | "logStream", 22 | "messageType", 23 | "owner", 24 | "subscriptionFilters" 25 | }) 26 | 27 | public class CloudWatchLogsMessageModel { 28 | 29 | @JsonProperty("logEvents") 30 | private List logEvents = new ArrayList(); 31 | @JsonProperty("logGroup") 32 | private String logGroup; 33 | @JsonProperty("logStream") 34 | private String logStream; 35 | @JsonProperty("messageType") 36 | private String messageType; 37 | @JsonProperty("owner") 38 | private String owner; 39 | @JsonProperty("subscriptionFilters") 40 | private List subscriptionFilters = new ArrayList(); 41 | @JsonIgnore 42 | private Map additionalProperties = new HashMap(); 43 | 44 | @JsonProperty("logEvents") 45 | public List getLogEvents() { 46 | return logEvents; 47 | } 48 | 49 | @JsonProperty("logEvents") 50 | public void setLogEvents(List logEvents) { 51 | this.logEvents = logEvents; 52 | } 53 | 54 | @JsonProperty("logGroup") 55 | public String getLogGroup() { 56 | return logGroup; 57 | } 58 | 59 | @JsonProperty("logGroup") 60 | public void setLogGroup(String logGroup) { 61 | this.logGroup = logGroup; 62 | } 63 | 64 | @JsonProperty("logStream") 65 | public String getLogStream() { 66 | return logStream; 67 | } 68 | 69 | @JsonProperty("logStream") 70 | public void setLogStream(String logStream) { 71 | this.logStream = logStream; 72 | } 73 | 74 | @JsonProperty("messageType") 75 | public String getMessageType() { 76 | return messageType; 77 | } 78 | 79 | @JsonProperty("messageType") 80 | public void setMessageType(String messageType) { 81 | this.messageType = messageType; 82 | } 83 | 84 | @JsonProperty("owner") 85 | public String getOwner() { 86 | return owner; 87 | } 88 | 89 | @JsonProperty("owner") 90 | public void setOwner(String owner) { 91 | this.owner = owner; 92 | } 93 | 94 | @JsonProperty("subscriptionFilters") 95 | public List getSubscriptionFilters() { 96 | return subscriptionFilters; 97 | } 98 | 99 | @JsonProperty("subscriptionFilters") 100 | public void setSubscriptionFilters(List subscriptionFilters) { 101 | this.subscriptionFilters = subscriptionFilters; 102 | } 103 | 104 | @JsonAnyGetter 105 | public Map getAdditionalProperties() { 106 | return this.additionalProperties; 107 | } 108 | 109 | @JsonAnySetter 110 | public void setAdditionalProperty(String name, Object value) { 111 | this.additionalProperties.put(name, 
value); 112 | } 113 | 114 | @Override 115 | public String toString() { 116 | return ToStringBuilder.reflectionToString(this); 117 | } 118 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/model/LogEvent.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client.model; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import com.fasterxml.jackson.annotation.JsonAnyGetter; 7 | import com.fasterxml.jackson.annotation.JsonAnySetter; 8 | import com.fasterxml.jackson.annotation.JsonIgnore; 9 | import com.fasterxml.jackson.annotation.JsonInclude; 10 | import com.fasterxml.jackson.annotation.JsonProperty; 11 | import com.fasterxml.jackson.annotation.JsonPropertyOrder; 12 | 13 | @JsonInclude(JsonInclude.Include.NON_NULL) 14 | @JsonPropertyOrder({ 15 | "id", 16 | "message", 17 | "timestamp" 18 | }) 19 | 20 | public class LogEvent { 21 | 22 | @JsonProperty("id") 23 | private String id; 24 | @JsonProperty("message") 25 | private String message; 26 | @JsonProperty("timestamp") 27 | private Long timestamp; 28 | @JsonIgnore 29 | private Map additionalProperties = new HashMap(); 30 | 31 | @JsonProperty("id") 32 | public String getId() { 33 | return id; 34 | } 35 | 36 | @JsonProperty("id") 37 | public void setId(String id) { 38 | this.id = id; 39 | } 40 | 41 | @JsonProperty("message") 42 | public String getMessage() { 43 | return message; 44 | } 45 | 46 | @JsonProperty("message") 47 | public void setMessage(String message) { 48 | this.message = message; 49 | } 50 | 51 | @JsonProperty("timestamp") 52 | public Long getTimestamp() { 53 | return timestamp; 54 | } 55 | 56 | @JsonProperty("timestamp") 57 | public void setTimestamp(Long timestamp) { 58 | this.timestamp = timestamp; 59 | } 60 | 61 | @JsonAnyGetter 62 | public Map getAdditionalProperties() { 63 | return this.additionalProperties; 64 | } 65 | 66 | @JsonAnySetter 67 | public void setAdditionalProperty(String name, Object value) { 68 | this.additionalProperties.put(name, value); 69 | } 70 | 71 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/client/model/SimpleKinesisMessageModel.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client.model; 2 | 3 | import java.io.Serializable; 4 | 5 | public class SimpleKinesisMessageModel implements Serializable { 6 | private String data; 7 | private int id; 8 | 9 | public SimpleKinesisMessageModel(String data) { 10 | this.data = data; 11 | this.id = 1; 12 | } 13 | 14 | public String getData() { 15 | return data; 16 | } 17 | 18 | public void setData(String data) { 19 | this.data = data; 20 | } 21 | 22 | public int getId() { 23 | return id; 24 | } 25 | 26 | public void setId(int id) { 27 | this.id = id; 28 | } 29 | 30 | public String toString() { 31 | return data; 32 | } 33 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/BatchedStreamSource.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.ByteArrayOutputStream; 5 | import java.io.IOException; 6 | import java.io.InputStream; 7 | import java.io.InputStreamReader; 8 | import java.io.ObjectOutputStream; 9 | import java.nio.ByteBuffer; 10 | import java.util.ArrayList; 11 | import java.util.List; 
12 | import java.util.UUID; 13 | 14 | import org.apache.log4j.Logger; 15 | 16 | import com.sumologic.client.model.SimpleKinesisMessageModel; 17 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 18 | import com.amazonaws.services.kinesis.model.PutRecordRequest; 19 | 20 | /** 21 | * This class is a data source for supplying input to the Amazon Kinesis stream. It reads lines from the 22 | * input file specified in the constructor and batches up records before emitting them. 23 | */ 24 | public class BatchedStreamSource extends StreamSource { 25 | private static final Logger LOG = Logger.getLogger(BatchedStreamSource.class.getName()); 26 | 27 | private static int NUM_BYTES_PER_PUT_REQUEST = 50000; 28 | List buffer; 29 | 30 | public BatchedStreamSource(KinesisConnectorConfiguration config, String inputFile) { 31 | this(config, inputFile, false); 32 | } 33 | 34 | public BatchedStreamSource(KinesisConnectorConfiguration config, String inputFile, boolean loopOverStreamSource) { 35 | super(config, inputFile, loopOverStreamSource); 36 | buffer = new ArrayList(); 37 | } 38 | 39 | @Override 40 | protected void processInputStream(InputStream inputStream, int iteration) throws IOException { 41 | try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream))) { 42 | String line; 43 | int lines = 0; 44 | 45 | while ((line = br.readLine()) != null) { 46 | SimpleKinesisMessageModel kinesisMessageModel = objectMapper.readValue(line, SimpleKinesisMessageModel.class); 47 | buffer.add(kinesisMessageModel); 48 | if (numBytesInBuffer() > NUM_BYTES_PER_PUT_REQUEST) { 49 | /* 50 | * We need to remove the last record to ensure this data blob is accepted by the Amazon Kinesis 51 | * client which restricts the data blob to be less than 50 KB. 52 | */ 53 | SimpleKinesisMessageModel lastRecord = buffer.remove(buffer.size() - 1); 54 | flushBuffer(); 55 | /* 56 | * We add it back so it will be part of the next batch. 
57 | */ 58 | buffer.add(lastRecord); 59 | } 60 | lines++; 61 | } 62 | if (!buffer.isEmpty()) { 63 | flushBuffer(); 64 | } 65 | 66 | LOG.info("Added " + lines + " records to stream source."); 67 | } 68 | } 69 | 70 | private byte[] bufferToBytes() throws IOException { 71 | ByteArrayOutputStream bos = new ByteArrayOutputStream(); 72 | ObjectOutputStream oos = new ObjectOutputStream(bos); 73 | oos.writeObject(buffer); 74 | return bos.toByteArray(); 75 | } 76 | 77 | private int numBytesInBuffer() throws IOException { 78 | return bufferToBytes().length; 79 | } 80 | 81 | private void flushBuffer() throws IOException { 82 | PutRecordRequest putRecordRequest = new PutRecordRequest(); 83 | putRecordRequest.setStreamName(config.KINESIS_INPUT_STREAM); 84 | putRecordRequest.setData(ByteBuffer.wrap(bufferToBytes())); 85 | putRecordRequest.setPartitionKey(String.valueOf(UUID.randomUUID())); 86 | kinesisClient.putRecord(putRecordRequest); 87 | buffer.clear(); 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/KinesisConnectorExecutor.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Properties; 6 | 7 | import org.apache.log4j.Logger; 8 | 9 | import com.sumologic.client.KinesisConnectorForSumologicConfiguration; 10 | import com.amazonaws.auth.*; 11 | import com.amazonaws.auth.profile.ProfileCredentialsProvider; 12 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 13 | 14 | /** 15 | * This class defines the execution of a Amazon Kinesis Connector. 16 | * 17 | */ 18 | public abstract class KinesisConnectorExecutor extends KinesisConnectorExecutorBase { 19 | private static final Logger LOG = Logger.getLogger(KinesisConnectorExecutor.class.getName()); 20 | 21 | // Create Stream Source constants 22 | private static final String CREATE_STREAM_SOURCE = "createStreamSource"; 23 | private static final String LOOP_OVER_STREAM_SOURCE = "loopOverStreamSource"; 24 | private static final String INPUT_STREAM_FILE = "inputStreamFile"; 25 | 26 | private static final boolean DEFAULT_CREATE_STREAM_SOURCE = false; 27 | private static final boolean DEFAULT_LOOP_OVER_STREAM_SOURCE = false; 28 | 29 | 30 | // Class variables 31 | protected final KinesisConnectorForSumologicConfiguration config; 32 | private final Properties properties; 33 | 34 | public KinesisConnectorExecutor() { 35 | // Load ENV vars into properties 36 | LOG.info("Using ENV vars for properties"); 37 | 38 | properties = new Properties(); 39 | System.getenv().forEach(properties::setProperty); 40 | 41 | this.config = new KinesisConnectorForSumologicConfiguration(properties, getAWSCredentialsProvider()); 42 | 43 | // Send sample data to AWS Kinesis if specified in the properties file 44 | setupInputStream(); 45 | 46 | // Initialize executor with configurations 47 | super.initialize((KinesisConnectorConfiguration)config); 48 | } 49 | 50 | /** 51 | * Create a new KinesisConnectorExecutor based on the provided configuration (*.propertes) file. 
52 | * 53 | * @param configFile 54 | * The name of the configuration file to look for on the classpath 55 | */ 56 | public KinesisConnectorExecutor(String configFile) { 57 | // Load configuration properties 58 | InputStream configStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(configFile); 59 | 60 | if (configStream == null) { 61 | String msg = "Could not find resource " + configFile + " in the classpath"; 62 | throw new IllegalStateException(msg); 63 | } 64 | properties = new Properties(); 65 | try { 66 | properties.load(configStream); 67 | configStream.close(); 68 | } catch (IOException e) { 69 | String msg = "Could not load properties file " + configFile + " from classpath"; 70 | throw new IllegalStateException(msg, e); 71 | } 72 | this.config = new KinesisConnectorForSumologicConfiguration(properties, getAWSCredentialsProvider(configFile)); 73 | 74 | LOG.info("Using " + configFile); 75 | 76 | // Send sample data to AWS Kinesis if specified in the properties file 77 | setupInputStream(); 78 | 79 | // Initialize executor with configurations 80 | super.initialize((KinesisConnectorConfiguration)config); 81 | } 82 | 83 | /** 84 | * Returns an {@link AWSCredentialsProvider} with the permissions necessary to accomplish all specified 85 | * tasks. At the minimum it will require read permissions for Amazon Kinesis. Additional read permissions 86 | * and write permissions may be required based on the Pipeline used. 87 | * 88 | * @return 89 | */ 90 | public AWSCredentialsProvider getAWSCredentialsProvider() { 91 | return new DefaultAWSCredentialsProviderChain(); 92 | } 93 | 94 | public AWSCredentialsProvider getAWSCredentialsProvider(String configFile) { 95 | return new AWSCredentialsProviderChain( 96 | new EnvironmentVariableCredentialsProvider(), 97 | new SystemPropertiesCredentialsProvider(), 98 | new ProfileCredentialsProvider(), 99 | new EC2ContainerCredentialsProviderWrapper(), 100 | new ClasspathPropertiesFileCredentialsProvider(configFile) 101 | ); 102 | } 103 | 104 | /** 105 | * Helper method to spawn the {@link StreamSource} in a separate thread. 106 | */ 107 | private void setupInputStream() { 108 | if (parseBoolean(CREATE_STREAM_SOURCE, DEFAULT_CREATE_STREAM_SOURCE, properties)) { 109 | String inputFile = properties.getProperty(INPUT_STREAM_FILE); 110 | StreamSource streamSource; 111 | if (config.BATCH_RECORDS_IN_PUT_REQUEST) { 112 | streamSource = 113 | new BatchedStreamSource(config, inputFile, parseBoolean(LOOP_OVER_STREAM_SOURCE, 114 | DEFAULT_LOOP_OVER_STREAM_SOURCE, 115 | properties)); 116 | 117 | } else { 118 | streamSource = 119 | new StreamSource(config, inputFile, parseBoolean(LOOP_OVER_STREAM_SOURCE, 120 | DEFAULT_LOOP_OVER_STREAM_SOURCE, 121 | properties)); 122 | } 123 | Thread streamSourceThread = new Thread(streamSource); 124 | LOG.info("Starting stream source."); 125 | streamSourceThread.start(); 126 | } 127 | } 128 | 129 | 130 | /** 131 | * Helper method used to parse boolean properties. 
132 | * 133 | * @param property 134 | * The String key for the property 135 | * @param defaultValue 136 | * The default value for the boolean property 137 | * @param properties 138 | * The properties file to get property from 139 | * @return property from property file, or if it is not specified, the default value 140 | */ 141 | private static boolean parseBoolean(String property, boolean defaultValue, Properties properties) { 142 | return Boolean.parseBoolean(properties.getProperty(property, Boolean.toString(defaultValue))); 143 | 144 | } 145 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/KinesisConnectorExecutorBase.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import org.apache.log4j.Logger; 4 | 5 | import com.sumologic.kinesis.KinesisConnectorRecordProcessorFactory; 6 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration; 7 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker; 8 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 9 | import com.amazonaws.services.kinesis.metrics.impl.NullMetricsFactory; 10 | import com.amazonaws.services.kinesis.metrics.interfaces.IMetricsFactory; 11 | 12 | public abstract class KinesisConnectorExecutorBase implements Runnable { 13 | private static final Logger LOG = Logger.getLogger(KinesisConnectorExecutorBase.class.getName()); 14 | 15 | // Amazon Kinesis Client Library worker to process records 16 | protected Worker worker; 17 | 18 | /** 19 | * Initialize the Amazon Kinesis Client Library configuration and worker 20 | * 21 | * @param kinesisConnectorConfiguration Amazon Kinesis connector configuration 22 | */ 23 | protected void initialize(KinesisConnectorConfiguration kinesisConnectorConfiguration) { 24 | initialize(kinesisConnectorConfiguration, new NullMetricsFactory()); 25 | } 26 | 27 | /** 28 | * Initialize the Amazon Kinesis Client Library configuration and worker with metrics factory 29 | * 30 | * @param kinesisConnectorConfiguration Amazon Kinesis connector configuration 31 | * @param metricFactory would be used to emit metrics in Amazon Kinesis Client Library 32 | */ 33 | protected void 34 | initialize(KinesisConnectorConfiguration kinesisConnectorConfiguration, IMetricsFactory metricFactory) { 35 | 36 | KinesisClientLibConfiguration kinesisClientLibConfiguration = 37 | new KinesisClientLibConfiguration(kinesisConnectorConfiguration.APP_NAME, 38 | kinesisConnectorConfiguration.KINESIS_INPUT_STREAM, 39 | kinesisConnectorConfiguration.AWS_CREDENTIALS_PROVIDER, 40 | kinesisConnectorConfiguration.WORKER_ID).withKinesisEndpoint(kinesisConnectorConfiguration.KINESIS_ENDPOINT) 41 | .withFailoverTimeMillis(kinesisConnectorConfiguration.FAILOVER_TIME) 42 | .withMaxRecords(kinesisConnectorConfiguration.MAX_RECORDS) 43 | .withInitialPositionInStream(kinesisConnectorConfiguration.INITIAL_POSITION_IN_STREAM) 44 | .withIdleTimeBetweenReadsInMillis(kinesisConnectorConfiguration.IDLE_TIME_BETWEEN_READS) 45 | .withCallProcessRecordsEvenForEmptyRecordList(KinesisConnectorConfiguration.DEFAULT_CALL_PROCESS_RECORDS_EVEN_FOR_EMPTY_LIST) 46 | .withCleanupLeasesUponShardCompletion(kinesisConnectorConfiguration.CLEANUP_TERMINATED_SHARDS_BEFORE_EXPIRY) 47 | .withParentShardPollIntervalMillis(kinesisConnectorConfiguration.PARENT_SHARD_POLL_INTERVAL) 48 | 
.withShardSyncIntervalMillis(kinesisConnectorConfiguration.SHARD_SYNC_INTERVAL) 49 | .withTaskBackoffTimeMillis(kinesisConnectorConfiguration.BACKOFF_INTERVAL) 50 | .withMetricsBufferTimeMillis(kinesisConnectorConfiguration.CLOUDWATCH_BUFFER_TIME) 51 | .withMetricsMaxQueueSize(kinesisConnectorConfiguration.CLOUDWATCH_MAX_QUEUE_SIZE) 52 | .withUserAgent(kinesisConnectorConfiguration.APP_NAME + "," 53 | + kinesisConnectorConfiguration.CONNECTOR_DESTINATION + "," 54 | + KinesisConnectorConfiguration.KINESIS_CONNECTOR_USER_AGENT) 55 | .withRegionName(kinesisConnectorConfiguration.REGION_NAME); 56 | 57 | 58 | if (!kinesisConnectorConfiguration.CALL_PROCESS_RECORDS_EVEN_FOR_EMPTY_LIST) { 59 | LOG.warn("The false value of callProcessRecordsEvenForEmptyList will be ignored. It must be set to true for the bufferTimeMillisecondsLimit to work correctly."); 60 | } 61 | 62 | if (kinesisConnectorConfiguration.IDLE_TIME_BETWEEN_READS > kinesisConnectorConfiguration.BUFFER_MILLISECONDS_LIMIT) { 63 | LOG.warn("idleTimeBetweenReads is greater than bufferTimeMillisecondsLimit. For best results, ensure that bufferTimeMillisecondsLimit is more than or equal to idleTimeBetweenReads "); 64 | } 65 | 66 | // If a metrics factory was specified, use it. 67 | if (metricFactory != null) { 68 | worker = 69 | new Worker(getKinesisConnectorRecordProcessorFactory(), 70 | kinesisClientLibConfiguration, 71 | metricFactory); 72 | } else { 73 | worker = new Worker(getKinesisConnectorRecordProcessorFactory(), kinesisClientLibConfiguration); 74 | } 75 | LOG.info(getClass().getSimpleName() + " worker created"); 76 | } 77 | 78 | @Override 79 | public void run() { 80 | if (worker != null) { 81 | // Start Amazon Kinesis Client Library worker to process records 82 | LOG.info("Starting worker in " + getClass().getSimpleName()); 83 | try { 84 | worker.run(); 85 | } catch (Throwable t) { 86 | LOG.error(t); 87 | throw t; 88 | } finally { 89 | LOG.error("Worker " + getClass().getSimpleName() + " is not running."); 90 | } 91 | } else { 92 | throw new RuntimeException("Initialize must be called before run."); 93 | } 94 | } 95 | 96 | /** 97 | * This method returns a {@link KinesisConnectorRecordProcessorFactory} that contains the 98 | * appropriate {@link IKinesisConnectorPipeline} for the Amazon Kinesis Enabled Application 99 | * 100 | * @return a {@link KinesisConnectorRecordProcessorFactory} that contains the appropriate 101 | * {@link IKinesisConnectorPipeline} for the Amazon Kinesis Enabled Application 102 | */ 103 | public abstract KinesisConnectorRecordProcessorFactory getKinesisConnectorRecordProcessorFactory(); 104 | } 105 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/KinesisConnectorMetricsExecutor.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import com.amazonaws.services.kinesis.metrics.impl.CWMetricsFactory; 4 | import com.amazonaws.services.kinesis.metrics.interfaces.IMetricsFactory; 5 | 6 | /** 7 | * This class defines the execution of an Amazon Kinesis Connector with Amazon CloudWatch metrics. 8 | * 9 | */ 10 | public abstract class KinesisConnectorMetricsExecutor extends KinesisConnectorExecutor { 11 | 12 | /** 13 | * Creates a new KinesisConnectorMetricsExecutor. 
14 | * 15 | * @param configFile The name of the configuration file to look for on the classpath 16 | */ 17 | public KinesisConnectorMetricsExecutor(String configFile) { 18 | super(configFile); 19 | 20 | // Amazon CloudWatch Metrics Factory used to emit metrics in KCL 21 | IMetricsFactory mFactory = 22 | new CWMetricsFactory(config.AWS_CREDENTIALS_PROVIDER, 23 | config.CLOUDWATCH_NAMESPACE, 24 | config.CLOUDWATCH_BUFFER_TIME, 25 | config.CLOUDWATCH_MAX_QUEUE_SIZE); 26 | super.initialize(config, mFactory); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/KinesisConnectorRecordProcessor.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.Collection; 6 | import java.util.List; 7 | 8 | import org.apache.log4j.Logger; 9 | 10 | import com.amazonaws.services.kinesis.clientlibrary.exceptions.InvalidStateException; 11 | import com.amazonaws.services.kinesis.clientlibrary.exceptions.KinesisClientLibDependencyException; 12 | import com.amazonaws.services.kinesis.clientlibrary.exceptions.ShutdownException; 13 | import com.amazonaws.services.kinesis.clientlibrary.exceptions.ThrottlingException; 14 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor; 15 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer; 16 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason; 17 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 18 | import com.amazonaws.services.kinesis.connectors.UnmodifiableBuffer; 19 | import com.amazonaws.services.kinesis.connectors.interfaces.IBuffer; 20 | import com.amazonaws.services.kinesis.connectors.interfaces.ICollectionTransformer; 21 | import com.amazonaws.services.kinesis.connectors.interfaces.IEmitter; 22 | import com.amazonaws.services.kinesis.connectors.interfaces.IFilter; 23 | import com.amazonaws.services.kinesis.connectors.interfaces.ITransformer; 24 | import com.amazonaws.services.kinesis.connectors.interfaces.ITransformerBase; 25 | import com.amazonaws.services.kinesis.model.Record; 26 | 27 | /** 28 | * This is the base class for any KinesisConnector. It is configured by a constructor that takes in 29 | * as parameters implementations of the IBuffer, ITransformer, and IEmitter dependencies defined in 30 | * a IKinesisConnectorPipeline. It is typed to match the class that records are transformed into for 31 | * filtering and manipulation. This class is produced by a KinesisConnectorRecordProcessorFactory. 32 | *

33 | * When a Worker calls processRecords() on this class, the pipeline is used in the following way: 34 | *
35 | * 1. Records are transformed into the corresponding data model (parameter type T) via the ITransformer.
36 | * 2. Transformed records are passed to the IBuffer.consumeRecord() method, which may optionally filter based on the 37 | * IFilter in the pipeline.
38 | * 3. When the buffer is full (IBuffer.shouldFlush() returns true), records are transformed with the ITransformer to 39 | * the output type (parameter type U) and a call is made to IEmitter.emit(). IEmitter.emit() returning an empty list is 40 | * considered a success, so the record processor will checkpoint and emit will not be retried. Non-empty return values 41 | * will result in additional calls to emit with failed records as the unprocessed list until the retry limit is reached. 42 | * Upon exceeding the retry limit or an exception being thrown, the IEmitter.fail() method will be called with the 43 | * unprocessed records.
44 | * 4. When the shutdown() method of this class is invoked, a call is made to the IEmitter.shutdown() method which 45 | * should close any existing client connections.
46 | *
47 | * 48 | */ 49 | public class KinesisConnectorRecordProcessor implements IRecordProcessor { 50 | 51 | private final IEmitter emitter; 52 | private final ITransformerBase transformer; 53 | private final IFilter filter; 54 | private final IBuffer buffer; 55 | private final int retryLimit; 56 | private final long backoffInterval; 57 | private boolean isShutdown = false; 58 | 59 | private static final Logger LOG = Logger.getLogger(KinesisConnectorRecordProcessor.class.getName()); 60 | 61 | private String shardId; 62 | 63 | public KinesisConnectorRecordProcessor(IBuffer buffer, 64 | IFilter filter, 65 | IEmitter emitter, 66 | ITransformerBase transformer, 67 | KinesisConnectorConfiguration configuration) { 68 | if (buffer == null || filter == null || emitter == null || transformer == null) { 69 | throw new IllegalArgumentException("buffer, filter, emitter, and transformer must not be null"); 70 | } 71 | this.buffer = buffer; 72 | this.filter = filter; 73 | this.emitter = emitter; 74 | this.transformer = transformer; 75 | // Limit must be greater than zero 76 | if (configuration.RETRY_LIMIT <= 0) { 77 | retryLimit = 1; 78 | } else { 79 | retryLimit = configuration.RETRY_LIMIT; 80 | } 81 | this.backoffInterval = configuration.BACKOFF_INTERVAL; 82 | } 83 | 84 | @Override 85 | public void initialize(String shardId) { 86 | this.shardId = shardId; 87 | } 88 | 89 | @Override 90 | public void processRecords(List records, IRecordProcessorCheckpointer checkpointer) { 91 | // Note: This method will be called even for empty record lists. This is needed for checking the buffer time 92 | // threshold. 93 | if (isShutdown) { 94 | LOG.warn("processRecords called on shutdown record processor for shardId: " + shardId); 95 | return; 96 | } 97 | if (shardId == null) { 98 | throw new IllegalStateException("Record processor not initialized"); 99 | } 100 | 101 | // Transform each Amazon Kinesis Record and add the result to the buffer 102 | for (Record record : records) { 103 | try { 104 | if (transformer instanceof ITransformer) { 105 | ITransformer singleTransformer = (ITransformer) transformer; 106 | filterAndBufferRecord(singleTransformer.toClass(record), record); 107 | } else if (transformer instanceof ICollectionTransformer) { 108 | ICollectionTransformer listTransformer = (ICollectionTransformer) transformer; 109 | Collection transformedRecords = listTransformer.toClass(record); 110 | for (T transformedRecord : transformedRecords) { 111 | filterAndBufferRecord(transformedRecord, record); 112 | } 113 | } else { 114 | throw new RuntimeException("Transformer must implement ITransformer or ICollectionTransformer"); 115 | } 116 | } catch (IOException e) { 117 | LOG.error(e); 118 | } 119 | } 120 | 121 | if (buffer.shouldFlush()) { 122 | List emitItems = transformToOutput(buffer.getRecords()); 123 | emit(checkpointer, emitItems); 124 | } 125 | } 126 | 127 | private void filterAndBufferRecord(T transformedRecord, Record record) { 128 | if (filter.keepRecord(transformedRecord)) { 129 | buffer.consumeRecord(transformedRecord, record.getData().array().length, record.getSequenceNumber()); 130 | } 131 | } 132 | 133 | private List transformToOutput(List items) { 134 | List emitItems = new ArrayList(); 135 | for (T item : items) { 136 | try { 137 | emitItems.add(transformer.fromClass(item)); 138 | } catch (IOException e) { 139 | LOG.error("Failed to transform record " + item + " to output type", e); 140 | } 141 | } 142 | return emitItems; 143 | } 144 | 145 | private void emit(IRecordProcessorCheckpointer checkpointer, List 
emitItems) { 146 | List unprocessed = new ArrayList(emitItems); 147 | try { 148 | for (int numTries = 0; numTries < retryLimit; numTries++) { 149 | unprocessed = emitter.emit(new UnmodifiableBuffer(buffer, unprocessed)); 150 | if (unprocessed.isEmpty()) { 151 | break; 152 | } 153 | try { 154 | Thread.sleep(backoffInterval); 155 | } catch (InterruptedException e) { 156 | } 157 | } 158 | if (!unprocessed.isEmpty()) { 159 | emitter.fail(unprocessed); 160 | } 161 | final String lastSequenceNumberProcessed = buffer.getLastSequenceNumber(); 162 | buffer.clear(); 163 | // checkpoint once all the records have been consumed 164 | if (lastSequenceNumberProcessed != null && unprocessed.isEmpty()) { 165 | checkpointer.checkpoint(lastSequenceNumberProcessed); 166 | } 167 | } catch (IOException | KinesisClientLibDependencyException | InvalidStateException | ThrottlingException 168 | | ShutdownException e) { 169 | LOG.error(e); 170 | emitter.fail(unprocessed); 171 | } 172 | } 173 | 174 | @Override 175 | public void shutdown(IRecordProcessorCheckpointer checkpointer, ShutdownReason reason) { 176 | LOG.info("Shutting down record processor with shardId: " + shardId + " with reason " + reason); 177 | if (isShutdown) { 178 | LOG.warn("Record processor for shardId: " + shardId + " has been shutdown multiple times."); 179 | return; 180 | } 181 | switch (reason) { 182 | case TERMINATE: 183 | emit(checkpointer, transformToOutput(buffer.getRecords())); 184 | try { 185 | checkpointer.checkpoint(); 186 | } catch (KinesisClientLibDependencyException | InvalidStateException | ThrottlingException | ShutdownException e) { 187 | LOG.error(e); 188 | } 189 | break; 190 | case ZOMBIE: 191 | break; 192 | default: 193 | throw new IllegalStateException("invalid shutdown reason"); 194 | } 195 | emitter.shutdown(); 196 | isShutdown = true; 197 | } 198 | 199 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/KinesisConnectorRecordProcessorFactory.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor; 4 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorFactory; 5 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 6 | import com.amazonaws.services.kinesis.connectors.interfaces.IBuffer; 7 | import com.amazonaws.services.kinesis.connectors.interfaces.IEmitter; 8 | import com.amazonaws.services.kinesis.connectors.interfaces.IFilter; 9 | import com.amazonaws.services.kinesis.connectors.interfaces.IKinesisConnectorPipeline; 10 | import com.amazonaws.services.kinesis.connectors.interfaces.ITransformerBase; 11 | 12 | /** 13 | * This class is used to generate KinesisConnectorRecordProcessors that operate using the user's 14 | * implemented classes. The createProcessor() method sets the dependencies of the 15 | * KinesisConnectorRecordProcessor that are specified in the KinesisConnectorPipeline argument, 16 | * which accesses instances of the users implementations. 
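The emit() method of KinesisConnectorRecordProcessor above retries the emitter up to retryLimit times, sleeping backoffInterval between attempts, and hands anything still unprocessed to IEmitter.fail(); a checkpoint is taken only when everything buffered so far was emitted. A plain-Java sketch of that retry shape, using a stand-in emitter interface rather than the connector library's IEmitter:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the emit/retry/fail loop described above.
    public class EmitRetrySketch {

        interface SimpleEmitter {
            List<String> emit(List<String> records); // returns the records that could NOT be sent
            void fail(List<String> records);
        }

        static void emitWithRetries(SimpleEmitter emitter, List<String> items,
                                    int retryLimit, long backoffMillis) throws InterruptedException {
            List<String> unprocessed = new ArrayList<String>(items);
            for (int numTries = 0; numTries < retryLimit; numTries++) {
                unprocessed = emitter.emit(unprocessed);
                if (unprocessed.isEmpty()) {
                    break;
                }
                Thread.sleep(backoffMillis);
            }
            if (!unprocessed.isEmpty()) {
                emitter.fail(unprocessed);
            }
            // The real processor then checkpoints at the last buffered sequence
            // number only if the unprocessed list ended up empty.
        }
    }
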
17 | */ 18 | public class KinesisConnectorRecordProcessorFactory implements IRecordProcessorFactory { 19 | 20 | private IKinesisConnectorPipeline pipeline; 21 | private KinesisConnectorConfiguration configuration; 22 | 23 | public KinesisConnectorRecordProcessorFactory(IKinesisConnectorPipeline pipeline, 24 | KinesisConnectorConfiguration configuration) { 25 | this.configuration = configuration; 26 | this.pipeline = pipeline; 27 | } 28 | 29 | @Override 30 | public IRecordProcessor createProcessor() { 31 | try { 32 | IBuffer buffer = pipeline.getBuffer(configuration); 33 | IEmitter emitter = pipeline.getEmitter(configuration); 34 | ITransformerBase transformer = pipeline.getTransformer(configuration); 35 | IFilter filter = pipeline.getFilter(configuration); 36 | KinesisConnectorRecordProcessor processor = 37 | new KinesisConnectorRecordProcessor(buffer, filter, emitter, transformer, configuration); 38 | return processor; 39 | } catch (Throwable t) { 40 | throw new RuntimeException(t); 41 | } 42 | } 43 | } -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/StreamSource.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.IOException; 5 | import java.io.InputStream; 6 | import java.io.InputStreamReader; 7 | import java.nio.ByteBuffer; 8 | 9 | import org.apache.log4j.Logger; 10 | 11 | import com.sumologic.client.model.SimpleKinesisMessageModel; 12 | import com.sumologic.kinesis.utils.KinesisUtils; 13 | import com.amazonaws.auth.AWSCredentialsProvider; 14 | import com.amazonaws.regions.RegionUtils; 15 | import com.amazonaws.services.kinesis.AmazonKinesisClient; 16 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 17 | import com.amazonaws.services.kinesis.model.PutRecordRequest; 18 | import com.fasterxml.jackson.databind.ObjectMapper; 19 | 20 | /** 21 | * This class is a data source for supplying input to the Amazon Kinesis stream. It reads lines from the 22 | * input file specified in the constructor and emits them by calling String.getBytes() into the 23 | * stream defined in the KinesisConnectorConfiguration. 24 | */ 25 | public class StreamSource implements Runnable { 26 | private static final Logger LOG = Logger.getLogger(StreamSource.class.getName()); 27 | protected AmazonKinesisClient kinesisClient; 28 | protected KinesisConnectorConfiguration config; 29 | protected final String inputFile; 30 | protected final boolean loopOverInputFile; 31 | protected ObjectMapper objectMapper; 32 | 33 | /** 34 | * Creates a new StreamSource. 35 | * 36 | * @param config 37 | * Configuration to determine which stream to put records to and get {@link AWSCredentialsProvider} 38 | * @param inputFile 39 | * File containing record data to emit on each line 40 | */ 41 | public StreamSource(KinesisConnectorConfiguration config, String inputFile) { 42 | this(config, inputFile, false); 43 | } 44 | 45 | /** 46 | * Creates a new StreamSource. 
47 | * 48 | * @param config 49 | * Configuration to determine which stream to put records to and get {@link AWSCredentialsProvider} 50 | * @param inputFile 51 | * File containing record data to emit on each line 52 | * @param loopOverStreamSource 53 | * Loop over the stream source to continually put records 54 | */ 55 | public StreamSource(KinesisConnectorConfiguration config, String inputFile, boolean loopOverStreamSource) { 56 | this.config = config; 57 | this.inputFile = inputFile; 58 | this.loopOverInputFile = loopOverStreamSource; 59 | this.objectMapper = new ObjectMapper(); 60 | kinesisClient = new AmazonKinesisClient(config.AWS_CREDENTIALS_PROVIDER); 61 | kinesisClient.setRegion(RegionUtils.getRegion(config.REGION_NAME)); 62 | if (config.KINESIS_ENDPOINT != null) { 63 | kinesisClient.setEndpoint(config.KINESIS_ENDPOINT); 64 | } 65 | KinesisUtils.createInputStream(config); 66 | } 67 | 68 | @Override 69 | public void run() { 70 | int iteration = 0; 71 | do { 72 | InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(inputFile); 73 | if (inputStream == null) { 74 | throw new IllegalStateException("Could not find input file: " + inputFile); 75 | } 76 | if (loopOverInputFile) { 77 | LOG.info("Starting iteration " + iteration + " over input file."); 78 | } 79 | try { 80 | processInputStream(inputStream, iteration); 81 | } catch (IOException e) { 82 | LOG.error("Encountered exception while putting data in source stream.", e); 83 | break; 84 | } 85 | iteration++; 86 | } while (loopOverInputFile); 87 | } 88 | 89 | /** 90 | * Process the input file and send PutRecordRequests to Amazon Kinesis. 91 | * 92 | * This function serves to Isolate StreamSource logic so subclasses 93 | * can process input files differently. 94 | * 95 | * @param inputStream 96 | * the input stream to process 97 | * @param iteration 98 | * the iteration if looping over file 99 | * @throws IOException 100 | * throw exception if error processing inputStream. 
101 | */ 102 | protected void processInputStream(InputStream inputStream, int iteration) throws IOException { 103 | try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream))) { 104 | String line; 105 | int lines = 0; 106 | while ((line = br.readLine()) != null) { 107 | SimpleKinesisMessageModel kinesisMessageModel = new SimpleKinesisMessageModel(line); 108 | //SimpleKinesisMessageModel kinesisMessageModel = objectMapper.readValue(line, SimpleKinesisMessageModel.class); 109 | 110 | PutRecordRequest putRecordRequest = new PutRecordRequest(); 111 | putRecordRequest.setStreamName(config.KINESIS_INPUT_STREAM); 112 | putRecordRequest.setData(ByteBuffer.wrap(line.getBytes())); 113 | putRecordRequest.setPartitionKey(Integer.toString(kinesisMessageModel.getId())); 114 | kinesisClient.putRecord(putRecordRequest); 115 | lines++; 116 | } 117 | LOG.info("Added " + lines + " records to stream source."); 118 | } 119 | } 120 | 121 | @Override 122 | protected void finalize() throws Throwable { 123 | super.finalize(); 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/main/java/com/sumologic/kinesis/utils/KinesisUtils.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.kinesis.utils; 2 | 3 | import java.util.List; 4 | 5 | import org.apache.commons.logging.Log; 6 | import org.apache.commons.logging.LogFactory; 7 | 8 | import com.amazonaws.AmazonServiceException; 9 | import com.amazonaws.auth.AWSCredentialsProvider; 10 | import com.amazonaws.regions.RegionUtils; 11 | import com.amazonaws.services.kinesis.AmazonKinesisClient; 12 | import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration; 13 | import com.amazonaws.services.kinesis.model.CreateStreamRequest; 14 | import com.amazonaws.services.kinesis.model.DeleteStreamRequest; 15 | import com.amazonaws.services.kinesis.model.DescribeStreamRequest; 16 | import com.amazonaws.services.kinesis.model.ListStreamsRequest; 17 | import com.amazonaws.services.kinesis.model.ListStreamsResult; 18 | import com.amazonaws.services.kinesis.model.ResourceNotFoundException; 19 | 20 | /** 21 | * Utilities to create and delete Amazon Kinesis streams. 22 | */ 23 | public class KinesisUtils { 24 | 25 | private static Log LOG = LogFactory.getLog(KinesisUtils.class); 26 | 27 | /** 28 | * Creates the Amazon Kinesis stream specified by config.KINESIS_INPUT_STREAM 29 | * 30 | * @param config 31 | * The configuration with the specified input stream name and {@link AWSCredentialsProvider} 32 | * @param shardCount 33 | * The shard count to create the stream with 34 | */ 35 | public static void createInputStream(KinesisConnectorConfiguration config) { 36 | AmazonKinesisClient kinesisClient = new AmazonKinesisClient(config.AWS_CREDENTIALS_PROVIDER); 37 | kinesisClient.setRegion(RegionUtils.getRegion(config.REGION_NAME)); 38 | if (config.KINESIS_ENDPOINT != null) { 39 | kinesisClient.setEndpoint(config.KINESIS_ENDPOINT); 40 | } 41 | createAndWaitForStreamToBecomeAvailable(kinesisClient, 42 | config.KINESIS_INPUT_STREAM, 43 | config.KINESIS_INPUT_STREAM_SHARD_COUNT); 44 | } 45 | 46 | /** 47 | * Creates the Amazon Kinesis stream specified by config.KINESIS_OUTPUT_STREAM. 
48 | * 49 | * @param config 50 | * The configuration with the specified output stream name and {@link AWSCredentialsProvider} 51 | * @param shardCount 52 | * The shard count to create the stream with 53 | */ 54 | public static void createOutputStream(KinesisConnectorConfiguration config) { 55 | AmazonKinesisClient kinesisClient = new AmazonKinesisClient(config.AWS_CREDENTIALS_PROVIDER); 56 | kinesisClient.setRegion(RegionUtils.getRegion(config.REGION_NAME)); 57 | if (config.KINESIS_ENDPOINT != null) { 58 | kinesisClient.setEndpoint(config.KINESIS_ENDPOINT); 59 | } 60 | createAndWaitForStreamToBecomeAvailable(kinesisClient, 61 | config.KINESIS_OUTPUT_STREAM, 62 | config.KINESIS_OUTPUT_STREAM_SHARD_COUNT); 63 | } 64 | 65 | /** 66 | * Creates an Amazon Kinesis stream if it does not exist and waits for it to become available 67 | * 68 | * @param kinesisClient 69 | * The {@link AmazonKinesisClient} with Amazon Kinesis read and write privileges 70 | * @param streamName 71 | * The Amazon Kinesis stream name to create 72 | * @param shardCount 73 | * The shard count to create the stream with 74 | * @throws IllegalStateException 75 | * Invalid Amazon Kinesis stream state 76 | * @throws IllegalStateException 77 | * Stream does not go active before the timeout 78 | */ 79 | public static void createAndWaitForStreamToBecomeAvailable(AmazonKinesisClient kinesisClient, 80 | String streamName, 81 | int shardCount) { 82 | if (streamExists(kinesisClient, streamName)) { 83 | String state = streamState(kinesisClient, streamName); 84 | switch (state) { 85 | case "DELETING": 86 | long startTime = System.currentTimeMillis(); 87 | long endTime = startTime + 1000 * 120; 88 | while (System.currentTimeMillis() < endTime && streamExists(kinesisClient, streamName)) { 89 | try { 90 | LOG.info("...Deleting Stream " + streamName + "..."); 91 | Thread.sleep(1000 * 10); 92 | } catch (InterruptedException e) { 93 | } 94 | } 95 | if (streamExists(kinesisClient, streamName)) { 96 | LOG.error("KinesisUtils timed out waiting for stream " + streamName + " to delete"); 97 | throw new IllegalStateException("KinesisUtils timed out waiting for stream " + streamName 98 | + " to delete"); 99 | } 100 | case "ACTIVE": 101 | LOG.info("Stream " + streamName + " is ACTIVE"); 102 | return; 103 | case "CREATING": 104 | break; 105 | case "UPDATING": 106 | LOG.info("Stream " + streamName + " is UPDATING"); 107 | return; 108 | default: 109 | throw new IllegalStateException("Illegal stream state: " + state); 110 | } 111 | } else { 112 | CreateStreamRequest createStreamRequest = new CreateStreamRequest(); 113 | createStreamRequest.setStreamName(streamName); 114 | createStreamRequest.setShardCount(shardCount); 115 | kinesisClient.createStream(createStreamRequest); 116 | LOG.info("Stream " + streamName + " created"); 117 | } 118 | long startTime = System.currentTimeMillis(); 119 | long endTime = startTime + (10 * 60 * 1000); 120 | while (System.currentTimeMillis() < endTime) { 121 | try { 122 | Thread.sleep(1000 * 10); 123 | } catch (Exception e) { 124 | } 125 | try { 126 | String streamStatus = streamState(kinesisClient, streamName); 127 | if (streamStatus.equals("ACTIVE")) { 128 | LOG.info("Stream " + streamName + " is ACTIVE"); 129 | return; 130 | } 131 | } catch (ResourceNotFoundException e) { 132 | throw new IllegalStateException("Stream " + streamName + " never went active"); 133 | } 134 | } 135 | } 136 | 137 | /** 138 | * Helper method to determine if an Amazon Kinesis stream exists. 
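createAndWaitForStreamToBecomeAvailable() above polls DescribeStream roughly every ten seconds until the stream reports ACTIVE or a timeout expires. A condensed sketch of just that wait loop, using the same SDK calls as the original; client construction and the create/delete branches are omitted:

    import com.amazonaws.services.kinesis.AmazonKinesisClient;
    import com.amazonaws.services.kinesis.model.DescribeStreamRequest;
    import com.amazonaws.services.kinesis.model.ResourceNotFoundException;

    public class StreamWaitSketch {

        // Poll DescribeStream until the stream is ACTIVE. Unlike the original,
        // which returns silently when the timeout expires, this sketch fails loudly.
        static void waitForActive(AmazonKinesisClient kinesisClient, String streamName,
                                  long timeoutMillis) throws InterruptedException {
            long deadline = System.currentTimeMillis() + timeoutMillis;
            DescribeStreamRequest request = new DescribeStreamRequest();
            request.setStreamName(streamName);
            while (System.currentTimeMillis() < deadline) {
                Thread.sleep(10 * 1000);
                try {
                    String status = kinesisClient.describeStream(request)
                            .getStreamDescription().getStreamStatus();
                    if ("ACTIVE".equals(status)) {
                        return;
                    }
                } catch (ResourceNotFoundException e) {
                    throw new IllegalStateException("Stream " + streamName + " never went active", e);
                }
            }
            throw new IllegalStateException("Timed out waiting for stream " + streamName + " to become ACTIVE");
        }
    }
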
139 | * 140 | * @param kinesisClient 141 | * The {@link AmazonKinesisClient} with Amazon Kinesis read privileges 142 | * @param streamName 143 | * The Amazon Kinesis stream to check for 144 | * @return true if the Amazon Kinesis stream exists, otherwise return false 145 | */ 146 | private static boolean streamExists(AmazonKinesisClient kinesisClient, String streamName) { 147 | DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest(); 148 | describeStreamRequest.setStreamName(streamName); 149 | try { 150 | kinesisClient.describeStream(describeStreamRequest); 151 | return true; 152 | } catch (ResourceNotFoundException e) { 153 | return false; 154 | } 155 | } 156 | 157 | /** 158 | * Return the state of a Amazon Kinesis stream. 159 | * 160 | * @param kinesisClient 161 | * The {@link AmazonKinesisClient} with Amazon Kinesis read privileges 162 | * @param streamName 163 | * The Amazon Kinesis stream to get the state of 164 | * @return String representation of the Stream state 165 | */ 166 | private static String streamState(AmazonKinesisClient kinesisClient, String streamName) { 167 | DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest(); 168 | describeStreamRequest.setStreamName(streamName); 169 | try { 170 | return kinesisClient.describeStream(describeStreamRequest).getStreamDescription().getStreamStatus(); 171 | } catch (AmazonServiceException e) { 172 | return null; 173 | } 174 | } 175 | 176 | /** 177 | * Gets a list of all Amazon Kinesis streams 178 | * 179 | * @param kinesisClient 180 | * The {@link AmazonKinesisClient} with Amazon Kinesis read privileges 181 | * @return list of Amazon Kinesis streams 182 | */ 183 | public static List listAllStreams(AmazonKinesisClient kinesisClient) { 184 | 185 | ListStreamsRequest listStreamsRequest = new ListStreamsRequest(); 186 | listStreamsRequest.setLimit(10); 187 | ListStreamsResult listStreamsResult = kinesisClient.listStreams(listStreamsRequest); 188 | List streamNames = listStreamsResult.getStreamNames(); 189 | while (listStreamsResult.isHasMoreStreams()) { 190 | if (streamNames.size() > 0) { 191 | listStreamsRequest.setExclusiveStartStreamName(streamNames.get(streamNames.size() - 1)); 192 | } 193 | 194 | listStreamsResult = kinesisClient.listStreams(listStreamsRequest); 195 | streamNames.addAll(listStreamsResult.getStreamNames()); 196 | } 197 | return streamNames; 198 | } 199 | 200 | /** 201 | * Deletes the input stream specified by config.KINESIS_INPUT_STREAM 202 | * 203 | * @param config 204 | * The configuration containing the stream name and {@link AWSCredentialsProvider} 205 | */ 206 | public static void deleteInputStream(KinesisConnectorConfiguration config) { 207 | AmazonKinesisClient kinesisClient = new AmazonKinesisClient(config.AWS_CREDENTIALS_PROVIDER); 208 | kinesisClient.setRegion(RegionUtils.getRegion(config.REGION_NAME)); 209 | if (config.KINESIS_ENDPOINT != null) { 210 | kinesisClient.setEndpoint(config.KINESIS_ENDPOINT); 211 | } 212 | deleteStream(kinesisClient, config.KINESIS_INPUT_STREAM); 213 | } 214 | 215 | /** 216 | * Deletes the output stream specified by config.KINESIS_OUTPUT_STREAM 217 | * 218 | * @param config 219 | * The configuration containing the stream name and {@link AWSCredentialsProvider} 220 | */ 221 | public static void deleteOutputStream(KinesisConnectorConfiguration config) { 222 | AmazonKinesisClient kinesisClient = new AmazonKinesisClient(config.AWS_CREDENTIALS_PROVIDER); 223 | kinesisClient.setRegion(RegionUtils.getRegion(config.REGION_NAME)); 224 | if 
(config.KINESIS_ENDPOINT != null) { 225 | kinesisClient.setEndpoint(config.KINESIS_ENDPOINT); 226 | } 227 | deleteStream(kinesisClient, config.KINESIS_OUTPUT_STREAM); 228 | } 229 | 230 | /** 231 | * Deletes an Amazon Kinesis stream if it exists. 232 | * 233 | * @param kinesisClient 234 | * The {@link AmazonKinesisClient} with Amazon Kinesis read and write privileges 235 | * @param streamName 236 | * The Amazon Kinesis stream to delete 237 | */ 238 | public static void deleteStream(AmazonKinesisClient kinesisClient, String streamName) { 239 | if (streamExists(kinesisClient, streamName)) { 240 | DeleteStreamRequest deleteStreamRequest = new DeleteStreamRequest(); 241 | deleteStreamRequest.setStreamName(streamName); 242 | kinesisClient.deleteStream(deleteStreamRequest); 243 | LOG.info("Deleting stream " + streamName); 244 | } else { 245 | LOG.warn("Stream " + streamName + " does not exist"); 246 | } 247 | } 248 | 249 | } 250 | -------------------------------------------------------------------------------- /src/main/resources/SumologicConnector.properties.stub: -------------------------------------------------------------------------------- 1 | # Fill in your AWS Access Key ID and Secret Access Key 2 | # http://aws.amazon.com/security-credentials 3 | accessKey = [ACCESS-KEY] 4 | secretKey = [SECRET-KEY] 5 | 6 | # KinesisConnector Application Settings 7 | # Since Kinesis creates a DynamoDB table for each app, 8 | # each appName must be unique for different kinesisInputStreams and connectorDestinations 9 | appName = kinesisToSumologicConnector 10 | 11 | # By specifying the region name, the connector will connect to the Amazon Kinesis stream in this region 12 | # unless the endpoint for Amazon Kinesis is explicitly specified. The Amazon DynamoDB lease table and Amazon CloudWatch 13 | # metrics for the connector will be created in this region. Resources at the outgoing destination are 14 | # not affected by this region name. 15 | regionName = us-east-1 16 | retryLimit = 3 17 | backoffInterval = 50000 18 | bufferRecordCountLimit = 100 19 | bufferMillisecondsLimit = 10000 20 | 21 | # Amazon Kinesis parameters for KinesisConnector 22 | 23 | # Uncomment the following property if you would like to explicitly configure the Amazon Kinesis endpoint. 24 | # This property will configure the connector's Amazon Kinesis client to read from this specific endpoint, 25 | # overriding the regionName property for ONLY the Amazon Kinesis client. The lease table and Amazon CloudWatch 26 | # metrics will still use the regionName property.
27 | # kinesisEndpoint = https\://kinesis.us-west-2.amazonaws.com 28 | 29 | # Kinesis Stream where data will be grabbed from 30 | kinesisInputStream = VPC 31 | 32 | # Optional Amazon Kinesis parameters for automatically creating the stream 33 | createKinesisInputStream = false 34 | createKinesisOutputStream = false 35 | kinesisInputStreamShardCount = 2 36 | kinesisOutputStreamShardCount = 2 37 | 38 | # Transformer class that will be used to handle records 39 | transformerClass = CloudWatchMessageModelSumologicTransformer 40 | 41 | # Specifies the input file from which the StreamSource will read records 42 | createStreamSource = false 43 | inputStreamFile = users.txt 44 | 45 | # Connector name to be appendend to the UserAgent 46 | connectorDestination = sumologic 47 | 48 | # Sumologic HTTP Collector URL 49 | sumologicUrl = [SUMOLOGIC-URL] 50 | -------------------------------------------------------------------------------- /src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Root logger option 2 | log4j.rootLogger=INFO, stdout 3 | 4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.stdout.layout.ConversionPattern=%d{DATE} %5p %c{1}:%L - %m%n 7 | -------------------------------------------------------------------------------- /src/test/java/com/sumologic/client/CloudWatchMessageModelSumologicTransformerTest.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Before; 5 | import org.junit.Rule; 6 | import org.junit.Test; 7 | import org.junit.Ignore; 8 | 9 | import com.amazonaws.services.kinesis.model.Record; 10 | import com.sumologic.client.model.CloudWatchLogsMessageModel; 11 | import com.sumologic.client.model.SimpleKinesisMessageModel; 12 | 13 | import java.io.IOException; 14 | import java.nio.charset.Charset; 15 | import java.nio.charset.CharsetEncoder; 16 | import java.nio.CharBuffer; 17 | import java.nio.ByteBuffer; 18 | 19 | 20 | public class CloudWatchMessageModelSumologicTransformerTest { 21 | public static Charset charset = Charset.forName("UTF-8"); 22 | public static CharsetEncoder encoder = charset.newEncoder(); 23 | 24 | @Test 25 | public void theTransformerShouldFailGracefullyWhenUnableToCompress () { 26 | CloudWatchMessageModelSumologicTransformer transfomer = new CloudWatchMessageModelSumologicTransformer(); 27 | 28 | String randomData = "Some random string without GZIP compression"; 29 | ByteBuffer bufferedData = null; 30 | try { 31 | bufferedData = encoder.encode(CharBuffer.wrap(randomData)); 32 | } catch (Exception e) { 33 | Assert.fail("Getting error: "+e.getMessage()); 34 | } 35 | 36 | Record mockedRecord = new Record(); 37 | mockedRecord.setData(bufferedData); 38 | 39 | CloudWatchLogsMessageModel messageModel = transfomer.toClass(mockedRecord); 40 | 41 | 42 | Assert.assertNull(messageModel); 43 | } 44 | 45 | @Test 46 | public void theTransformerShouldSucceedWhenTransformingAProperJSON() { 47 | CloudWatchMessageModelSumologicTransformer transfomer = new CloudWatchMessageModelSumologicTransformer(); 48 | 49 | String jsonData = "" 50 | +"{" 51 | + "\"logEvents\": [{" 52 | + "\"id\": \"3889492387492837492374982374897239847289374892\"," 53 | + "\"message\": \"1 23423532532 eni-ac9342k3492 10.1.1.75 66.175.209.17 123 123 17 1 76 1437755534 1437755549 ACCEPT OK\"," 54 | + "\"timestamp\": 
\"2342342342300\"" 55 | + "}]," 56 | + "\"logGroup\": \"MyFirstVPC\"," 57 | + "\"logStream\": \"eni-ac6a7de4-all\"," 58 | + "\"messageType\": \"DATA_MESSAGE\"," 59 | + "\"owner\": \"2342352352\"," 60 | + "\"subscriptionFilters\": [\"MyFirstVPC\"]" 61 | + "}" 62 | +""; 63 | 64 | byte[] compressData = SumologicKinesisUtils.compressGzip(jsonData); 65 | 66 | ByteBuffer bufferedData = null; 67 | try { 68 | bufferedData = ByteBuffer.wrap(compressData); 69 | } catch (Exception e) { 70 | Assert.fail("Getting error: "+e.getMessage()); 71 | } 72 | 73 | Record mockedRecord = new Record(); 74 | mockedRecord.setData(bufferedData); 75 | 76 | CloudWatchLogsMessageModel messageModel = transfomer.toClass(mockedRecord); 77 | 78 | Assert.assertNotNull(messageModel); 79 | } 80 | 81 | @Test 82 | public void theTransformerShouldFailWhenTransformingAJSONWithTrailingCommas() { 83 | CloudWatchMessageModelSumologicTransformer transfomer = new CloudWatchMessageModelSumologicTransformer(); 84 | 85 | String jsonData = "" 86 | +"{" 87 | + "\"logEvents\": [{" 88 | + "\"id\": \"3889492387492837492374982374897239847289374892\"," 89 | + "\"message\": \"1 23423532532 eni-ac9342k3492 10.1.1.75 66.175.209.17 123 123 17 1 76 1437755534 1437755549 ACCEPT OK\"," 90 | + "\"timestamp\": \"2342342342300\"" 91 | + "}]," 92 | + "\"logGroup\": \"MyFirstVPC\"," 93 | + "\"logStream\": \"eni-ac6a7de4-all\"," 94 | + "\"messageType\": \"DATA_MESSAGE\"," 95 | + "\"owner\": \"2342352352\"," 96 | + "\"subscriptionFilters\": [\"MyFirstVPC\"]," 97 | + "}" 98 | +""; 99 | 100 | byte[] compressData = SumologicKinesisUtils.compressGzip(jsonData); 101 | 102 | ByteBuffer bufferedData = null; 103 | try { 104 | bufferedData = ByteBuffer.wrap(compressData); 105 | } catch (Exception e) { 106 | Assert.fail("Getting error: "+e.getMessage()); 107 | } 108 | 109 | Record mockedRecord = new Record(); 110 | mockedRecord.setData(bufferedData); 111 | 112 | CloudWatchLogsMessageModel messageModel = null; 113 | messageModel = transfomer.toClass(mockedRecord); 114 | 115 | Assert.assertNull(messageModel); 116 | } 117 | 118 | @Test 119 | public void theTransfomerShouldSeparateBatchesOfLogs() { 120 | CloudWatchMessageModelSumologicTransformer transfomer = new CloudWatchMessageModelSumologicTransformer(); 121 | 122 | String jsonData = "" 123 | +"{" 124 | + "\"logEvents\": [{" 125 | + "\"id\": \"3889492387492837492374982374897239847289374892\"," 126 | + "\"message\": \"1 23423532532 eni-ac9342k3492 10.1.1.75 66.175.209.17 123 123 17 1 76 1437755534 1437755549 ACCEPT OK\"," 127 | + "\"timestamp\": \"2342342342300\"" 128 | + "}," 129 | + "{" 130 | + "\"id\": \"3289429357928375892739857238975235235235\"," 131 | + "\"message\": \"1 23423516 eni-ac9342k3492 10.1.1.75 66.175.209.17 123 123 17 1 76 1437755534 1437755549 REJECT OK\"," 132 | + "\"timestamp\": \"2342352351616\"" 133 | + "}]," 134 | + "\"logGroup\": \"MyFirstVPC\"," 135 | + "\"logStream\": \"eni-ac6a7de4-all\"," 136 | + "\"messageType\": \"DATA_MESSAGE\"," 137 | + "\"owner\": \"2342352352\"," 138 | + "\"subscriptionFilters\": [\"MyFirstVPC\"]" 139 | + "}" 140 | +""; 141 | 142 | byte[] compressData = SumologicKinesisUtils.compressGzip(jsonData); 143 | 144 | ByteBuffer bufferedData = null; 145 | try { 146 | bufferedData = ByteBuffer.wrap(compressData); 147 | } catch (Exception e) { 148 | Assert.fail("Getting error: "+e.getMessage()); 149 | } 150 | 151 | Record mockedRecord = new Record(); 152 | mockedRecord.setData(bufferedData); 153 | 154 | CloudWatchLogsMessageModel messageModel = null; 155 | messageModel = 
transfomer.toClass(mockedRecord); 156 | 157 | String debatchedMessage = transfomer.fromClass(messageModel); 158 | System.out.println(debatchedMessage); 159 | 160 | String[] messages = debatchedMessage.split("\n"); 161 | Assert.assertTrue(messages.length == 2); 162 | } 163 | } -------------------------------------------------------------------------------- /src/test/java/com/sumologic/client/SumologicKinesisUtilsTest.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | public class SumologicKinesisUtilsTest { 7 | @Test 8 | public void compressDecompressGzipTest() { 9 | String data = "a string of characters"; 10 | 11 | byte[] compressData = SumologicKinesisUtils.compressGzip(data); 12 | String result = SumologicKinesisUtils.decompressGzip(compressData); 13 | 14 | Assert.assertTrue(data.equals(result)); 15 | } 16 | 17 | @Test 18 | public void properJSONVerificationShouldReturnTrue() { 19 | String jsonData = "" 20 | +"{" 21 | + "\"logEvents\": [{" 22 | + "\"id\": \"3889492387492837492374982374897239847289374892\"," 23 | + "\"message\": \"1 23423532532 eni-ac9342k3492 10.1.1.75 66.175.209.17 123 123 17 1 76 1437755534 1437755549 ACCEPT OK\"," 24 | + "\"timestamp\": \"2342342342300\"" 25 | + "}]," 26 | + "\"logGroup\": \"MyFirstVPC\"," 27 | + "\"logStream\": \"eni-ac6a7de4-all\"," 28 | + "\"messageType\": \"DATA_MESSAGE\"," 29 | + "\"owner\": \"2342352352\"," 30 | + "\"subscriptionFilters\": [\"MyFirstVPC\"]" 31 | + "}" 32 | +""; 33 | 34 | Assert.assertTrue(SumologicKinesisUtils.verifyJSON(jsonData)); 35 | } 36 | 37 | @Test 38 | public void malformedJSONVerificationShouldReturnTrue() { 39 | String jsonData = "" 40 | +"{" 41 | + "\"logEvents\": [{" 42 | + "\"id\": \"3889492387492837492374982374897239847289374892\"," 43 | + "\"message\": \"1 23423532532 eni-ac9342k3492 10.1.1.75 66.175.209.17 123 123 17 1 76 1437755534 1437755549 ACCEPT OK\"," 44 | + "\"timestamp\": \"2342342342300\"" 45 | + "}]," 46 | + "\"logGroup\": \"MyFirstVPC\"," 47 | + "\"logStream\": \"eni-ac6a7de4-all\"," 48 | + "\"messageType\": \"DATA_MESSAGE\"," 49 | + "\"owner\": \"2342352352\"," 50 | + "\"subscriptionFilters\": [\"MyFirstVPC\"]," 51 | + "}" 52 | +""; 53 | 54 | Assert.assertFalse(SumologicKinesisUtils.verifyJSON(jsonData)); 55 | } 56 | } -------------------------------------------------------------------------------- /src/test/java/com/sumologic/client/SumologicSenderTest.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client; 2 | 3 | import java.io.IOException; 4 | import java.nio.charset.Charset; 5 | import java.util.ArrayList; 6 | import java.util.Arrays; 7 | import java.util.List; 8 | 9 | import org.junit.Assert; 10 | import org.junit.Before; 11 | import org.junit.Rule; 12 | import org.junit.Test; 13 | import org.junit.Ignore; 14 | 15 | import static com.github.tomakehurst.wiremock.client.WireMock.*; 16 | 17 | import com.github.tomakehurst.wiremock.client.WireMock; 18 | import com.github.tomakehurst.wiremock.junit.WireMockRule; 19 | import com.sumologic.client.SumologicSender; 20 | import com.sumologic.client.implementations.SumologicEmitter; 21 | 22 | public class SumologicSenderTest { 23 | 24 | @Rule 25 | public WireMockRule wireMockRule = new WireMockRule(8089); 26 | 27 | private static final String MOCKED_HOST = "http://localhost:8089"; 28 | private static final String MOCKED_COLLECTION = 
"/sumologic/collections/1234"; 29 | 30 | 31 | @Before 32 | public void setUp() { 33 | mockEmitMessages(); 34 | } 35 | 36 | @Test 37 | public void theSenderShouldReturnFalseWhenFailing () { 38 | String url = MOCKED_HOST + "/sumologic/collections/fake-url"; 39 | 40 | String data = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " 41 | + "\nIn nisl tortor, dictum nec tristique ut, tincidunt vitae tortor. " 42 | + "\nNam vitae urna ac sem vulputate dignissim at ac nibh. "; 43 | 44 | SumologicSender sender = new SumologicSender(url); 45 | try{ 46 | boolean response = sender.sendToSumologic(data); 47 | Assert.assertFalse(response); 48 | } catch (IOException e) { 49 | Assert.fail("Got an exception during test: "+e.getMessage()); 50 | } 51 | } 52 | 53 | @Test 54 | public void theSenderShouldReturnTrueOnSuccess () { 55 | String url = MOCKED_HOST + MOCKED_COLLECTION; 56 | 57 | String data = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " 58 | + "\nIn nisl tortor, dictum nec tristique ut, tincidunt vitae tortor. " 59 | + "\nNam vitae urna ac sem vulputate dignissim at ac nibh. "; 60 | 61 | SumologicSender sender = new SumologicSender(url); 62 | try{ 63 | boolean response = sender.sendToSumologic(data); 64 | Assert.assertTrue(response); 65 | } catch (IOException e) { 66 | Assert.fail("Got an exception during test: "+e.getMessage()); 67 | } 68 | } 69 | 70 | 71 | private void mockEmitMessages () { 72 | WireMock.stubFor(WireMock.post(WireMock.urlMatching(MOCKED_COLLECTION)) 73 | .willReturn(WireMock.aResponse() 74 | .withStatus(200) 75 | .withHeader("Content-Type", "text/html") 76 | .withBody(""))); 77 | } 78 | 79 | } -------------------------------------------------------------------------------- /src/test/java/com/sumologic/client/implementations/SumologicEmitterTest.java: -------------------------------------------------------------------------------- 1 | package com.sumologic.client.implementations; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | 6 | import org.junit.Assert; 7 | import org.junit.Before; 8 | import org.junit.Rule; 9 | import org.junit.Test; 10 | import org.junit.Ignore; 11 | 12 | import static com.github.tomakehurst.wiremock.client.WireMock.*; 13 | 14 | import com.github.tomakehurst.wiremock.client.WireMock; 15 | import com.github.tomakehurst.wiremock.junit.WireMockRule; 16 | import com.sumologic.client.implementations.SumologicEmitter; 17 | 18 | public class SumologicEmitterTest { 19 | 20 | @Rule 21 | public WireMockRule wireMockRule = new WireMockRule(8089); 22 | 23 | private static final String MOCKED_HOST = "http://localhost:8089"; 24 | private static final String MOCKED_COLLECTION = "/sumologic/collections/1234"; 25 | 26 | @Before 27 | public void setUp() { 28 | mockEmitMessages(); 29 | } 30 | 31 | @Test 32 | public void theEmitterShouldReturnTheListParameterWhenFailing () { 33 | String url = MOCKED_HOST + "/sumologic/collections/fake-url"; 34 | 35 | List messages = new ArrayList(); 36 | messages.add("This is message #1"); 37 | messages.add("This is message #2"); 38 | messages.add("This is message #3"); 39 | messages.add("This is message #4"); 40 | 41 | SumologicEmitter emitter = new SumologicEmitter(url); 42 | List notEmittedMessages = emitter.sendBatchConcatenating(messages); 43 | 44 | Assert.assertEquals(messages, notEmittedMessages); 45 | } 46 | 47 | @Test 48 | public void theEmitterShouldReturnAnEmptyListOnSuccess () { 49 | String url = MOCKED_HOST + MOCKED_COLLECTION; 50 | 51 | List messages = new ArrayList(); 52 | messages.add("This 
is message #1"); 53 | messages.add("This is message #2"); 54 | messages.add("This is message #3"); 55 | messages.add("This is message #4"); 56 | 57 | SumologicEmitter emitter = new SumologicEmitter(url); 58 | List notEmittedMessages = emitter.sendBatchConcatenating(messages); 59 | 60 | Assert.assertEquals(0, notEmittedMessages.size()); 61 | } 62 | 63 | private void mockEmitMessages () { 64 | WireMock.stubFor(WireMock.post(WireMock.urlMatching(MOCKED_COLLECTION)) 65 | .willReturn(WireMock.aResponse() 66 | .withStatus(200) 67 | .withHeader("Content-Type", "text/html") 68 | .withBody(""))); 69 | } 70 | 71 | } --------------------------------------------------------------------------------
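The transformer and utils tests above rely on SumologicKinesisUtils.compressGzip and decompressGzip, whose implementations are not part of this listing. A GZIP round trip of that shape can be written with the JDK's streams as below; this is an illustrative sketch under that assumption, not the project's actual utility code.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.GZIPInputStream;
    import java.util.zip.GZIPOutputStream;

    // Sketch of a GZIP compress/decompress round trip like the one exercised
    // by SumologicKinesisUtilsTest.compressDecompressGzipTest().
    public class GzipRoundTripSketch {
        static byte[] compressGzip(String data) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (GZIPOutputStream gzip = new GZIPOutputStream(bos)) {
                gzip.write(data.getBytes(StandardCharsets.UTF_8));
            }
            return bos.toByteArray();
        }

        static String decompressGzip(byte[] compressed) throws IOException {
            try (GZIPInputStream gzip = new GZIPInputStream(new ByteArrayInputStream(compressed));
                 ByteArrayOutputStream out = new ByteArrayOutputStream()) {
                byte[] chunk = new byte[4096];
                int read;
                while ((read = gzip.read(chunk)) != -1) {
                    out.write(chunk, 0, read);
                }
                return new String(out.toByteArray(), StandardCharsets.UTF_8);
            }
        }

        public static void main(String[] args) throws IOException {
            String data = "a string of characters";
            System.out.println(decompressGzip(compressGzip(data)).equals(data)); // prints true
        }
    }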