├── License.txt ├── Notice.txt ├── README.md ├── classic-index.js └── index.js /License.txt: -------------------------------------------------------------------------------- 1 | Amazon Software License 2 | 3 | This Amazon Software License (“License”) governs your use, reproduction, and distribution of the accompanying software as specified below. 4 | 5 | 1. Definitions 6 | “Licensor” means any person or entity that distributes its Work. 7 | 8 | “Software” means the original work of authorship made available under this License. 9 | 10 | “Work” means the Software and any additions to or derivative works of the Software that are made available under this License. 11 | 12 | The terms “reproduce,” “reproduction,” “derivative works,” and “distribution” have the meaning as provided under U.S. copyright law; provided, however, that for the purposes of this License, derivative works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work. 13 | 14 | Works, including the Software, are “made available” under this License by including in or with the Work either (a) a copyright notice referencing the applicability of this License to the Work, or (b) a copy of this License. 15 | 16 | 2. License Grants 17 | 18 | 2.1 Copyright Grant. Subject to the terms and conditions of this License, each Licensor grants to you a perpetual, worldwide, non-exclusive, royalty-free, copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense and distribute its Work and any resulting derivative works in any form. 19 | 20 | 2.2 Patent Grant. Subject to the terms and conditions of this License, each Licensor grants to you a perpetual, worldwide, non-exclusive, royalty-free patent license to make, have made, use, sell, offer for sale, import, and otherwise transfer its Work, in whole or in part. 
The foregoing license applies only to the patent claims licensable by Licensor that would be infringed by Licensor’s Work (or portion thereof) individually and excluding any combinations with any other materials or technology. 21 | 22 | 3. Limitations 23 | 24 | 3.1 Redistribution. You may reproduce or distribute the Work only if (a) you do so under this License, (b) you include a complete copy of this License with your distribution, and (c) you retain without modification any copyright, patent, trademark, or attribution notices that are present in the Work. 25 | 26 | 3.2 Derivative Works. You may specify that additional or different terms apply to the use, reproduction, and distribution of your derivative works of the Work (“Your Terms”) only if (a) Your Terms provide that the use limitation in Section 3.3 applies to your derivative works, and (b) you identify the specific derivative works that are subject to Your Terms. Notwithstanding Your Terms, this License (including the redistribution requirements in Section 3.1) will continue to apply to the Work itself. 27 | 28 | 3.3 Use Limitation. The Work and any derivative works thereof only may be used or intended for use with the web services, computing platforms or applications provided by Amazon.com, Inc. or its affiliates, including Amazon Web Services, Inc. 29 | 30 | 3.4 Patent Claims. If you bring or threaten to bring a patent claim against any Licensor (including any claim, cross-claim or counterclaim in a lawsuit) to enforce any patents that you allege are infringed by any Work, then your rights under this License from such Licensor (including the grants in Sections 2.1 and 2.2) will terminate immediately. 31 | 32 | 3.5 Trademarks. This License does not grant any rights to use any Licensor’s or its affiliates’ names, logos, or trademarks, except as necessary to reproduce the notices described in this License. 33 | 34 | 3.6 Termination. 
If you violate any term of this License, then your rights under this License (including the grants in Sections 2.1 and 2.2) will terminate immediately. 35 | 36 | 4. Disclaimer of Warranty. 37 | 38 | THE WORK IS PROVIDED “AS IS” WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WARRANTIES OR CONDITIONS OF M ERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE OR NON-INFRINGEMENT. YOU BEAR THE RISK OF UNDERTAKING ANY ACTIVITIES UNDER THIS LICENSE. SOME STATES’ CONSUMER LAWS DO NOT ALLOW EXCLUSION OF AN IMPLIED WARRANTY, SO THIS DISCLAIMER MAY NOT APPLY TO YOU. 39 | 40 | 5. Limitation of Liability. 41 | EXCEPT AS PROHIBITED BY APPLICABLE LAW, IN NO EVENT AND UNDER NO LEGAL THEORY, WHETHER IN TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE SHALL ANY LICENSOR BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF OR RELATED TO THIS LICENSE, THE USE OR INABILITY TO USE THE WORK (INCLUDING BUT NOT LIMITED TO LOSS OF GOODWILL, BUSINESS INTERRUPTION, LOST PROFITS OR DATA, COMPUTER FAILURE OR MALFUNCTION, OR ANY OTHER COMM ERCIAL DAMAGES OR LOSSES), EVEN IF THE LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 42 | 43 | Effective Date – April 18, 2008 © 2008 Amazon.com, Inc. or its affiliates. All rights reserved. 44 | 45 | -------------------------------------------------------------------------------- /Notice.txt: -------------------------------------------------------------------------------- 1 | CloudWatch Logs Centralize Logs 2 | Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cloudwatch Logs Centralize Logs 2 | 3 | ### Package cloudwatch-logs-centralize-logs 4 | 5 | Copyright 2016- Amazon.com, Inc. or its affiliates. All Rights Reserved. 
 6 | 7 | ## Introduction 8 | 9 | It is often useful to centralize log data from different sources as it gets generated. Data can then be used for searching, filtering or doing analysis. The **CloudWatch Logs Centralize Logs** is a Lambda function that helps in centralizing logs from Elastic Load Balancing (ELB) using Amazon S3 bucket triggers. In this Lambda function we have shown how ELB logs that are delivered to S3 can be posted to CloudWatch Logs. But it can be modified to read any logs from S3. 10 | 11 | ## Flow of Events 12 | 13 | ![Flow of events](https://s3.amazonaws.com/aws-cloudwatch/downloads/cloudwatch-logs-centralize-logs/Demo-1.png) 14 | 15 | ## Setup Overview 16 | 17 | Lambda function is written in Node.js. Since we don't have a dependency on a specific version of library, we rely on the defaults provided by Lambda. Correspondingly, a Lambda deployment package is not required. Instead we can use the inline editor in Lambda. You can create a new Lambda function, and copy the code in index.js in this repository to your function. 18 | 19 | ### Pre-requisite 20 | 21 | * S3 bucket where ELB logs can be archived to. 22 | * Enable archiving of ELB access logs in S3. 23 | 24 | 25 | ### Triggers 26 | 27 | * The Lambda function is triggered at an S3 'ObjectCreated' event type 28 | * You also need to provide the S3 bucket where the ELB logs will be delivered 29 | 30 | ### Authorization 31 | 32 | Since there is a need here for various AWS services making calls to each other, appropriate authorization is required. This takes the form of configuring an IAM role, to which various authorization policies are attached. This role will be assumed by the Lambda function when running. 
The below two permissions are required: 33 | 34 | 1. S3 permits Lambda to fetch the created objects from a given bucket 35 | 36 | ```json 37 | { 38 | "Version": "2012-10-17", 39 | "Statement": [ 40 | { 41 | "Effect": "Allow", 42 | "Action": [ 43 | "s3:GetObject" 44 | ], 45 | "Resource": "arn:aws:s3:::*" 46 | } 47 | ] 48 | } 49 | ``` 50 | 51 | 2. CloudWatch Logs permits Lambda to perform various operations. Below we have given Full Access to CloudWatch logs. You can choose to give specific access to describeLogStreams, describeLogGroups, createLogGroup, createLogStream, putLogEvents 52 | 53 | ```json 54 | { 55 | "Version": "2012-10-17", 56 | "Statement": [ 57 | { 58 | "Action": [ 59 | "logs:*" 60 | ], 61 | "Effect": "Allow", 62 | "Resource": "*" 63 | } 64 | ] 65 | } 66 | ``` 67 | 68 | ### Lambda Function 69 | 70 | ***Configurable parameters:*** 71 | 72 | * Log Group Name: Name of the log group in CloudWatch Logs in which the logs will be published 73 | * Log Stream Name: Name of the log stream within the log group 74 | 75 | ***Instructions:*** 76 | 77 | * There are two lambda functions. index.js for compressed logs from Application Load Balancer type and classic-index.js for uncompressed logs from Classic Load Balancer type. 78 | * Handler: The name of the main code file. In this example we have used index as the name of the handler. 79 | * The Lambda function reads the data from the S3 object using the S3 getObject API. The data is encoded and compressed. 80 | * The Lambda function decodes and decompresses the data using the zlib library 81 | * The data is then sent to CloudWatch Logs using the putLogEvents api of CloudWatch Logs 82 | * We check for the existence of the specified log group and stream using the describeLogGroups and describeLogStreams. If not found, we create the group and stream. 83 | * When the log stream already exists, we use the sequenceToken during the putLogEvents call. 
84 | 85 | ### Lambda Configuration 86 | 87 | This Lambda function was created with runtime Node.js 4.3. It has been tested with 128 KB and 3 second timeout. No VPC was used. You can change the number based on your testing. 88 | 89 | ## Known Limitations 90 | 91 | This Lambda function has the following limitation: 92 | * Currently describeLogStreams is called at every Lambda invocation. But describeLogStreams has a limit of 5 transactions per second (TPS/account/region). This can be resolved by modifying the Lambda function to create log group and log stream only if we get a ResourceNotFound error from calling putLogEvents. 93 | -------------------------------------------------------------------------------- /classic-index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const aws = require('aws-sdk'); 4 | var zlib = require('zlib'); 5 | const s3 = new aws.S3({ apiVersion: '2006-03-01' }); 6 | const cloudWatchLogs = new aws.CloudWatchLogs({ 7 | apiVersion: '2014-03-28' 8 | }); 9 | const readline = require('readline'); 10 | const stream = require('stream'); 11 | 12 | //specifying the log group and the log stream name for CloudWatch Logs 13 | const logGroupName = 'classic-elb-logs' //Name of the log group goes here; 14 | const logStreamName = 'classic-elb-stream' //Name of the log stream goes here; 15 | 16 | exports.handler = (event, context, callback) => { 17 | 18 | // Get the object from the event and show its content type 19 | console.log('S3 object is:', event.Records[0].s3); 20 | const bucket = event.Records[0].s3.bucket.name; 21 | console.log('Name of S3 bucket is:', bucket); 22 | const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' ')); 23 | const params = { 24 | Bucket: bucket, 25 | Key: key, 26 | }; 27 | s3.getObject(params, (err, data) => { 28 | if (err) { 29 | console.log(err); 30 | const message = `Error getting object ${key} from bucket ${bucket}. 
Make sure they exist and your bucket is in the same region as this function.`; 31 | console.log(message); 32 | callback(message); 33 | } else { 34 | console.log('Data is:', data); 35 | if (data.Body) { 36 | var logData = data.Body.toString('ascii'); 37 | //manage the log group, streams and push log events to CloudWatch Logs 38 | manageLogGroups(logData); 39 | } 40 | callback(null, data.ContentType); 41 | } 42 | }); 43 | 44 | //Manage the log group 45 | function manageLogGroups (logData) { 46 | 47 | var describeLogGroupParams = { 48 | logGroupNamePrefix: logGroupName 49 | }; 50 | 51 | //check if the log group already exists 52 | cloudWatchLogs.describeLogGroups(describeLogGroupParams, function (err, data){ 53 | if (err) { 54 | console.log('Error while describing log group:', err); 55 | createLogGroup (logData); 56 | } else { 57 | if (!data.logGroups[0]) { 58 | console.log ('Need to create log group:', data); 59 | //create log group 60 | createLogGroup(logData); 61 | } else { 62 | console.log('Success while describing log group:', data); 63 | manageLogStreams(logData); 64 | } 65 | } 66 | }); 67 | } 68 | 69 | //Create log group 70 | function createLogGroup (logData) { 71 | var logGroupParams = { 72 | logGroupName: logGroupName 73 | } 74 | cloudWatchLogs.createLogGroup(logGroupParams, function (err, data){ 75 | if (err) { 76 | console.log('error while creating log group: ', err, err.stack); 77 | return; 78 | } else { 79 | console.log ('Success in creating log group: ', logGroupName); 80 | manageLogStreams(logData); 81 | } 82 | }); 83 | } 84 | 85 | //Manage the log stream and get the sequenceToken 86 | function manageLogStreams (logData) { 87 | var describeLogStreamsParams = { 88 | logGroupName: logGroupName, 89 | logStreamNamePrefix: logStreamName 90 | } 91 | 92 | //check if the log stream already exists and get the sequenceToken 93 | cloudWatchLogs.describeLogStreams (describeLogStreamsParams, function (err, data) { 94 | if (err) { 95 | console.log ('Error during 
describe log streams:', err); 96 | //create log stream 97 | createLogStream(logData); 98 | } else { 99 | if (!data.logStreams[0]) { 100 | console.log ('Need to create log stream:', data); 101 | //create log stream 102 | createLogStream(logData); 103 | } else { 104 | console.log ('Log Stream already defined:', logStreamName); 105 | putLogEvents (data.logStreams[0].uploadSequenceToken, logData); 106 | } 107 | } 108 | }); 109 | } 110 | 111 | //Create Log Stream 112 | function createLogStream (logData) { 113 | var logStreamParams = { 114 | logGroupName: logGroupName, 115 | logStreamName: logStreamName 116 | }; 117 | 118 | cloudWatchLogs.createLogStream(logStreamParams, function (err, data){ 119 | if (err) { 120 | console.log('error while creating log stream: ', err, err.stack); 121 | return; 122 | } else { 123 | console.log ('Success in creating log stream: ', logStreamName); 124 | putLogEvents (null, logData); 125 | } 126 | }); 127 | } 128 | 129 | function putLogEvents (sequenceToken, logData) { 130 | //From http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html 131 | const MAX_BATCH_SIZE = 1048576; // maximum size in bytes of Log Events (with overhead) per invocation of PutLogEvents 132 | const MAX_BATCH_COUNT = 10000; // maximum number of Log Events per invocation of PutLogEvents 133 | const LOG_EVENT_OVERHEAD = 26; // bytes of overhead per Log Event 134 | 135 | // holds a list of batches 136 | var batches = []; 137 | 138 | // holds the list of events in current batch 139 | var batch = []; 140 | 141 | // size of events in the current batch 142 | var batch_size = 0; 143 | 144 | var bufferStream = new stream.PassThrough(); 145 | bufferStream.end(logData); 146 | 147 | var rl = readline.createInterface({ 148 | input: bufferStream 149 | }); 150 | 151 | var line_count = 0; 152 | 153 | rl.on('line', (line) => { 154 | ++line_count; 155 | 156 | var ts = line.split(' ', 1)[0]; 157 | var tval = Date.parse(ts); 158 | 159 | var event_size = 
line.length + LOG_EVENT_OVERHEAD; 160 | 161 | batch_size += event_size; 162 | 163 | if(batch_size >= MAX_BATCH_SIZE || 164 | batch.length >= MAX_BATCH_COUNT) { 165 | // start a new batch 166 | batches.push(batch); 167 | batch = []; 168 | batch_size = event_size; 169 | } 170 | 171 | batch.push({ 172 | message: line, 173 | timestamp: tval 174 | }); 175 | }); 176 | 177 | rl.on('close', () => { 178 | // add the final batch 179 | batches.push(batch); 180 | sendBatches(sequenceToken, batches); 181 | }); 182 | } 183 | 184 | function sendBatches(sequenceToken, batches) { 185 | var count = 0; 186 | var batch_count = 0; 187 | 188 | function sendNextBatch(err, nextSequenceToken) { 189 | if(err) { 190 | console.log('Error sending batch: ', err, err.stack); 191 | return; 192 | } else { 193 | var nextBatch = batches.shift(); 194 | if(nextBatch) { 195 | // send this batch 196 | ++batch_count; 197 | count += nextBatch.length; 198 | sendBatch(nextSequenceToken, nextBatch, sendNextBatch); 199 | } else { 200 | // no more batches: we are done 201 | var msg = `Successfully put ${count} events in ${batch_count} batches`; 202 | console.log(msg); 203 | callback(null, msg); 204 | } 205 | } 206 | } 207 | 208 | sendNextBatch(null, sequenceToken); 209 | } 210 | 211 | function sendBatch(sequenceToken, batch, doNext) { 212 | var putLogEventParams = { 213 | logEvents: batch, 214 | logGroupName: logGroupName, 215 | logStreamName: logStreamName 216 | } 217 | if (sequenceToken) { 218 | putLogEventParams['sequenceToken'] = sequenceToken; 219 | } 220 | 221 | // sort the events in ascending order by timestamp as required by PutLogEvents 222 | putLogEventParams.logEvents.sort(function(a, b) { 223 | if(a.timestamp > b.timestamp) { 224 | return 1; 225 | } 226 | if(a.timestamp < b.timestamp) { 227 | return -1; 228 | } 229 | return 0; 230 | }); 231 | 232 | cloudWatchLogs.putLogEvents (putLogEventParams, function (err, data) { 233 | if (err) { 234 | console.log('Error during put log events: ', err, 
err.stack); 235 | doNext(err, null); 236 | } else { 237 | console.log(`Success in putting ${putLogEventParams.logEvents.length} events`); 238 | doNext(null, data.nextSequenceToken); 239 | } 240 | }); 241 | } 242 | }; 243 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const aws = require('aws-sdk'); 4 | var zlib = require('zlib'); 5 | const s3 = new aws.S3({ apiVersion: '2006-03-01' }); 6 | const cloudWatchLogs = new aws.CloudWatchLogs({ 7 | apiVersion: '2014-03-28' 8 | }); 9 | 10 | const readline = require('readline'); 11 | const stream = require('stream'); 12 | 13 | //specifying the log group and the log stream name for CloudWatch Logs 14 | const logGroupName = 'apache-elb-logs' //Name of the log group goes here; 15 | const logStreamName = 'apache-elb-stream' //Name of the log stream goes here; 16 | 17 | exports.handler = (event, context, callback) => { 18 | 19 | // Get the object from the event and show its content type 20 | console.log('S3 object is:', event.Records[0].s3); 21 | const bucket = event.Records[0].s3.bucket.name; 22 | console.log('Name of S3 bucket is:', bucket); 23 | const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' ')); 24 | const params = { 25 | Bucket: bucket, 26 | Key: key, 27 | }; 28 | s3.getObject(params, (err, data) => { 29 | if (err) { 30 | console.log(err); 31 | const message = `Error getting object ${key} from bucket ${bucket}. 
Make sure they exist and your bucket is in the same region as this function.`; 32 | console.log(message); 33 | callback(message); 34 | } else { 35 | //uncompressing the S3 data 36 | zlib.gunzip(data.Body, function(error, buffer){ 37 | if (error) { 38 | console.log('Error uncompressing data', error); 39 | return; 40 | } 41 | 42 | var logData = buffer.toString('ascii'); 43 | //manage the log group, streams and push log events to CloudWatch Logs 44 | manageLogGroups(logData); 45 | 46 | }); 47 | callback(null, data.ContentType); 48 | } 49 | }); 50 | 51 | //Manage the log group 52 | function manageLogGroups (logData) { 53 | 54 | var describeLogGroupParams = { 55 | logGroupNamePrefix: logGroupName 56 | }; 57 | 58 | //check if the log group already exists 59 | cloudWatchLogs.describeLogGroups(describeLogGroupParams, function (err, data){ 60 | if (err) { 61 | console.log('Error while describing log group:', err); 62 | createLogGroup (logData); 63 | } else { 64 | if (!data.logGroups[0]) { 65 | console.log ('Need to create log group:', data); 66 | //create log group 67 | createLogGroup(logData); 68 | } else { 69 | console.log('Success while describing log group:', data); 70 | manageLogStreams(logData); 71 | } 72 | } 73 | }); 74 | } 75 | 76 | //Create log group 77 | function createLogGroup (logData) { 78 | var logGroupParams = { 79 | logGroupName: logGroupName 80 | } 81 | cloudWatchLogs.createLogGroup(logGroupParams, function (err, data){ 82 | if (err) { 83 | console.log('error while creating log group: ', err, err.stack); 84 | return; 85 | } else { 86 | console.log ('Success in creating log group: ', logGroupName); 87 | manageLogStreams(logData); 88 | } 89 | }); 90 | } 91 | 92 | //Manage the log stream and get the sequenceToken 93 | function manageLogStreams (logData) { 94 | var describeLogStreamsParams = { 95 | logGroupName: logGroupName, 96 | logStreamNamePrefix: logStreamName 97 | } 98 | 99 | //check if the log stream already exists and get the sequenceToken 100 | 
cloudWatchLogs.describeLogStreams (describeLogStreamsParams, function (err, data) { 101 | if (err) { 102 | console.log ('Error during describe log streams:', err); 103 | //create log stream 104 | createLogStream(logData); 105 | } else { 106 | if (!data.logStreams[0]) { 107 | console.log ('Need to create log stream:', data); 108 | //create log stream 109 | createLogStream(logData); 110 | } else { 111 | console.log ('Log Stream already defined:', logStreamName); 112 | putLogEvents (data.logStreams[0].uploadSequenceToken, logData); 113 | } 114 | } 115 | }); 116 | } 117 | 118 | //Create Log Stream 119 | function createLogStream (logData) { 120 | var logStreamParams = { 121 | logGroupName: logGroupName, 122 | logStreamName: logStreamName 123 | }; 124 | 125 | cloudWatchLogs.createLogStream(logStreamParams, function (err, data){ 126 | if (err) { 127 | console.log('error while creating log stream: ', err, err.stack); 128 | return; 129 | } else { 130 | console.log ('Success in creating log stream: ', logStreamName); 131 | putLogEvents (null, logData); 132 | } 133 | }); 134 | } 135 | 136 | function putLogEvents (sequenceToken, logData) { 137 | //From http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html 138 | const MAX_BATCH_SIZE = 1048576; // maximum size in bytes of Log Events (with overhead) per invocation of PutLogEvents 139 | const MAX_BATCH_COUNT = 10000; // maximum number of Log Events per invocation of PutLogEvents 140 | const LOG_EVENT_OVERHEAD = 26; // bytes of overhead per Log Event 141 | 142 | // holds a list of batches 143 | var batches = []; 144 | 145 | // holds the list of events in current batch 146 | var batch = []; 147 | 148 | // size of events in the current batch 149 | var batch_size = 0; 150 | 151 | var bufferStream = new stream.PassThrough(); 152 | bufferStream.end(logData); 153 | 154 | var rl = readline.createInterface({ 155 | input: bufferStream 156 | }); 157 | 158 | var line_count = 0; 159 | 160 | rl.on('line', 
(line) => { 161 | ++line_count; 162 | 163 | var ts = line.split(' ', 2)[1]; 164 | var tval = Date.parse(ts); 165 | 166 | var event_size = line.length + LOG_EVENT_OVERHEAD; 167 | 168 | batch_size += event_size; 169 | 170 | if(batch_size >= MAX_BATCH_SIZE || 171 | batch.length >= MAX_BATCH_COUNT) { 172 | // start a new batch 173 | batches.push(batch); 174 | batch = []; 175 | batch_size = event_size; 176 | } 177 | 178 | batch.push({ 179 | message: line, 180 | timestamp: tval 181 | }); 182 | }); 183 | 184 | rl.on('close', () => { 185 | // add the final batch 186 | batches.push(batch); 187 | sendBatches(sequenceToken, batches); 188 | }); 189 | } 190 | 191 | function sendBatches(sequenceToken, batches) { 192 | var count = 0; 193 | var batch_count = 0; 194 | 195 | function sendNextBatch(err, nextSequenceToken) { 196 | if(err) { 197 | console.log('Error sending batch: ', err, err.stack); 198 | return; 199 | } else { 200 | var nextBatch = batches.shift(); 201 | if(nextBatch) { 202 | // send this batch 203 | ++batch_count; 204 | count += nextBatch.length; 205 | sendBatch(nextSequenceToken, nextBatch, sendNextBatch); 206 | } else { 207 | // no more batches: we are done 208 | var msg = `Successfully put ${count} events in ${batch_count} batches`; 209 | console.log(msg); 210 | callback(null, msg); 211 | } 212 | } 213 | } 214 | 215 | sendNextBatch(null, sequenceToken); 216 | } 217 | 218 | function sendBatch(sequenceToken, batch, doNext) { 219 | var putLogEventParams = { 220 | logEvents: batch, 221 | logGroupName: logGroupName, 222 | logStreamName: logStreamName 223 | } 224 | if (sequenceToken) { 225 | putLogEventParams['sequenceToken'] = sequenceToken; 226 | } 227 | 228 | // sort the events in ascending order by timestamp as required by PutLogEvents 229 | putLogEventParams.logEvents.sort(function(a, b) { 230 | if(a.timestamp > b.timestamp) { 231 | return 1; 232 | } 233 | if(a.timestamp < b.timestamp) { 234 | return -1; 235 | } 236 | return 0; 237 | }); 238 | 239 | 
cloudWatchLogs.putLogEvents (putLogEventParams, function (err, data) { 240 | if (err) { 241 | console.log('Error during put log events: ', err, err.stack); 242 | doNext(err, null); 243 | } else { 244 | console.log(`Success in putting ${putLogEventParams.logEvents.length} events`); 245 | doNext(null, data.nextSequenceToken); 246 | } 247 | }); 248 | } 249 | }; --------------------------------------------------------------------------------