├── .gitignore
├── Dockerfile
├── README.md
├── files.txt
├── index.js
├── lib
├── common.js
├── constants.js
├── importer.js
├── kmsCrypto.js
└── s3-keys.js
├── package.json
├── run.js
└── setup.js
/.gitignore:
--------------------------------------------------------------------------------
1 | .env
2 | deploy.env
3 | event.json
4 | node_modules
5 | .DS_Store
6 | dist
7 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# NOTE(review): node:0.12 is long end-of-life and receives no security
# patches - consider upgrading the base image
FROM node:0.12

# install the loader under /opt and expose run.js as a CLI on the PATH
RUN mkdir /opt/lambda-rds-loader
ADD . /opt/lambda-rds-loader
RUN ln -s /opt/lambda-rds-loader/run.js /usr/local/bin/lambda-rds-loader && chmod 755 /usr/local/bin/lambda-rds-loader

CMD []
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AWS Lambda RDS Database Loader
2 |
3 | Use this function to load CSV files from any S3 location into RDS tables. Tables and columns will be auto generated if they don't exist.
4 |
5 | ## Getting Started - Lambda Execution Role
6 | You also need to add an IAM policy as shown below to the role that AWS Lambda
7 | uses when it runs. Once your function is deployed, add the following policy to
the `LambdaExecRole` to enable AWS Lambda to call SNS, use DynamoDB, write Manifest
files to S3, and perform encryption with the AWS Key Management Service. At the moment
10 | Lambda cannot communicate over VPC and so the RDS needs to allow all external
11 | connections.
12 |
13 | ```
14 | {
15 | "Version": "2012-10-17",
16 | "Statement": [
17 | {
18 | "Sid": "Stmt1424787824000",
19 | "Effect": "Allow",
20 | "Action": [
21 | "dynamodb:DeleteItem",
22 | "dynamodb:DescribeTable",
23 | "dynamodb:GetItem",
24 | "dynamodb:ListTables",
25 | "dynamodb:PutItem",
26 | "dynamodb:Query",
27 | "dynamodb:Scan",
28 | "dynamodb:UpdateItem",
29 | "sns:GetEndpointAttributes",
30 | "sns:GetSubscriptionAttributes",
31 | "sns:GetTopicAttributes",
32 | "sns:ListTopics",
33 | "sns:Publish",
34 | "sns:Subscribe",
35 | "sns:Unsubscribe",
36 | "s3:Get*",
37 | "s3:Put*",
38 | "s3:List*",
39 | "kms:Decrypt",
40 | "kms:DescribeKey",
41 | "kms:GetKeyPolicy"
42 | ],
43 | "Resource": [
44 | "*"
45 | ]
46 | }
47 | ]
48 | }
49 | ```
50 |
51 | ## Getting Started - Configuration
In order to set up a Lambda configuration, run:
53 | ```
54 | nodejs setup.js
55 | ```
56 |
57 | Item | Required | Notes
58 | :---- | :--------: | :-----
59 | Enter the Region for the Configuration | Y | Any AWS Region from http://docs.aws.amazon.com/general/latest/gr/rande.html, using the short name (for example us-east-1 for US East 1)
60 | Enter the S3 Bucket | Y | Bucket name will be used to lookup RDS configuration for the particular bucket.
61 | Enter a Filename Filter Regex | N | A Regular Expression used to filter files before they are processed.
62 | Enter the RDS Host | Y | Database host.
63 | Enter the RDS Port | Y | Database port.
64 | Enter the Database Name | Y | Database to use.
65 | Enter the Schema | N | Schema to use, default: public.
66 | Enter table prefix | N | Prefix newly created tables. This is recommended if files start with numbers.
Enter the folder depth from bucket root to use as table name. Use negative index to select from the input file | Y | Determines names for new tables. For example: /test/path/file.csv, index of 0 would create a table named *test* and index of -1 would create a table named *file_csv*.
68 | Should the Table be Truncated before Load? | N | Truncate table before loading new data.
69 | Enter the Database Username | Y | Database username.
70 | Enter the Database Password | Y | Database password.
71 | Enter the CSV Delimiter | N | CSV delimiter, default: ,.
72 |
73 | Configuration will be stored in DynamoDB database LambdaRDSLoaderConfig.
74 |
75 | ## Getting Started - Running
76 | Upload the lambda function as a zip file or use node-lambda. Create an S3 watch on an S3 location. Bucket will correspond to RDS configuration tagged with such bucket name.
77 |
--------------------------------------------------------------------------------
/files.txt:
--------------------------------------------------------------------------------
1 | null
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | var Importer = require('./lib/importer');
2 |
3 | exports.handler = function(event, context) {
4 | if(!event.Records) {
5 | console.log('No records found');
6 | context.done();
7 | } else {
8 | var importer = new Importer(event.Records);
9 | importer.run();
10 | }
11 | };
12 |
--------------------------------------------------------------------------------
/lib/common.js:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright 2014-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 |
4 | Licensed under the Amazon Software License (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
5 |
6 | http://aws.amazon.com/asl/
7 |
8 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and limitations under the License.
9 | */
10 |
11 | var async = require('async');
12 | require('./constants');
13 |
14 | // function which creates a string representation of now suitable for use in S3
15 | // paths
16 | exports.getFormattedDate = function(date) {
17 | if (!date) {
18 | date = new Date();
19 | }
20 |
21 | var hour = date.getHours();
22 | hour = (hour < 10 ? "0" : "") + hour;
23 |
24 | var min = date.getMinutes();
25 | min = (min < 10 ? "0" : "") + min;
26 |
27 | var sec = date.getSeconds();
28 | sec = (sec < 10 ? "0" : "") + sec;
29 |
30 | var year = date.getFullYear();
31 |
32 | var month = date.getMonth() + 1;
33 | month = (month < 10 ? "0" : "") + month;
34 |
35 | var day = date.getDate();
36 | day = (day < 10 ? "0" : "") + day;
37 |
38 | return year + "-" + month + "-" + day + "-" + hour + ":" + min + ":" + sec;
39 | };
40 |
41 | /* current time as seconds */
42 | exports.now = function() {
43 | return new Date().getTime() / 1000;
44 | };
45 |
46 | exports.readableTime = function(epochSeconds) {
47 | var d = new Date(0);
48 | d.setUTCSeconds(epochSeconds);
49 | return exports.getFormattedDate(d);
50 | };
51 |
52 | exports.createTables = function(dynamoDB, callback) {
53 | // processed files table spec
54 | var pfKey = 'loadFile';
55 | var configKey = s3prefix;
56 | var configSpec = {
57 | AttributeDefinitions : [ {
58 | AttributeName : configKey,
59 | AttributeType : 'S'
60 | } ],
61 | KeySchema : [ {
62 | AttributeName : configKey,
63 | KeyType : 'HASH'
64 | } ],
65 | TableName : configTable,
66 | ProvisionedThroughput : {
67 | ReadCapacityUnits : 5,
68 | WriteCapacityUnits : 5
69 | }
70 | };
71 |
72 | console.log("Creating Tables in Dynamo DB if Required");
73 | dynamoDB.createTable(configSpec, function(err, data) {
74 | if (err) {
75 | if (err.code !== 'ResourceInUseException') {
76 | console.log(Object.prototype.toString.call(err).toString());
77 | console.log(err.toString());
78 | process.exit(ERROR);
79 | }
80 | }
81 | });
82 | };
83 |
84 | exports.updateConfig = function(setRegion, dynamoDB, updateRequest, outerCallback) {
85 | var tryNumber = 0;
86 | var writeConfigRetryLimit = 100;
87 |
88 | async.whilst(function() {
89 | // retry until the try count is hit
90 | return tryNumber < writeConfigRetryLimit;
91 | }, function(callback) {
92 | tryNumber++;
93 |
94 | dynamoDB.updateItem(updateRequest, function(err, data) {
95 | if (err) {
96 | if (err.code === 'ResourceInUseException' || err.code === 'ResourceNotFoundException') {
97 | console.log(err.code);
98 |
99 | // retry if the table is in use after 1 second
100 | setTimeout(callback(), 1000);
101 | } else {
102 | // some other error - fail
103 | console.log(JSON.stringify(updateRequest));
104 | console.log(err);
105 | outerCallback(err);
106 | }
107 | } else {
108 | // all OK - exit OK
109 | if (data) {
110 | console.log("Configuration for " + updateRequest.Key.s3Prefix.S + " updated in " + setRegion);
111 | outerCallback(null);
112 | }
113 | }
114 | });
115 | }, function(error) {
116 | // never called
117 | });
118 | };
119 |
120 | exports.writeConfig = function(setRegion, dynamoDB, dynamoConfig, outerCallback) {
121 | var tryNumber = 0;
122 | var writeConfigRetryLimit = 100;
123 |
124 | async.whilst(function() {
125 | // retry until the try count is hit
126 | return tryNumber < writeConfigRetryLimit;
127 | }, function(callback) {
128 | tryNumber++;
129 |
130 | dynamoDB.putItem(dynamoConfig, function(err, data) {
131 | if (err) {
132 | if (err.code === 'ResourceInUseException' || err.code === 'ResourceNotFoundException') {
133 | // retry if the table is in use after 1 second
134 | setTimeout(callback(), 1000);
135 | } else {
136 | // some other error - fail
137 | console.log(JSON.stringify(dynamoConfig));
138 | console.log(JSON.stringify(err));
139 | if (outerCallback)
140 | outerCallback(err);
141 | }
142 | } else {
143 | // all OK - exit OK
144 | if (data) {
145 | console.log("Configuration for " + dynamoConfig.Item.s3Prefix.S + " successfully written in "
146 | + setRegion);
147 | if (outerCallback)
148 | outerCallback(null);
149 | }
150 | }
151 | });
152 | }, function(error) {
153 | // never called
154 | });
155 | };
156 |
157 | exports.dropTables = function(dynamoDB, callback) {
158 | // drop the config table
159 | dynamoDB.deleteTable({
160 | TableName : configTable
161 | }, function(err, data) {
162 | if (err && err.code !== 'ResourceNotFoundException') {
163 | console.log(err);
164 | process.exit(ERROR);
165 | } else {
166 | console.log("All Configuration Tables Dropped");
167 | // call the callback requested
168 | }
169 | });
170 | };
171 |
172 | /* validate that the given value is a number, and if so return it */
173 | exports.getIntValue = function(value, rl) {
174 | if (!value || value === null) {
175 | rl.close();
176 | console.log('Null Value');
177 | process.exit(INVALID_ARG);
178 | } else {
179 | var num = parseInt(value);
180 |
181 | if (isNaN(num)) {
182 | rl.close();
183 | console.log('Value \'' + value + '\' is not a Number');
184 | process.exit(INVALID_ARG);
185 | } else {
186 | return num;
187 | }
188 | }
189 | };
190 |
191 | exports.getBooleanValue = function(value) {
192 | if (value) {
193 | if ([ 'TRUE', '1', 'YES', 'Y' ].indexOf(value.toUpperCase()) > -1) {
194 | return true;
195 | } else {
196 | return false;
197 | }
198 | } else {
199 | return false;
200 | }
201 | };
202 |
203 | /* validate that the provided value is not null/undefined */
204 | exports.validateNotNull = function(value, message, rl) {
205 | if (!value || value === null || value === '') {
206 | rl.close();
207 | console.log(message);
208 | process.exit(INVALID_ARG);
209 | }
210 | };
211 |
212 | /* turn blank lines read from STDIN to Null */
213 | exports.blank = function(value) {
214 | if (value === '') {
215 | return null;
216 | } else {
217 | return value;
218 | }
219 | };
220 |
221 | exports.validateArrayContains = function(array, value, rl) {
222 | if (!(array.indexOf(value) > -1)) {
223 | rl.close();
224 | console.log('Value must be one of ' + array.toString());
225 | process.exit(INVALID_ARG);
226 | }
227 | };
228 |
229 | exports.createManifestInfo = function(config) {
230 | // manifest file will be at the configuration location, with a fixed
231 | // prefix and the date plus a random value for uniqueness across all
232 | // executing functions
233 | var dateName = exports.getFormattedDate();
234 | var rand = Math.floor(Math.random() * 10000);
235 |
236 | var manifestInfo = {
237 | manifestBucket : config.manifestBucket.S,
238 | manifestKey : config.manifestKey.S,
239 | manifestName : 'manifest-' + dateName + '-' + rand
240 | };
241 | manifestInfo.manifestPrefix = manifestInfo.manifestKey + '/' + manifestInfo.manifestName;
242 | manifestInfo.manifestPath = manifestInfo.manifestBucket + "/" + manifestInfo.manifestPrefix;
243 |
244 | return manifestInfo;
245 | };
246 |
247 | exports.randomInt = function(low, high) {
248 | return Math.floor(Math.random() * (high - low) + low);
249 | };
--------------------------------------------------------------------------------
/lib/constants.js:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright 2014-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 |
4 | Licensed under the Amazon Software License (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
5 |
6 | http://aws.amazon.com/asl/
7 |
8 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and limitations under the License.
9 | */
10 |
// Shared application constants. These are deliberately assigned without
// var so they become properties of the global object: a single
// require('./constants') makes them visible to every module in the process.

// DynamoDB attribute / batch status field names
batchId = 'batchId';
currentBatch = 'currentBatch';
s3prefix = 's3Prefix';
lastUpdate = 'lastUpdate';
complete = 'complete';
locked = 'locked';
open = 'open';
error = 'error';
entries = 'entries';
status = 'status';
// DynamoDB table holding the per-bucket loader configuration
configTable = 'LambdaRDSLoaderConfig';
// AWS error codes checked by callers
conditionCheckFailed = 'ConditionalCheckFailedException';
provisionedThroughputExceeded = 'ProvisionedThroughputExceededException';
// process exit codes
INVALID_ARG = -1;
ERROR = -1;
OK = 0;
27 |
--------------------------------------------------------------------------------
/lib/importer.js:
--------------------------------------------------------------------------------
1 | var region = process.env['AWS_REGION'];
2 | if (!region || region === null || region === "") {
3 | region = "us-east-1";
4 | console.log("AWS Lambda RDS Database Loader using default region " + region);
5 | }
6 |
7 | var aws = require('aws-sdk');
8 | aws.config.update({
9 | region : region
10 | });
11 | var s3 = new aws.S3({
12 | apiVersion : '2006-03-01',
13 | region : region
14 | });
15 | var dynamoDB = new aws.DynamoDB({
16 | apiVersion : '2012-08-10',
17 | region : region
18 | });
19 |
20 | require('./constants');
21 | var common = require('./common');
22 | var kmsCrypto = require('./kmsCrypto');
23 | kmsCrypto.setRegion(region);
24 | var S3S = require('s3-streams');
25 | var pgCopy = require('pg-copy-streams').from;
26 | var pg = require('pg');
27 | var parse = require('csv-parse');
28 | var Q = require('q');
29 | var pgp = require('pg-promise')({ promiseLib: Q });
30 | var db;
31 | var _ = require('underscore');
32 | var getS3 = Q.nbind(s3.getObject, s3);
33 | var s3Keys = require('./s3-keys');
34 |
35 |
// Importer processes a set of S3 event records, loading each referenced
// CSV file into an RDS table.
function Importer(records) {
  this.records = records;
}
40 |
// List every key under bucket/prefix and convert each to an S3-event-style
// record object. Keys whose final character is not a word character (e.g.
// "folder" placeholder keys ending in '/') are skipped. Returns a promise
// for the record array; rejects if listing fails.
Importer.prototype.getS3Records = function(bucket, prefix) {
  var deferred = Q.defer();
  var records = [];
  // normalise to "some/prefix/" with no leading or trailing slash doubled
  var normalizedPrefix = prefix.replace(/^\/|\/$/g, '') + '/';

  var s3ListKeys = new s3Keys(s3, 1000);
  s3ListKeys.listKeys({
    bucket: bucket,
    prefix: normalizedPrefix
  }, function (error, keys) {
    if (error) {
      deferred.reject(error);
      // fixed: previously fell through and iterated over undefined keys
      return;
    }

    _.each(keys, function (key) {
      // skip keys whose last character reduces to nothing once
      // non-word characters and underscores are stripped
      if (_.last(key).replace(/[\W_]+/g, '').length !== 0) {
        records.push({
          s3: {
            bucket: {
              name: bucket
            },
            object: {
              key: key
            }
          }
        });
      }
    });
    deferred.resolve(records);
  });

  return deferred.promise;
};
75 |
// Derive the target table name for an S3 key from the configured folder
// depth. A negative depth counts from the end of the path (-1 uses the
// file name itself). Non-word characters in the chosen path segment are
// replaced with '_', and the configured schema and prefix are prepended.
Importer.prototype.getTableName = function(key, inputInfo) {
  var parts = key.split('/');
  // fixed: the depth was previously an implicit global that was never
  // assigned for non-negative configured depths, silently reusing
  // whatever value a prior call had left behind
  var depth = inputInfo.folderDepthLevelForTableName;
  if (depth < 0) {
    parts.reverse();
    depth = (depth * -1) - 1;
  }
  return inputInfo.schema + '.' + inputInfo.tablePrefix + parts[depth].replace(/[\W]+/g, '_');
};
84 |
// Resolve the table name for every record (unless single-table mode is
// configured) and collect the unique {tableName, key} pairs onto
// inputInfo.tableNames. Resolves with the augmented inputInfo.
Importer.prototype.getTableNames = function(inputInfo) {
  return new Q.promise(function(resolve, reject) {
    if (!inputInfo.useSingleTable) {
      for (var i = 0; i < inputInfo.records.length; i++) {
        var key = inputInfo.records[i].s3.object.key;
        var tableName = Importer.prototype.getTableName(key, inputInfo);
        // fixed: tableNames holds {tableName, key} objects, so the
        // previous indexOf(tableName) string comparison never matched
        // and duplicate tables were pushed
        var alreadyKnown = inputInfo.tableNames.some(function(entry) {
          return entry.tableName === tableName;
        });
        if (!alreadyKnown) {
          inputInfo.tableNames.push({
            'tableName': tableName,
            'key': key
          });
          console.log("Resolving table name to: " + tableName);
        }
      }
    }

    resolve(inputInfo);
  });
};
104 |
// Create each target table if it does not already exist. Column names are
// taken from the header row of the corresponding CSV file in S3; every
// column is created as TEXT. Resolves with inputInfo after all creates
// have settled (a single failure does not abort the others).
Importer.prototype.createTablesIfNotExists = function(inputInfo) {
  var createTables = [];
  var deferred = Q.defer();

  var createTableIfNotExists = function(table) {
    var createDeferred = Q.defer();
    getS3({Bucket: inputInfo.bucket, Key: table.key})
      .then(function(response) {
        parse(response.Body.toString(), {delimiter: inputInfo.delimiter}, function(error, output) {
          // fixed: a CSV parse failure previously crashed on output[0]
          if (error) {
            createDeferred.reject(error);
            return;
          }

          // NOTE(review): header values are interpolated into DDL
          // unescaped - a hostile CSV header could inject SQL; consider
          // quoting/escaping identifiers
          var sql = "CREATE TABLE IF NOT EXISTS " + table.tableName + "(";
          var columns = [];
          for (var i = 0; i < output[0].length; i++) {
            columns.push(output[0][i] + " text");
          }
          sql += columns.join(',') + ') WITH (OIDS=FALSE);';
          console.log(sql);

          db.query(sql)
            .then(function() {
              createDeferred.resolve();
            })
            .catch(function(error) {
              createDeferred.reject(error);
            });
        });
      })
      .catch(function(error) {
        // fixed: an S3 read failure previously left this promise pending
        createDeferred.reject(error);
      });
    return createDeferred.promise;
  };

  for (var i = 0; i < inputInfo.tableNames.length; i++) {
    createTables.push(createTableIfNotExists(inputInfo.tableNames[i]));
  }

  Q.allSettled(createTables).then(function(response) {
    deferred.resolve(inputInfo);
  });

  return deferred.promise;
};
146 |
// Decrypt the stored RDS password via KMS, copy the per-bucket settings
// from the DynamoDB item onto inputInfo, and open the module-level
// pg-promise handle. Returns a promise for the augmented inputInfo.
Importer.prototype.updateConfig = function(inputInfo) {
  console.log("Found RDS Configuration for " + inputInfo.bucket);

  var config = inputInfo.config.Item;
  var decryptMap = Q.nbind(kmsCrypto.decryptMap, kmsCrypto);
  // only the first loadRDS target is supported
  var rds = config.loadRDS.L[0].M;
  var encryptedItems = {
    // the password is stored KMS-encrypted in DynamoDB
    'rdsPassword': kmsCrypto.stringToBuffer(rds.connectPassword.S)
  };

  // note: the previous version also created an unused Q.defer()
  return decryptMap(encryptedItems)
    .then(function(response) {
      inputInfo.tablePrefix = rds.tablePrefix.S;
      inputInfo.schema = rds.targetSchema.S;
      inputInfo.folderDepthLevelForTableName = config.folderDepthLevelForTableName.N;
      inputInfo.truncateTarget = rds.truncateTarget.BOOL;
      inputInfo.useSingleTable = rds.useSingleTable.BOOL;
      inputInfo.connectionString = 'postgres://' + rds.connectUser.S + ':' + response.rdsPassword.toString() + '@'
          + rds.rdsHost.S + ':' + rds.rdsPort.N + '/' + rds.rdsDB.S;

      // module-level handle used by the create/drop/import steps
      db = pgp(inputInfo.connectionString);
      return inputInfo;
    });
};
171 |
// When truncateTarget is configured, drop every resolved target table so
// it is recreated from scratch; otherwise resolve immediately. Always
// resolves with inputInfo once all drops have settled.
Importer.prototype.dropTablesIfExists = function(inputInfo) {
  var deferred = Q.defer();
  var pending = [];

  var issueDrop = function(tableName) {
    var sql = "DROP TABLE IF EXISTS " + tableName;
    console.log(sql);
    return db.query(sql);
  };

  if (inputInfo.truncateTarget) {
    inputInfo.tableNames.forEach(function(entry) {
      pending.push(issueDrop(entry.tableName));
    });
  }

  Q.allSettled(pending).then(function() {
    deferred.resolve(inputInfo);
  });

  return deferred.promise;
};
194 |
// Stream each source CSV from S3 into its target table via Postgres COPY.
// Files are processed sequentially: fetchRecord(i) reads file i's header
// to determine its columns, then importRecords pipes the file contents
// into the table and recurses to the next index. The returned promise
// resolves with inputInfo after the last file, or rejects on the first
// unrecoverable parse/connect error.
Importer.prototype.runImport = function(inputInfo) {
  console.log('Importing data.');
  var deferred = Q.defer();
  var _this = this;

  // Pipe file `key` into `tableName` with COPY ... FROM STDIN (the CSV
  // HEADER option skips the header row). On stream end, advances to the
  // next record or resolves the outer deferred when all files are done.
  function importRecords(i, key, columns, tableName) {
    return new Q.promise(function(resolve, reject) {
      var stream = S3S.ReadStream(s3, {Bucket: inputInfo.bucket, Key: key});
      pg.connect(inputInfo.connectionString, function(error, client) {
        if(error) {
          reject(error);
        } else {
          console.log('Loading file:', i+1, 'of', inputInfo.records.length, ' - ', key);
          var copySql = "COPY " + tableName + " (" + columns.join(',') + ") FROM STDIN WITH CSV HEADER DELIMITER '" + inputInfo.delimiter + "'";

          var query = client.query(pgCopy(copySql));
          stream.pipe(query)
            .on('end', function () {
              client.end();
              i++;

              // sequential processing: start the next file, or finish
              if(i < inputInfo.records.length) {
                fetchRecord(i);
              } else {
                resolve(inputInfo);
              }
            })
            .on('error', function(error) {
              console.log(error);
              console.log(copySql);
              reject(error);
            });
        }
      });
    });
  }

  // Read file i's header row from S3 to derive its column list, perform
  // the single-table bookkeeping, then hand off to importRecords.
  function fetchRecord(i) {
    var key = inputInfo.records[i].s3.object.key;
    var tableName = Importer.prototype.getTableName(key, inputInfo);
    var columns = [];

    getS3({Bucket: inputInfo.bucket, Key: key})
      .then(function(response) {
        parse(response.Body.toString(), { delimiter: inputInfo.delimiter, columns: true }, function(error, output) {
          if(error) {
            deferred.reject(error);
          } else {
            // column names come from the first parsed record's keys
            columns = Object.keys(output[0]);

            var _this = Importer.prototype;
            var tableName = _this.getTableName(key, inputInfo);
            if(i === 0 && inputInfo.useSingleTable) {
              // first file in single-table mode: resolve the shared
              // table name, (re)create the table, then import
              console.log("Resolving table name to: " + tableName);

              inputInfo.tableNames.push({
                'tableName': tableName,
                'key': key
              });

              _this.dropTablesIfExists(inputInfo)
                .then(_this.createTablesIfNotExists)
                .then(function(response) {
                  return importRecords(i, key, columns, tableName);
                })
                .catch(function(error) {
                  deferred.reject(error);
                });

            } else if(inputInfo.useSingleTable) {
              // subsequent files in single-table mode: reuse the shared
              // table, attempting to add any columns this file introduces
              // (allSettled ignores failures, e.g. column already exists)
              tableName = inputInfo.tableNames[0].tableName;
              var newColumns = [];
              for(var j = 0; j < columns.length; j++) {
                var addColumnSql = "ALTER TABLE " + tableName + " ADD COLUMN " + columns[j] + " TEXT NULL";
                newColumns.push(db.query(addColumnSql));
              }

              Q.allSettled(newColumns)
                .then(function() {
                  return importRecords(i, key, columns, tableName);
                });

            } else {
              // one table per file: import directly
              return importRecords(i, key, columns, tableName);
            }
          }
        });
      });
  }

  fetchRecord(0);
  return deferred.promise;
};
289 |
// Load the per-bucket configuration item from DynamoDB and build the
// initial inputInfo state used by the rest of the import pipeline.
// Returns a promise for inputInfo with .config attached.
Importer.prototype.getConfig = function() {
  var _this = this;
  var getDynamoItem = Q.nbind(dynamoDB.getItem, dynamoDB);
  var inputInfo = {
    bucket: _this.records[0].s3.bucket.name,
    key: _this.records[0].s3.object.key,
    schema: 'public',               // default schema until config loads
    folderDepthLevelForTableName: 0,
    tableNames: [],
    connectionString: null,
    delimiter: ',',
    truncateTarget: false,
    tablePrefix: '',
    useSingleTable: false,
    sourceColumn: 'rds_loader_source_file',
    records: _this.records
  };

  // configuration is keyed by the bucket name
  var dynamoLookup = {
    Key : {
      s3Prefix : {
        S : inputInfo.bucket
      }
    },
    TableName : configTable,
    ConsistentRead : true
  };

  // fixed: previously wrapped this existing promise chain in a redundant
  // new Q.promise (the deferred anti-pattern); return the chain directly
  return getDynamoItem(dynamoLookup)
    .then(function(response) {
      inputInfo.config = response;
      return inputInfo;
    });
};
330 |
// Execute the full import pipeline. Accepts an optional Lambda context
// used to signal completion; fixed: the previous version referenced an
// undefined `context` variable, throwing a ReferenceError from both the
// error handler and the finally block. Passing no context preserves the
// old signature for existing callers.
Importer.prototype.run = function(context) {
  var _this = this;
  _this.getConfig()
    .then(_this.updateConfig)
    .then(_this.getTableNames)
    .then(_this.dropTablesIfExists)
    .then(_this.createTablesIfNotExists)
    .then(_this.runImport)
    .catch(function(error) {
      console.log(error.stack);
      if (context) {
        context.done(error);
      }
    })
    .finally(function() {
      // release pooled connections, then signal Lambda completion
      pgp.end();
      if (context) {
        context.done();
      }
    });
};
348 |
349 | module.exports = Importer;
350 |
--------------------------------------------------------------------------------
/lib/kmsCrypto.js:
--------------------------------------------------------------------------------
1 | /*
2 | Copyright 2014-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 |
4 | Licensed under the Amazon Software License (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
5 |
6 | http://aws.amazon.com/asl/
7 |
8 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and limitations under the License.
9 | */
// region actually in use by the KMS client; assigned by setRegion below
var useRegion = undefined;
var aws = require('aws-sdk');
var async = require('async');
// KMS client, lazily constructed in setRegion
var kms = undefined;

// encryption context supplied with KMS operations; region is filled in
// by setRegion
var authContext = {
  module : "AWSLambdaRedshiftLoader",
  region : null
};

// module key alias to be used for this application
// NOTE(review): the "Lamba" spelling appears to be historical - changing
// it would orphan keys created under the existing alias, so it is kept
var moduleKeyName = "alias/LambaRedshiftLoaderKey";
22 |
// Configure the AWS region for this module's KMS client. When no region
// is given, falls back to the AWS_REGION environment variable and then
// to us-east-1. Rebuilds the KMS client and records the region in the
// auth context.
var setRegion = function(region) {
  if (region) {
    useRegion = region;
  } else {
    useRegion = process.env['AWS_REGION'];

    if (!useRegion) {
      useRegion = 'us-east-1';
      console.log("AWS KMS using default region " + useRegion);
    }
  }

  aws.config.update({
    region : useRegion
  });
  kms = new aws.KMS({
    apiVersion : '2014-11-01',
    region : useRegion
  });
  authContext.region = useRegion;
};
exports.setRegion = setRegion;
45 |
46 | /**
47 | * Retrieves or creates the master key metadata for this module
48 | * Parameters:
49 | *