├── .gitignore ├── .npmignore ├── LICENSE ├── README.md ├── handler.js ├── package.json └── serverless.yml /.gitignore: -------------------------------------------------------------------------------- 1 | .serverless 2 | # Logs 3 | logs 4 | *.log 5 | npm-debug.log* 6 | 7 | # Runtime data 8 | pids 9 | *.pid 10 | *.seed 11 | 12 | # Directory for instrumented libs generated by jscoverage/JSCover 13 | lib-cov 14 | 15 | # Coverage directory used by tools like istanbul 16 | coverage 17 | 18 | # nyc test coverage 19 | .nyc_output 20 | 21 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 22 | .grunt 23 | 24 | # node-waf configuration 25 | .lock-wscript 26 | 27 | # Compiled binary addons (http://nodejs.org/api/addons.html) 28 | build/Release 29 | 30 | # Dependency directories 31 | node_modules 32 | jspm_packages 33 | 34 | # Optional npm cache directory 35 | .npm 36 | 37 | # Optional REPL history 38 | .node_repl_history 39 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # package directories 2 | node_modules 3 | jspm_packages 4 | 5 | # Serverless directories 6 | .serverless -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 AgileVision 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AWS Step Functions Demo Project 2 | 3 | This is a demo project, created as a part of the [blog post](http://agilevision.pl/blog/serverless%20architecture/2017/02/12/easily-create-complex-workflows-with-aws-step-functions.html). 4 | The project uses **serverless** for deployments. 5 | 6 | # Setup 7 | 8 | ``` 9 | npm install 10 | serverless deploy 11 | serverless deploy stepf 12 | ``` 13 | 14 | Be sure to adjust bucket names and the email address inside the **serverless.yml**! -------------------------------------------------------------------------------- /handler.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const AWS = require('aws-sdk'); 4 | const async = require('async'); 5 | 6 | /** 7 | * Lambda function that triggers Step Function execution on an S3 event. 8 | * Input event is transformed and passed as a parameter to the step function. 9 | * Step function input has the following format: 10 | * { 11 | * "bucketName": "" 12 | * "objectKey": "", 13 | * } 14 | * process.env.STEP_FUNCTION_NAME should contain the name of the target bucket 15 | */ 16 | module.exports.executeWorkflow = function (event, context) { 17 | if ('Records' in event) { 18 | const stateMachineName = process.env.STEP_FUNCTION_NAME; 19 | const stepfunctions = new AWS.StepFunctions(); 20 | 21 | async.waterfall([ 22 | (next) => { 23 | console.log('Fetching the list of available workflows'); 24 | return stepfunctions.listStateMachines({}, next); 25 | }, 26 | (data, next) => { 27 | console.log(data, next); 28 | console.log('Searching for the step function', data); 29 | 30 | for (var i = 0; i < data.stateMachines.length; i++) { 31 | const item = data.stateMachines[i]; 32 | if (item.name === stateMachineName) { 33 | console.log('Found the step function', item); 34 | return next(null, item.stateMachineArn); 35 | } 36 | } 37 | 38 | throw 'Step function with the given name doesn\'t exist'; 39 | }, 40 | (stateMachineArn, next) => { 41 | console.log('Executing the step function', stateMachineArn); 42 | const eventData = event.Records[0]; 43 | return stepfunctions.startExecution({ 44 | stateMachineArn: stateMachineArn, 45 | input: JSON.stringify({ objectKey: eventData.s3.object.key, bucketName: eventData.s3.bucket.name }) 46 | }, next); 47 | }, 48 | () => { 49 | return context.succeed('OK'); 50 | } 51 | ]); 52 | } else { 53 | return context.fail('Incoming message doesn\'t contain "Records", it will be ignored', event); 54 | } 55 | }; 56 | 57 | 58 | /** 59 | * Lambda function that parses a CSV file, uploaded to S3 and 60 | * writes it's contents to the DynamoDB. 61 | * 62 | * Function assumes event contains information about uploaded file: 63 | * { 64 | * 'bucketName': '', 65 | * 'objectKey': '' 66 | * } 67 | */ 68 | module.exports.processFile = (event, context, callback) => { 69 | const csv = require('fast-csv'); 70 | const s3 = new AWS.S3(); 71 | const dynamodb = new AWS.DynamoDB(); 72 | 73 | async.waterfall([ 74 | (next) => { 75 | console.log('Waiting until the uploaded object becomes available', 76 | '[bucket = ', event.bucketName, ', key = ', 77 | event.objectKey, ' ]'); 78 | s3.waitFor('objectExists', { 79 | Bucket: event.bucketName, 80 | Key: event.objectKey 81 | }, next); 82 | }, 83 | (result, next) => { 84 | console.log('Downloading the CSV file from S3 [bucket = ', 85 | event.bucketName, ', key = ', event.objectKey, ' ]'); 86 | 87 | const csvStream = s3.getObject({ 88 | Bucket: event.bucketName, 89 | Key: event.objectKey 90 | }).createReadStream(); 91 | 92 | csv.fromStream(csvStream).on('data', (data) => { 93 | dynamodb.putItem({ 94 | Item: { 95 | 'sensor_id': { 96 | 'S': data[0] 97 | }, 98 | 'timestamp': { 99 | 'N': data[1] 100 | }, 101 | 'value': { 102 | 'N': data[2] 103 | } 104 | }, 105 | TableName: "sensor_data" 106 | }); 107 | }); 108 | 109 | next(null); 110 | }, 111 | ], (err, results) => { 112 | if (err) { 113 | console.log('Failed execution'); 114 | return context.fail('Execution failed'); 115 | } else { 116 | console.log('Successful execution'); 117 | return context.succeed(event); 118 | } 119 | }); 120 | }; 121 | 122 | /** 123 | * Lambda function that moves the processed file to the 'processed' folder 124 | * 125 | * Function assumes event contains information about uploaded file: 126 | * { 127 | * 'bucketName': '', 128 | * 'objectKey': '' 129 | * } 130 | * 131 | * ENV.TARGET_BUCKET should contain the name of the target bucket 132 | */ 133 | module.exports.moveFile = function (event, context) { 134 | const objectKey = event.objectKey; 135 | const bucketName = event.bucketName; 136 | const newLocation = 'processed/' + objectKey; 137 | const targetBucket = process.env.TARGET_BUCKET; 138 | const s3 = new AWS.S3(); 139 | 140 | console.log('Moving "', objectKey, '" to new location "', newLocation, '"'); 141 | async.waterfall([ 142 | (next) => { 143 | s3.copyObject({ 144 | Bucket: targetBucket, 145 | Key: newLocation, 146 | CopySource: bucketName + '/' + encodeURIComponent(objectKey) 147 | }, next); 148 | }, 149 | (data, next) => { 150 | s3.waitFor('objectExists', { 151 | Bucket: targetBucket, 152 | Key: newLocation 153 | }, next); 154 | }, 155 | (data, next) => { 156 | s3.deleteObject({ 157 | Bucket: bucketName, 158 | Key: objectKey 159 | }, next); 160 | } 161 | ], (error) => { 162 | if (error) { 163 | console.log('Failed to move file', error); 164 | context.fail(); 165 | } else { 166 | context.succeed({ 167 | bucketName: event.bucketName, 168 | objectKey: event.objectKey, 169 | newLocation: newLocation 170 | }); 171 | } 172 | }); 173 | }; 174 | 175 | /** 176 | * Lambda function that sends an email after processing finished 177 | * 178 | * Function assumes event contains information about uploaded file: 179 | * { 180 | * 'bucketName': '', 181 | * 'objectKey': '' 182 | * } 183 | * 184 | */ 185 | module.exports.sendEmail = function (event, context) { 186 | const objectKey = event.objectKey; 187 | const bucketName = event.sourceBucket; 188 | const ses = new AWS.SES(); 189 | 190 | console.log('Sending an email about "', objectKey, '"'); 191 | async.waterfall([ 192 | (next) => { 193 | ses.sendEmail({ 194 | Destination: { 195 | ToAddresses: [process.env.DEST_EMAIL] 196 | }, 197 | Message: { 198 | Body: { 199 | Text: { 200 | Data: 'Processed file ' + objectKey 201 | } 202 | }, 203 | Subject: { 204 | Data: 'File processed' 205 | } 206 | }, 207 | Source: process.env.DEST_EMAIL 208 | }, next); 209 | }], (err, results) => { 210 | if (err) { 211 | console.log('Failed to send an email', err); 212 | context.fail(); 213 | } else { 214 | context.succeed("OK"); 215 | } 216 | }); 217 | }; 218 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "step-functions-demo", 3 | "version": "1.0.0", 4 | "description": "A demo project for creating AWS StepFunctions backed by AWS Lambda written in JavaScript", 5 | "main": "handler.js", 6 | "repository": { 7 | "type": "git", 8 | "url": "git+ssh://git@github.com/AgileVisionCompany/step-functions-demo.git" 9 | }, 10 | "keywords": [ 11 | "aws", 12 | "lambda", 13 | "stepfunctions" 14 | ], 15 | "author": "Volodymyr Rudyi ", 16 | "license": "MIT", 17 | "bugs": { 18 | "url": "https://github.com/AgileVisionCompany/step-functions-demo/issues" 19 | }, 20 | "homepage": "https://github.com/AgileVisionCompany/step-functions-demo#readme", 21 | "dependencies": { 22 | "async": "^2.1.4", 23 | "aws-sdk": "^2.11.0", 24 | "fast-csv": "^2.3.1", 25 | "serverless-step-functions": "^0.3.0" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | service: aws-stepfunctions-demo 2 | 3 | provider: 4 | name: aws 5 | runtime: nodejs4.3 6 | 7 | iamRoleStatements: 8 | - Effect: "Allow" 9 | Action: 10 | - "s3:*" 11 | Resource: "arn:aws:s3:::stepfunctions-demo-source-bucket/*" 12 | - Effect: "Allow" 13 | Action: 14 | - "s3:*" 15 | Resource: "arn:aws:s3:::stepfunctions-demo-target-bucket/*" 16 | - Effect: "Allow" 17 | Action: 18 | - "states:*" 19 | Resource: "*" 20 | - Effect: "Allow" 21 | Action: 22 | - "dynamodb:*" 23 | Resource: "*" 24 | - Effect: "Allow" 25 | Action: 26 | - "ses:*" 27 | Resource: "*" 28 | 29 | functions: 30 | processFile: 31 | timeout: 30 32 | memory: 256 33 | handler: handler.processFile 34 | moveFile: 35 | handler: handler.moveFile 36 | environment: 37 | TARGET_BUCKET: stepfunctions-demo-target-bucket 38 | sendEmail: 39 | handler: handler.sendEmail 40 | environment: 41 | DEST_EMAIL: test@example.com 42 | executeWorkflow: 43 | handler: handler.executeWorkflow 44 | environment: 45 | STEP_FUNCTION_NAME: aws-stepfunctions-demo-dev-stepfunctionsdemo 46 | events: 47 | - s3: 48 | bucket: stepfunctions-demo-source-bucket 49 | event: s3:ObjectCreated:* 50 | 51 | 52 | stepFunctions: 53 | stateMachines: 54 | stepfunctionsdemo: 55 | Comment: "Example StepFunction" 56 | StartAt: ProcessFile 57 | States: 58 | ProcessFile: 59 | Type: Task 60 | Resource: processFile 61 | Next: MoveFile 62 | MoveFile: 63 | Type: Task 64 | Resource: moveFile 65 | Next: Wait10Minutes 66 | Wait10Minutes: 67 | Type: Wait 68 | Seconds: 600 69 | Next: SendEmail 70 | SendEmail: 71 | Type: Task 72 | Resource: sendEmail 73 | End: true 74 | 75 | resources: 76 | Resources: 77 | TargetBucket: 78 | Type: AWS::S3::Bucket 79 | Properties: 80 | BucketName: stepfunctions-demo-target-bucket 81 | SensorDataTable: 82 | Type: "AWS::DynamoDB::Table" 83 | Properties: 84 | AttributeDefinitions: 85 | - AttributeName: "sensor_id" 86 | AttributeType: "S" 87 | KeySchema: 88 | - AttributeName: "sensor_id" 89 | KeyType: "HASH" 90 | ProvisionedThroughput: 91 | ReadCapacityUnits: 5 92 | WriteCapacityUnits: 5 93 | TableName: "sensor_data" 94 | 95 | 96 | plugins: 97 | - serverless-step-functions --------------------------------------------------------------------------------