├── .gitignore
├── config.json
├── package.json
├── README.md
├── gulpfile.js
└── index.js

/.gitignore:
--------------------------------------------------------------------------------
dist/
node_modules/
build/

--------------------------------------------------------------------------------
/config.json:
--------------------------------------------------------------------------------
{
  "lambda_function": "aws-lambda-opencv-face-detection",
  "bucket": "your-bucket",
  "destination": "path/to/destination",
  "region": "eu-west-1",
  "format": {
    "image": {
      "extension": "jpeg",
      "mimeType": "image/jpeg"
    }
  }
}

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "aws-lambda-opencv-face-detection",
  "version": "0.0.0",
  "private": true,
  "description": "AWS Lambda function to detect faces",
  "main": "index.js",
  "scripts": {
  },
  "keywords": [
    "aws",
    "lambda",
    "opencv"
  ],
  "author": "Jens Grud",
  "license": "Unlicense",
  "devDependencies": {
    "aws-sdk": "2.2.12",
    "del": "^1.1.1",
    "gulp": "^3.8.11",
    "gulp-flatten": "0.0.4",
    "gulp-install": "^0.4.0",
    "gulp-rename": "^1.2.2",
    "gulp-shell": "^0.4.0",
    "gulp-util": "^3.0.4",
    "gulp-zip": "^3.0.2",
    "run-sequence": "^1.0.2"
  },
  "dependencies": {
    "async": "^0.9.0"
  }
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
> [AWS Lambda](http://aws.amazon.com/lambda/) function for detecting faces using [OpenCV](http://opencv.org/).

## Setup
0. [Set up an EC2 instance](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/get-set-up-for-amazon-ec2.html) - you will need to build the native modules against the Amazon Linux libraries - see below for details.
1. Install node.js - AWS Lambda runs a [specific version of node](http://docs.aws.amazon.com/lambda/latest/dg/current-supported-versions.html), so preferably install that one - see below for details.
2. Clone this repository
3. Run `npm install`
4. Set up an S3 bucket and modify config.json accordingly
5. Run Gulp - see below
6. Create, upload and test your Lambda function - see the test example below
7. Invoke the Lambda function by uploading an image to your bucket

### EC2
Configure your EC2 instance:
```bash
$ sudo yum update
$ sudo yum install gcc44 gcc-c++ libgcc44 cmake
$ sudo yum install libjpeg-devel libpng-devel libjasper-devel libtiff-devel
```

### Node
Manually download, build and install node:
```bash
$ wget http://nodejs.org/dist/v0.10.36/node-v0.10.36.tar.gz
$ tar -zxvf node-v0.10.36.tar.gz
$ cd node-v0.10.36 && ./configure && make
$ sudo make install
```

### Gulp
The default gulp task will:
- Clean up the build and dist directories
- Download, extract, build and install OpenCV - note that 3.x [is not yet fully supported](https://github.com/peterbraden/node-opencv).
- Build the opencv node module against the statically compiled OpenCV. Be sure to set PKG_CONFIG_PATH to match the install prefix chosen above.
- Copy the index.js and config.json files to the dist directory
- Run npm install in the dist directory
- Zip the function and upload it either directly to Lambda or to an S3 bucket
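
### Test
Once the function is deployed you can send it a hand-rolled S3 put event, either as a test event in the Lambda console or from a small script. The sketch below assumes the function name from config.json, an existing bucket and object, and locally configured AWS credentials - the file name and object key are placeholders, so adjust them to your setup.
```js
// test-invoke.js - minimal sketch, names below are placeholders
var AWS = require('aws-sdk');

var lambda = new AWS.Lambda({region: 'eu-west-1'});

// The shape index.js expects: Records[0].s3.bucket.name and Records[0].s3.object.key
var event = {
  Records: [{
    s3: {
      bucket: {name: 'your-bucket'},
      object: {key: 'photos/face.jpg'}
    }
  }]
};

lambda.invoke({
  FunctionName: 'aws-lambda-opencv-face-detection',
  Payload: JSON.stringify(event)
}, function(err, data) {
  if (err) console.log(err, err.stack);
  else console.log(data.Payload);
});
```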

--------------------------------------------------------------------------------
/gulpfile.js:
--------------------------------------------------------------------------------
var http = require('http');
var fs = require('fs');
var gulp = require('gulp');
var gutil = require('gulp-util');
var shell = require('gulp-shell');
var flatten = require('gulp-flatten');
var rename = require('gulp-rename');
var del = require('del');
var install = require('gulp-install');
var zip = require('gulp-zip');
var AWS = require('aws-sdk');
var runSequence = require('run-sequence');
var async = require('async');
var s3 = new AWS.S3();

var config;
try {
  config = require('./config.json');
} catch (ex) {
  config = {};
}

var build = './build';
var filename = '2.4.12.3';
var fileURL = 'http://github.com/Itseez/opencv/archive';
var extension = 'zip';

gulp.task('download-opencv', shell.task([
  'wget ' + fileURL + '/' + filename + '.' + extension
]));

gulp.task('unzip-opencv', shell.task([
  'unzip ' + filename + '.' + extension + ' -d ' + build
]));

gulp.task('cmake-opencv', shell.task([
  'cd ' + build + '; cmake -D BUILD_PNG=OFF -D CMAKE_BUILD_TYPE=RELEASE -D BUILD_SHARED_LIBS=NO -D CMAKE_INSTALL_PREFIX=./opencv opencv-' + filename + '/'
]));

gulp.task('make-opencv', shell.task([
  'cd ' + build + '; make && make install'
]));

// Change the path below if needed - PKG_CONFIG_PATH must be an absolute path
// to the pkgconfig directory of the static OpenCV build installed above.
gulp.task('npm-opencv', shell.task([
  'cd ./build; PKG_CONFIG_PATH=~/aws-lambda-opencv/build/opencv/lib/pkgconfig/ npm install opencv'
]));

gulp.task('copy-opencv', function() {
  return gulp.src(['./node_modules/opencv/**/*'])
    .pipe(gulp.dest('./dist/node_modules/opencv'));
});

gulp.task('copy-haarcascade', function() {
  return gulp.src(['node_modules/opencv/data/haarcascade_frontalface_alt.xml'])
    .pipe(gulp.dest('./dist/'));
});

// First we need to clean out the dist folder and remove the compiled zip file.
gulp.task('clean', function(cb) {
  del([
    './build/*',
    './dist/*',
    './dist.zip'
  ], cb);
});

// The js task could be replaced with gulp-coffee as desired.
gulp.task('js', function() {
  return gulp.src(['index.js', 'config.json'])
    .pipe(gulp.dest('./dist'));
});

// Here we want to install npm packages to dist, ignoring devDependencies.
gulp.task('npm', function() {
  return gulp.src('./package.json')
    .pipe(gulp.dest('./dist'))
    .pipe(install({production: true}));
});

// Now the dist directory is ready to go. Zip it. Lambda resolves the handler
// "index.handler" from the root of the archive, so index.js, config.json, the
// haarcascade XML and node_modules/ all need to sit at the top level of dist.zip.
gulp.task('zip', function() {
  return gulp.src(['dist/**/*', '!dist/package.json', 'dist/.*'])
    .pipe(zip('dist.zip'))
    .pipe(gulp.dest('./'));
});

// Push dist.zip straight to the Lambda function.
gulp.task('uploadLambda', function() {
  AWS.config.region = config.region;
  var lambda = new AWS.Lambda();

  var functionName = config.lambda_function;
  fs.readFile('./dist.zip', function(err, data) {

    var params = {
      FunctionName: functionName,
      Publish: false,
      ZipFile: data
    };

    lambda.updateFunctionCode(params, function(err, data) {
      if (err) console.log(err, err.stack); // an error occurred
      else console.log(data); // successful response
    });
  });
});
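
// Alternative sketch, not wired into the default sequence below: instead of
// streaming dist.zip through the SDK, point Lambda at the copy that the
// 'upload' task puts in S3 (updateFunctionCode also accepts S3Bucket/S3Key).
gulp.task('updateLambdaFromS3', function(cb) {
  AWS.config.region = config.region;
  var lambda = new AWS.Lambda();

  lambda.updateFunctionCode({
    FunctionName: config.lambda_function,
    S3Bucket: config.bucket,
    S3Key: config.destination + '/' + config.lambda_function + '.' + extension,
    Publish: false
  }, cb);
});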

// Upload the function code to S3
gulp.task('upload', function(cb) {
  AWS.config.region = config.region;
  s3.upload({
    Bucket: config.bucket,
    Key: config.destination + "/" + config.lambda_function + "." + extension,
    Body: fs.createReadStream('./dist.zip')
  }, cb);
});

gulp.task('default', function(cb) {
  return runSequence(
    ['clean'],
    ['download-opencv'],
    ['unzip-opencv'],
    ['cmake-opencv'],
    ['make-opencv'],
    ['npm-opencv'],
    ['copy-opencv'],
    ['copy-haarcascade', 'js', 'npm'],
    ['zip'],
    ['upload'],
    // ['uploadLambda'], disabled due to an issue with aws-sdk and node 0.10.x: https://github.com/aws/aws-sdk-js/issues/615
    cb
  );
});

--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
var child_process = require('child_process');
var fs = require('fs');
var crypto = require('crypto');
var stream = require('stream');
var path = require('path');
var AWS = require('aws-sdk');
var async = require('async');
var config = require('./config');
var s3 = new AWS.S3();
var tempDir = process.env['TEMP'] || '/tmp';

var cv = require('opencv');

// Stream the source object down from S3.
function downloadStream(bucket, file, cb) {
  console.log('Starting download ' + file);

  return s3.getObject({
    Bucket: bucket,
    Key: file
  }).on('error', function(res) {
    cb('S3 download error: ' + JSON.stringify(res));
  }).createReadStream();
}

function s3upload(params, filename, cb) {
  s3.upload(params)
    .on('httpUploadProgress', function(evt) {
      console.log(filename, 'Progress:', evt.loaded, '/', evt.total);
    })
    .send(cb);
}
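
// Read the processed image back from the temp directory, compute a sha256
// digest of its contents, upload it to S3 with the digest attached as object
// metadata, then delete the local copy.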
function uploadFile(fileExt, bucket, keyPrefix, contentType, cb) {
  console.log('Uploading', contentType, keyPrefix, bucket);

  var filename = path.join(tempDir, 'out.' + fileExt);
  var rmFiles = [filename];
  var readStream = fs.createReadStream(filename);

  var params = {
    Bucket: bucket,
    Key: keyPrefix + '.' + fileExt,
    ContentType: contentType,
    CacheControl: 'max-age=31536000' // 1 year (60 * 60 * 24 * 365)
  };

  async.waterfall([
    function(cb) {
      return cb(null, readStream, filename);
    },
    function(fstream, uploadFilename, cb) {
      console.log('Begin hashing', uploadFilename);

      var hash = crypto.createHash('sha256');

      fstream.on('data', function(d) {
        hash.update(d);
      });

      fstream.on('end', function() {
        cb(null, fs.createReadStream(uploadFilename), hash.digest('hex'));
      });
    },
    function(fstream, hashdigest, cb) {
      console.log(filename, 'hashDigest:', hashdigest);
      params.Body = fstream;

      if (hashdigest)
        params.Metadata = {'sha256': hashdigest};

      s3upload(params, filename, cb);
    },
    function(data, cb) {
      console.log(filename, 'complete. Deleting now.');
      async.each(rmFiles, fs.unlink, cb);
    }
  ], cb);
}

// Run the OpenCV face detector, draw an ellipse around every match and save
// the annotated image to the temp directory.
function detectFaces(file, cb) {
  console.log('Starting image processing', file);

  cv.readImage(file, function(err, im) {
    if (err) throw err;
    if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');

    im.detectObject("haarcascade_frontalface_alt.xml", {}, function(err, faces) {
      if (err) throw err;

      for (var i = 0; i < faces.length; i++) {
        var face = faces[i];
        im.ellipse(face.x + face.width / 2, face.y + face.height / 2, face.width / 2, face.height / 2);
      }

      im.save(tempDir + '/out.' + config.format.image.extension);

      return cb(err, 'Image saved to ' + tempDir + '/out.' + config.format.image.extension);
    });
  });
}

// Download the source image, detect faces, then remove the downloaded file.
function processImage(s3Event, srcKey, cb) {

  var file = path.join(tempDir, 'download');

  async.series([
    function(cb) {
      var dlStream = downloadStream(s3Event.bucket.name, srcKey, cb);
      dlStream.on('end', function() {
        cb(null, 'download finished');
      });
      dlStream.pipe(fs.createWriteStream(file));
    },
    function(cb) {
      detectFaces(file, cb);
    },
    function(cb) {
      console.log('Deleting download file');
      fs.unlink(file, cb);
    }
  ], cb);
}

exports.handler = function(event, context) {

  var s3Event = event.Records[0].s3;
  var srcKey = decodeURIComponent(s3Event.object.key);
  var index = srcKey.lastIndexOf("/");
  var fileName = srcKey.substr(index + 1);
  var keyPrefix = fileName.replace(/\.[^/.]+$/, '');
  var format = config.format;

  async.series([
    function (cb) { processImage(s3Event, srcKey, cb); },
    function (cb) {
      async.parallel([
        // Write the annotated image back to the same bucket under the configured destination prefix
        function (cb) { uploadFile(format.image.extension, s3Event.bucket.name, config.destination + '/' + keyPrefix, format.image.mimeType, cb); }
      ], cb);
    }
  ], function(err, results) {
    if (err) context.fail(err);
    else context.succeed(results);
  });
};

--------------------------------------------------------------------------------