├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── Riv-Prod.template.json
├── app.py
├── default-params.py
├── docker-deploy
├── README.md
├── debug.bat
├── debug.sh
├── ship-it.bat
└── ship-it.sh
├── example-config.json
├── images
├── README.md
└── cdk-deploy
│ ├── Dockerfile
│ ├── requirements.txt
│ └── ship-it.sh
├── infra
├── README.md
├── bulkloader
│ ├── Diagrams.drawio
│ ├── README.md
│ ├── abstract.png
│ ├── batch_handler.py
│ ├── inventory_created.py
│ ├── throttled_indexer.py
│ └── topology.py
├── configsettings.py
├── default_lambda.py
├── frontend
│ ├── amplifydeployment
│ │ └── index.py
│ ├── cognito
│ │ └── topology.py
│ └── topology.py
├── interfaces.py
├── jumpbox.py
├── services
│ ├── core
│ │ └── backup.py
│ ├── networking
│ │ ├── vpc.py
│ │ └── vpce.py
│ └── rekognition
│ │ ├── collections.py
│ │ └── topology.py
├── storage
│ ├── README.md
│ ├── Storage.drawio
│ ├── diagram.png
│ ├── face_images.py
│ ├── face_metadata.py
│ └── topology.py
├── topologies.py
└── userportal
│ ├── README.md
│ ├── UserPortal.drawio
│ ├── diagram.png
│ ├── functions
│ ├── definitions.py
│ └── topology.py
│ ├── gateway
│ ├── models.py
│ └── topology.py
│ ├── states
│ ├── README.md
│ ├── auth.py
│ ├── interfaces.py
│ ├── register_idcard.py
│ ├── register_user.py
│ ├── topology.py
│ └── update.py
│ └── topology.py
├── one-click.sh
└── src
├── bulk-loader
├── batch-handler
│ ├── .dockerignore
│ ├── .vscode
│ │ └── example-launch.json
│ ├── Dockerfile
│ ├── README.md
│ ├── examples
│ │ └── payload.json
│ ├── handler.py
│ ├── lib
│ │ ├── importhistory.py
│ │ ├── models.py
│ │ └── registrationproviders.py
│ └── requirements.txt
├── inventory-created-handler
│ ├── .dockerignore
│ ├── Dockerfile
│ ├── README.md
│ ├── examples
│ │ ├── ValidJob.json
│ │ └── sqs_message.json
│ ├── handler.py
│ ├── lib
│ │ └── parser.py
│ └── requirements.txt
└── throttled-indexer
│ ├── .dockerignore
│ ├── .vscode
│ └── example-launch.json
│ ├── Dockerfile
│ ├── README.md
│ ├── examples
│ └── payload.json
│ ├── handler.py
│ ├── lib
│ ├── models.py
│ └── storage.py
│ └── requirements.txt
├── frontend
├── .gitignore
├── README.md
├── amplify.yml
├── package-lock.json
├── package.json
├── public
│ ├── favicon.ico
│ ├── index.html
│ ├── logo192.png
│ ├── logo512.png
│ ├── manifest.json
│ └── robots.txt
└── src
│ ├── App.css
│ ├── App.js
│ ├── App.test.js
│ ├── Components
│ ├── Header.js
│ ├── Liveness.js
│ ├── Routes.js
│ ├── SideNavigation.js
│ └── context
│ │ ├── AppLayoutContext.js
│ │ └── NotificationContext.js
│ ├── Error.js
│ ├── Pages
│ ├── Home.js
│ ├── LoggedIn.js
│ ├── Register.js
│ ├── RegisterWithIdCard.js
│ ├── SignIn.js
│ └── Success.js
│ ├── index.css
│ ├── index.js
│ ├── logo.svg
│ ├── reportWebVitals.js
│ └── setupTests.js
├── rekognition
├── check-userid
│ ├── ddb.py
│ └── handler.py
├── compare-face-with-idcard
│ ├── .dockerignore
│ ├── .vscode
│ │ └── example-launch.json
│ ├── Dockerfile
│ ├── errors.py
│ ├── examples
│ │ └── payload.json
│ ├── handler.py
│ ├── models.py
│ └── requirements.txt
├── compare-faces
│ ├── .dockerignore
│ ├── .vscode
│ │ └── example-launch.json
│ ├── Dockerfile
│ ├── ddb.py
│ ├── errors.py
│ ├── examples
│ │ └── payload.json
│ ├── handler.py
│ ├── models.py
│ └── requirements.txt
├── detect-faces
│ ├── .dockerignore
│ ├── .vscode
│ │ └── example-launch.json
│ ├── Dockerfile
│ ├── examples
│ │ ├── nbachmei.json
│ │ └── output.json
│ ├── exceptions.py
│ ├── handler.py
│ └── requirements.txt
├── index-faces
│ ├── .dockerignore
│ ├── .vscode
│ │ └── example-launch.json
│ ├── Dockerfile
│ ├── errors.py
│ ├── examples
│ │ └── payload.json
│ ├── handler.py
│ ├── models.py
│ ├── requirements.txt
│ └── storage.py
├── liveness-session-result
│ ├── .dockerignore
│ ├── Dockerfile
│ ├── handler.py
│ └── requirements.txt
├── reset
│ ├── Dockerfile
│ ├── handler.py
│ └── requirements.txt
├── search-faces
│ ├── .dockerignore
│ ├── .vscode
│ │ └── example-launch.json
│ ├── Dockerfile
│ ├── examples
│ │ └── payload.json
│ ├── handler.py
│ └── requirements.txt
├── setup
│ ├── .vscode
│ │ └── example-launch.json
│ ├── README.md
│ ├── app.py
│ ├── rekognition-setup.py
│ └── requirements.txt
└── start-liveness-session
│ ├── .dockerignore
│ ├── Dockerfile
│ ├── handler.py
│ └── requirements.txt
├── shared
└── requirements.txt
├── test-client
├── .gitignore
├── .vscode
│ └── example-launch.json
├── README.md
├── app.py
├── faces
│ ├── amitgt.jpg
│ ├── lemull.jpg
│ ├── nbachmei.jpg
│ ├── pasqanth.jpg
│ └── private
│ │ └── .gitignore
├── payload.json
└── requirements.txt
└── textract
├── README.md
└── extract-idcard
├── .dockerignore
├── .vscode
└── example-launch.json
├── Dockerfile
├── examples
├── parser_state.json
├── passport_card.jpeg
├── payload.json
└── textract-output.json
├── handler.py
├── model.py
└── requirements.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | cdk.out
3 | **/.vscode/launch.json
4 | cdk.context.json
5 | **/*.pem
6 | credentials
7 | config.json
8 | bin/**
9 | packages/**
10 |
11 | # dependencies
12 | /node_modules
13 | /.pnp
14 | .pnp.js
15 |
16 | # testing
17 | /coverage
18 |
19 | # production
20 | /build
21 |
22 | # misc
23 | .DS_Store
24 | .env.local
25 | .env.development.local
26 | .env.test.local
27 | .env.production.local
28 |
29 | npm-debug.log*
30 | yarn-debug.log*
31 | yarn-error.log*
32 |
33 | #amplify-do-not-edit-begin
34 | amplify/\#current-cloud-backend
35 | amplify/.config/local-*
36 | amplify/logs
37 | amplify/mock-data
38 | amplify/backend/amplify-meta.json
39 | amplify/backend/.temp
40 | build/
41 | dist/
42 | node_modules/
43 | aws-exports.js
44 | awsconfiguration.json
45 | amplifyconfiguration.json
46 | amplifyconfiguration.dart
47 | amplify-build-config.json
48 | amplify-gradle-config.json
49 | amplifytools.xcconfig
50 | .secret-*
51 | **.sample
52 | #amplify-do-not-edit-end
53 |
54 | amplify/team-provider-info.json
55 |
56 | .venv
57 | .env
58 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 |
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
7 | the Software, and to permit persons to whom the Software is furnished to do so.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
15 |
16 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from os import environ
3 | from infra.configsettings import ConfigManager
4 | from typing import List  # NOTE(review): List appears unused in this listing — confirm before removing
5 | import aws_cdk as core
6 | from infra.interfaces import IRivStack  # NOTE(review): IRivStack appears unused in this listing — confirm before removing
7 | from infra.topologies import DefaultRivStack
8 | 
9 | config_mgr = ConfigManager()  # supplies region_name/account used by get_environment() below
10 | 
11 | def get_environment()->core.Environment:
12 |     '''
13 |     Determines which region and account to deploy into.
14 |     '''
15 |     return core.Environment(
16 |         region=config_mgr.region_name,
17 |         account=config_mgr.account)
18 | 
19 | class RIVApp(core.App):
20 |     '''
21 |     Represents the root CDK entity.
22 |     '''
23 |     def __init__(self, **kwargs) ->None:
24 |         super().__init__(**kwargs)
25 | 
26 |         env = get_environment()
27 | 
28 |         riv_stack_name = environ.get('RIV_STACK_NAME')  # optional override of the stack name
29 |         if riv_stack_name is None:
30 |             riv_stack_name = 'Riv-Prod'  # default stack name when RIV_STACK_NAME is unset
31 |         self.riv_stack = DefaultRivStack(self,riv_stack_name, riv_stack_name=riv_stack_name, env=env)
32 | 
33 | app = RIVApp()
34 | assembly = app.synth()  # synthesize the CloudFormation assembly (cdk.out)
35 | 
36 | 
--------------------------------------------------------------------------------
/default-params.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | #########################################################
3 | # Modifies a CloudFormation Template to include
4 | # default Stack Parameter values.
5 | #########################################################
6 | 
7 | from os import environ, path
8 | from json import loads,dumps
9 | from sys import stderr
10 | 
11 | def get_asset_bucket()->str:
12 |     '''
13 |     Determines what bucket to use.
14 |     '''
15 |     bucket = environ.get('S3_ASSET_BUCKET')  # preferred bucket from the environment
16 |     if not bucket is None:
17 |         return bucket
18 | 
19 |     raise ValueError('Missing env TEMPLATE_ASSET_BUCKET and S3_ASSET_BUCKET')  # NOTE(review): TEMPLATE_ASSET_BUCKET is named here but never read above — confirm whether it was meant to be a fallback
20 | 
21 | def get_asset_prefix()->str:
22 |     '''
23 |     Gets the preferred Asset bucket prefix
24 |     '''
25 |     prefix = environ.get('S3_ASSET_PREFIX')
26 |     if prefix is None:
27 |         prefix = ''
28 | 
29 |     return prefix.strip('/')  # normalize: drop leading/trailing slashes
30 | 
31 | if __name__ == '__main__':
32 |     '''
33 |     The program main routine.
34 |     '''
35 |     #content = stdin.read()
36 |     template_file=environ.get('STACK_TEMPLATE_FILE')
37 |     if template_file is None:
38 |         print("Missing environment variable STACK_TEMPLATE_FILE")
39 |         exit(1)
40 |     elif not path.isfile(template_file):
41 |         print('Unable to find template file %s' % template_file)
42 |         exit(1)
43 |     else:
44 |         with open(template_file,'rt') as f:
45 |             content = loads(f.read()) #stdin.read())
46 | 
47 |     parameters:dict = content['Parameters']
48 |     for key in parameters.keys():
49 |         key:str = key  # re-annotation only; parameter names are strings
50 |         if not key.startswith('AssetParameters'):
51 |             continue  # only CDK asset parameters get defaults
52 | 
53 |         if 'Bucket' in key:
54 |             parameters[key]['Default'] = get_asset_bucket()  # point the asset bucket parameter at the custom bucket
55 |         elif 'ArtifactHash' in key:
56 |             start=len('AssetParameters')
57 |             end=key.index('ArtifactHash')
58 |             parameters[key]['Default'] = key[start:end]  # the asset hash is embedded in the parameter name itself
59 |         elif 'VersionKey' in key:
60 |             start=len('AssetParameters')
61 |             end=key.index('S3VersionKey')
62 |             sha = key[start:end]
63 |             parameters[key]['Default'] = '%s/||asset.%s.zip' % (get_asset_prefix(), sha)  # '||' presumably the CDK S3VersionKey prefix/key separator — confirm against CDK asset docs
64 |         else:
65 |             stderr.write('ignoring %s' % key)  # unrecognized AssetParameters variant — leave untouched
66 | 
67 |     #with open('cdk.out/OneClick.template.json', 'w') as f:
68 |     #    f.write(dumps(content, indent=2))
69 |     print(dumps(content, indent=2))  # emit the modified template on stdout
70 | 
--------------------------------------------------------------------------------
/docker-deploy/README.md:
--------------------------------------------------------------------------------
1 | # Docker-Enabled Deployment Scripts
2 |
3 | This folder contains utility scripts for troubleshooting or deploying the solution. They are available for Windows and Mac workstations.
4 |
5 | ## What are the prerequisite steps to using these scripts
6 |
7 | Customers need to [configure the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) and [install Docker](https://docs.docker.com/engine/install/).
8 |
9 | ## How can I troubleshoot deployments
10 |
11 | Customers can launch a terminal window for [Windows](debug.bat) or [Mac](debug.sh). The script will build the [cdk-deploy](../images/cdk-deploy) Docker image and then run it as an interactive session.
12 |
13 | ## How can I deploy everything via Docker
14 |
15 | Customers can launch the deployment tool for [Windows](ship-it.bat) or [Mac](ship-it.sh). The script will build the [cdk-deploy](../images/cdk-deploy) Docker image and then run [default deployment script](../images/cdk-deploy/ship-it.sh).
16 |
17 | Note: The scripts assume that the system user is `root` and has a home directory of `/root`. If your specific environment uses a different account or home directory, then you must update the script accordingly.
18 |
--------------------------------------------------------------------------------
/docker-deploy/debug.bat:
--------------------------------------------------------------------------------
1 | @CLS
2 | @ECHO OFF
3 | @ECHO ==================================
4 | @ECHO Deployment Tool
5 | @ECHO Nate Bachmeier - 2021
6 | @ECHO ==================================
7 |
8 | @SETLOCAL enableextensions enabledelayedexpansion
9 | @SET base_path=%~dp0
10 | @PUSHD %base_path%\..
11 |
12 | @CALL docker build -t riv-deploy images/cdk-deploy
13 | @CALL docker run -it -v %userprofile%\.aws:/root/.aws -v %cd%:/files -v /var/run/docker.sock:/var/run/docker.sock -w /files --entrypoint bash riv-deploy
14 |
15 | @POPD
--------------------------------------------------------------------------------
/docker-deploy/debug.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | SCRIPT_DIR="$( cd -- "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 ; pwd -P )"
4 | 
5 | pushd $SCRIPT_DIR/..
6 | docker build -t riv-deploy images/cdk-deploy
7 | docker run -it -v ~/.aws:/root/.aws -v `pwd`:/files -v /var/run/docker.sock:/var/run/docker.sock -w /files --entrypoint bash riv-deploy
8 | 
9 | popd
--------------------------------------------------------------------------------
/docker-deploy/ship-it.bat:
--------------------------------------------------------------------------------
1 | @CLS
2 | @ECHO OFF
3 | @ECHO ==================================
4 | @ECHO Deployment Tool
5 | @ECHO Nate Bachmeier - 2021
6 | @ECHO ==================================
7 |
8 | @SETLOCAL enableextensions enabledelayedexpansion
9 | @SET base_path=%~dp0\..
10 | @PUSHD %base_path%
11 |
12 | @CALL docker build -t riv-deploy images/cdk-deploy
13 | @CALL docker run -it -v %userprofile%\.aws:/root/.aws -v %cd%:/files -v /var/run/docker.sock:/var/run/docker.sock -w /files riv-deploy ship-it
14 |
15 | @POPD
--------------------------------------------------------------------------------
/docker-deploy/ship-it.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | SCRIPT_DIR="$( cd -- "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 ; pwd -P )"
4 |
5 | pushd $SCRIPT_DIR/..
6 | docker build -t riv-deploy images/cdk-deploy
7 | docker run -it -v ~/.aws:/root/.aws -v `pwd`:/files -v /var/run/docker.sock:/var/run/docker.sock -w /files riv-deploy ship-it
8 |
9 | popd
--------------------------------------------------------------------------------
/example-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "region": "ca-central-1",
3 | "account": "111111111",
4 | "use_isolated_subnets": false,
5 | "use_automated_backup": false,
6 | "include_bulk_loader": false,
7 | "include_front_end": false,
8 | "use_custom_asset_bucket": false,
9 | "custom_asset_bucket_name": "nbachmei.riv.ca-central-1",
10 | "custom_assets": {
11 | "BatchHandler": {
12 | "key": "latest/bulk-loader/batch-handler.zip"
13 | },
14 | "LoaderInvCreatedHndlr": {
15 | "key": "latest/bulk-loader/inventory-created-handler.zip"
16 | },
17 | "ThrottledIndexer": {
18 | "key": "latest/bulk-loader/throttled-indexer.zip"
19 | },
20 | "CompareFaces": {
21 | "key": "latest/rekognition/compare-faces.zip"
22 | },
23 | "DetectFaces": {
24 | "key": "latest/rekognition/detect-faces.zip"
25 | },
26 | "IndexFaces": {
27 | "key": "latest/rekognition/index-faces.zip"
28 | },
29 | "SearchFaces": {
30 | "key": "latest/rekognition/search-faces.zip"
31 | },
32 | "Extract-IdCard": {
33 | "key": "latest/textract/extract-idcard.zip"
34 | }
35 | }
36 | }
--------------------------------------------------------------------------------
/images/README.md:
--------------------------------------------------------------------------------
1 | # Deployment Images
2 |
3 | This folder contains [Docker](https://docker.com) definitions for deployment artifacts. Customers can locally install the CDK and related tools on their workstation or utilize these resources for a more consistent experience.
4 |
--------------------------------------------------------------------------------
/images/cdk-deploy/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:16
2 |
3 | RUN apt-get -y update && apt-get -y install --no-install-recommends python3-pip npm curl zip groff \
4 | && npm install -g aws-cdk@2.13.0
5 |
6 | RUN /usr/bin/python3 -m pip install --upgrade pip
7 | RUN curl -sSL https://get.docker.com/ | sh
8 |
9 | COPY requirements.txt .
10 | RUN pip install -r requirements.txt
11 |
12 | COPY ship-it.sh /usr/local/bin/ship-it
13 | RUN chmod a+x /usr/local/bin/ship-it
14 |
--------------------------------------------------------------------------------
/images/cdk-deploy/requirements.txt:
--------------------------------------------------------------------------------
1 | virtualenv
2 | boto3
3 | setuptools
4 | aws-cdk-lib
5 | constructs
6 | aws_cdk.aws_amplify_alpha
--------------------------------------------------------------------------------
/images/cdk-deploy/ship-it.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | chmod a+x /files/app.py
4 | cdk diff -a /files/app.py --require-approval never
5 | cdk deploy -a /files/app.py --require-approval never
6 |
--------------------------------------------------------------------------------
/infra/README.md:
--------------------------------------------------------------------------------
1 | # RIV Infrastructure
2 |
3 | This folder contains all [AWS Cloud Development Kit](https://docs.aws.amazon.com/cdk/latest/guide/work-with-cdk-python.html) automation for provisioning a [RIV stack](topologies.py). A RIV stack is a self-contained service installation, complete with every required subsystem.
4 |
5 | Within each subfolder is a `topology.py` file that defines any exportable symbols. Typically, consumers should **import** these root constructs (e.g., [RivUserPortal](userportal/topology.py)) rather than directly **importing** private implementation details (e.g., [RivUserPortalCompareFaces](userportal/functions/definitions.py)). This approach reduces the risk of future breaking changes.
6 |
7 | ## What are the root-level subsystems
8 |
9 | - [bulkloader](bulkloader). The RIV bulk importing service.
10 | - [services](services). Standard AWS supporting components (e.g., backup)
11 | - [storage](storage). Defines all shared data stores.
12 | - [userportal](userportal). The public interface that end users interact with
13 |
--------------------------------------------------------------------------------
/infra/bulkloader/Diagrams.drawio:
--------------------------------------------------------------------------------
1 | 7Vpdd9o4EP01PCYHf4HzGAhNsyd7Nlu629OnHGEL20VYriwH6K/fEUgYSyZxEkOSLskD9nisj7lXV+OxO85wvrxmKIv/pCEmHbsbLjvOVce2Lb/vwY+wrDYWv9fbGCKWhNKpNIyTX1gau9JaJCHOK46cUsKTrGoMaJrigFdsiDG6qLpNKan2mqEIG4ZxgIhp/ZaEPJaz8Lql/TNOolj1bHXllTlSztKQxyikix2TM+o4Q0Yp3xzNl0NMRPBUXDb3fdpzdTswhlPe5IbR9+nscpTEP0e3X37NFtYydL+f+ZtWHhAp5IT/vRvK8fKVCgItOElSPNzGuNtxBhFDYQJ9DymhDGwpTcF9EPM5gTMLDhdxwvE4Q4FoZgH8ANuUplyibNnqXPYkWoUoZeJ4vowEoc7RInfPI0aLbN3lDeBce/X+IQvE7ZzRGVZD6tiO37+wLnqio4QQbagPmPEEoL4kSSRa5VR0guQZwVMuWoTxJ2l0uz67crpyzHVdhCiPcSgnYsIjERO94uWOScJ1jekcc7YCF3nVvpDUkWvHcuX5omSi60tbvMNCR3EOSfZH27ZLgsCB5Mgz+GIZfDHIAmik4TYMdRzYoUgLUXKeDpLtHjVIbk2QekSQKYap9CJxNI5hziF4XSGO4GfMKQOdk34TpvyUBQZS3mzEHELFq4GtLgRJ+Jo1IOkNBq88+yoWwtWZvQ9ACuhMyVrM4iQMMSyXQRX29nF17Ya4uoeC1TNgvZmD/ANo3UERzDBvppuaesD/JzGIfXpqKNp0/aehudtQRYaUxt2iCSZ3NE94slFQwI3UieCEck7nOzoYwJgwe56yq3kqZbcqTHP26XzGaFgEfCPyg6xW7XOnHXp5mmz0TXr1ath1cSh29Ux2pQ8wPboe9IlgH45glv3OGNZ/jGH6vjNkGPH1BgWbQRKcmHcM5qV5S9TTE8eG1Ns+vLTOvQuDe5ffxiLtcYS4IR7EJ4YdgWGTdaRb4Zjb0znmva28qYrBM/XtxLqDs46g+SRELT0V2N47o51T87SnkQqn4aWoRQloCMrzJGj4FJzTggW4Aek5YhHmTwswDivlLjPMO2H0asKobAwTxJOHapGsLrayhzuawNRKFPc93KkmNhOXd+0WtfSG/GpDTl9raBMYo6E11NtpvwL9umf9o6Hfbwi+YskJ/Rejn/3xJbdTPEnp8H5Gp/PpYuqdmUv/GvO/Jj9g5/iKokhIp0EGgEBJLGU8phFNERmVVq2QUvrc0nVVRjDmB+Z8JYUbFZzW8UnD+oXy4jVkmPeoaJ91z92eZVWwOrPs15FJudDpNMeHWdxmxUemqno28RmlsDWzUzbxwbIJPYl1rIbZhHeobMIsAzXbT3JYovxQSUZTFXhfSYbb1bYZ54XbjKvVCrfbzrGSDPMt3U2aFdzUob8LDB4nFTpCreZnS7Uar69LULNajX8wCTJrNc0kqInWPJ2q+u9KQzytiOt4L9QQA2b9NdWBU1XbAPWu+K1SVbvpk/CeVPWVRNESCb3csAddWERoteOWCYf8sV5qu9lHOv8xbzjYdN/qXqVg2CHa15iBmBNRbNP3qxugz/KUN3+4vFlXM8tvtmkdrAq3ncLbVOH8htqjVsc72d1+mwxZob0D/z+50BVdcO5gk0LEtF8jjhdodRKiIwgRypL7SMa7FTXqaftc03cCh3vdab/pS4HGqZAa1UmOWpajJl8ADgoyA59bisI6pfr/fvinf/Z6yC//4LT8AnsDf/kduzP6Dw==
--------------------------------------------------------------------------------
/infra/bulkloader/README.md:
--------------------------------------------------------------------------------
1 | # Bulk Loader Service
2 |
3 | ## What is the high-level flow
4 |
5 | 1. The [Shared Data Storage](../storage) contains an **image bucket**, where administrators can centralize their registration information. Amazon S3 Inventory enumerates all files within the bucket daily (configurable) and persists this information into the **inventory bucket**. This operation triggers an **ObjectCreatedNotification** from S3 that forwards to the SNS Topic **InventoryCreated**.
6 |
7 | 1. This notification triggers the [Bulk Loader Inventory Created Handler](inventory_created.py) to process the report. The function converts the report into an **S3BatchOperations_CSV_20180820** compatible format. Finally, it uses the `s3:CreateJob` method to fan-out importing the list.
8 |
9 | 1. Amazon S3 Batch enumerates through the list and passes each item to the [RivBulkLoaderBatchHandler](batch_handler.py). This function determines whether the current item qualifies for import (e.g., valid format). The historical importation information resides in Amazon S3 Object Tags. After disqualifying files, the remaining items are forwarded into an Amazon SQS Queue (aka Riv Throttled Input Queue).
10 |
11 | 1. The [RivBulkLoaderThrottledIndexer](throttled_indexer.py) pulls from the **Input Queue** and forwards them into the [UserPortal Gateway](../userportal/gateway). After confirming the Gateway is successful, the function updates the **Import History** table. Finally, the message is removed from the queue.
12 |
13 | 
14 |
15 | ## What S3 ObjectTags are supported
16 |
17 | The bulk importer identifies any supported image (`*.png` and `*.jpeg` files) and examines the associated tags.
18 |
19 |
20 | | Tag Key | Expected Format | Description|
21 | |---------|-----------------|------------|
22 | | UserId | Unicode string (128 char max) | Required name of the user to create |
23 | | Properties | s3://bucket/path/properties.json| Optional path to Amazon S3 file containing the user's property bag
24 | | Indexed | True or False | Marker denoting the object has been imported |
25 | | Ignore | True or False | Marker denoting the object should never be processed |
26 |
27 | ## What is the expected format for properties.json
28 |
29 | The user's properties.json must deserialize into a `Mapping[str,str]` data structure. Extending this functionality would require extending the [Index-Face's StorageWriter](../../src/rekognition/index-faces/storage.py). Additionally, the entire user record in DynamoDB cannot exceed 400KB.
30 |
31 | ```json
32 | {
33 | "fname": "Fred",
34 | "lname": "Flintson",
35 | "address": "345 Cave Stone Road"
36 | }
37 | ```
38 |
--------------------------------------------------------------------------------
/infra/bulkloader/abstract.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/infra/bulkloader/abstract.png
--------------------------------------------------------------------------------
/infra/bulkloader/batch_handler.py:
--------------------------------------------------------------------------------
1 | from typing import Mapping
2 | from infra.default_lambda import RivDefaultFunction
3 | from infra.storage.topology import RivSharedDataStores
4 | from infra.interfaces import IVpcRivStack
5 | import aws_cdk as core
6 | from constructs import Construct
7 | 
8 | class RivBulkLoaderBatchHandler(RivDefaultFunction):
9 |     '''
10 |     Represents a function handler for the Amazon S3 Batch.
11 |     '''
12 |     @property
13 |     def source_directory(self)->str:  # Lambda bundle location, relative to the repo root
14 |         return 'src/bulk-loader/batch-handler'
15 | 
16 |     @property
17 |     def component_name(self)->str:  # short name used in the function-name format below
18 |         return 'BatchHandler'
19 | 
20 |     @property
21 |     def function_timeout(self)->core.Duration:  # 5-minute cap for processing each S3 Batch invocation
22 |         return core.Duration.minutes(5)
23 | 
24 |     @property
25 |     def function_name(self) -> str:  # '<stack>-BulkLoading-BatchHandler'
26 |         return '{}-BulkLoading-{}'.format(
27 |             self.riv_stack.riv_stack_name,
28 |             self.component_name)
29 | 
30 |     def __init__(self, scope: Construct, id: str, riv_stack:IVpcRivStack, sharedStorage:RivSharedDataStores, subnet_group_name:str='Default', env:Mapping[str,str]={}, **kwargs) -> None:  # NOTE(review): env={} is a shared mutable default — safe only if never mutated downstream; confirm
31 |         super().__init__(scope, id, **kwargs, riv_stack=riv_stack, subnet_group_name=subnet_group_name, env=env)
32 | 
33 |         '''
34 |         Grant additional permissions
35 |         '''
36 |         sharedStorage.images.image_bucket.grant_read(self.function.role)  # read access to the shared image bucket
37 |         # self.function.role.add_to_policy(statement=iam.PolicyStatement(
38 |         #   effect= iam.Effect.ALLOW,
39 |         #   actions=['s3:GetObjectTagging'],
40 |         #   resources=[
41 |         #     sharedStorage.images.image_bucket.bucket_arn,
42 |         #     sharedStorage.images.image_bucket.bucket_arn+'/*'
43 |         #   ]))
44 | 
--------------------------------------------------------------------------------
/infra/bulkloader/inventory_created.py:
--------------------------------------------------------------------------------
1 | from typing import Mapping
2 | from infra.default_lambda import RivDefaultFunction
3 | from infra.storage.topology import RivSharedDataStores
4 | from infra.interfaces import IVpcRivStack
5 | import aws_cdk as core
6 | from constructs import Construct
7 | from aws_cdk import (
8 |     aws_lambda_event_sources as events,
9 | )
10 | 
11 | class RivBulkLoaderInventoryCreatedHandler(RivDefaultFunction):
12 |     '''
13 |     Represents a lambda for processing Image Bucket Manifests.
14 |     '''
15 |     @property
16 |     def source_directory(self)->str:  # Lambda bundle location, relative to the repo root
17 |         return 'src/bulk-loader/inventory-created-handler'
18 | 
19 |     @property
20 |     def component_name(self)->str:  # short name used in the function-name format below
21 |         return 'LoaderInvCreatedHndlr'
22 | 
23 |     @property
24 |     def function_timeout(self)->core.Duration:  # 5-minute cap per invocation
25 |         return core.Duration.minutes(5)
26 | 
27 |     @property
28 |     def function_name(self) -> str:  # '<stack>-BulkLoading-LoaderInvCreatedHndlr'
29 |         return '{}-BulkLoading-{}'.format(
30 |             self.riv_stack.riv_stack_name,
31 |             self.component_name)
32 | 
33 |     def __init__(self, scope: Construct, id: str, riv_stack:IVpcRivStack, sharedStorage:RivSharedDataStores, subnet_group_name:str='Default', env:Mapping[str,str]={}, **kwargs) -> None:  # NOTE(review): env={} is a shared mutable default — safe only if never mutated downstream; confirm
34 |         super().__init__(scope, id, **kwargs, riv_stack=riv_stack, subnet_group_name=subnet_group_name, env=env)
35 | 
36 |         '''
37 |         When the Amazon S3 Inventory Report completes, it raises the ObjectCreatedNotification.
38 |         This message forwards into the Inventory Created topic, and this function responds to those request.
39 |         '''
40 |         self.function.add_event_source(events.SnsEventSource(
41 |             topic= sharedStorage.images.inventory_created))  # subscribe the lambda to the InventoryCreated SNS topic
42 | 
43 |         '''
44 |         Grant additional permissions here.
45 |         '''
46 |         sharedStorage.images.inventory_bucket.grant_read_write(self.function.role)  # reads manifests and writes the derived S3 Batch input (see bulkloader README)
47 |         # self.function.role.attach_inline_policy(policy= iam.Policy(self,'S3Batch',
48 |         #   statements=[
49 |         #     iam.PolicyStatement(
50 |         #       effect= iam.Effect.ALLOW,
51 |         #       resource
52 |         #       actions=[
53 |         #         's3:CreateJob',
54 |         #         'iam:PassRole'
55 |         #       ])
56 |         #   ]))
57 | 
--------------------------------------------------------------------------------
/infra/bulkloader/throttled_indexer.py:
--------------------------------------------------------------------------------
1 | from typing import Mapping
2 | from infra.default_lambda import RivDefaultFunction
3 | from infra.storage.topology import RivSharedDataStores
4 | from infra.interfaces import IVpcRivStack
5 | import aws_cdk as core
6 | from constructs import Construct
7 | from aws_cdk import (
8 | aws_lambda_event_sources as events,
9 | aws_sqs as sqs,
10 | aws_iam as iam,
11 | )
12 |
class RivBulkLoaderThrottledIndexer(RivDefaultFunction):
    '''
    Represents a function that reads from SQS and writes into RiV.

    The queue (plus batch_size=1) throttles how quickly bulk-loaded images are
    indexed; failed messages redrive into a dead-letter queue after 3 attempts.
    '''
    @property
    def source_directory(self) -> str:
        '''Gets the folder containing this function's source code.'''
        return 'src/bulk-loader/throttled-indexer'

    @property
    def component_name(self) -> str:
        '''Gets the short component name used when naming resources.'''
        return 'ThrottledIndexer'

    @property
    def function_timeout(self) -> core.Duration:
        '''Gets the maximum execution time for the function.'''
        return core.Duration.minutes(5)

    @property
    def function_name(self) -> str:
        '''Gets the physical Lambda function name (stack-qualified).'''
        return '{}-BulkLoading-{}'.format(
            self.riv_stack.riv_stack_name,
            self.component_name)

    def __init__(self, scope: Construct, id: str, riv_stack: IVpcRivStack, sharedStorage: RivSharedDataStores, subnet_group_name: str = 'Default', env: Mapping[str, str] = None, **kwargs) -> None:
        '''
        :param riv_stack: the deployment environment that owns this function.
        :param sharedStorage: shared data stores (image bucket is read here).
        :param subnet_group_name: subnet group the function is attached to.
        :param env: optional additional environment variables for the Lambda.
        '''
        # BUGFIX: the default was a mutable `{}` shared by every instantiation
        # (the classic mutable-default-argument pitfall); normalize None here.
        if env is None:
            env = {}
        super().__init__(scope, id, **kwargs, riv_stack=riv_stack, subnet_group_name=subnet_group_name, env=env)

        # Configure the input queue with a redrive policy into a DLQ.
        self.dead_letter_queue = sqs.Queue(self, 'DeadLetterQueue')

        self.input_queue = sqs.Queue(self, 'InputQueue',
            retention_period=core.Duration.days(7),
            # Match the function timeout so an in-flight message is not
            # redelivered while an invocation may still be processing it.
            visibility_timeout=self.function_timeout,
            dead_letter_queue=sqs.DeadLetterQueue(
                max_receive_count=3,
                queue=self.dead_letter_queue))

        # Trigger the lambda from the queue, one message per invocation,
        # which is what throttles the overall indexing rate.
        self.function.add_event_source(events.SqsEventSource(
            queue=self.input_queue,
            batch_size=1))

        # Grant read (and object-tagging) access on the image bucket.
        self.function.role.add_to_policy(statement=iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                's3:GetObject*',
                's3:GetBucket*',
                's3:List*',
                's3:PutObjectTagging',
            ],
            resources=[
                sharedStorage.images.image_bucket.bucket_arn,
                sharedStorage.images.image_bucket.bucket_arn + '/*'
            ]))

        # Grant read access to the SSM parameter holding the user-portal URL.
        self.function.role.add_to_policy(statement=iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['ssm:GetParameter*'],
            resources=['arn:aws:ssm:{}:{}:parameter/riv/{}/userportal/url'.format(
                core.Stack.of(self).region, core.Aws.ACCOUNT_ID, riv_stack.riv_stack_name)]
        ))
83 |
--------------------------------------------------------------------------------
/infra/bulkloader/topology.py:
--------------------------------------------------------------------------------
import builtins

import aws_cdk as core
from aws_cdk import (
    aws_iam as iam,
    #aws_dynamodb as ddb,
    #aws_ssm as ssm,
)
from constructs import Construct

from infra.bulkloader.batch_handler import RivBulkLoaderBatchHandler
from infra.bulkloader.inventory_created import RivBulkLoaderInventoryCreatedHandler
from infra.bulkloader.throttled_indexer import RivBulkLoaderThrottledIndexer
from infra.interfaces import IVpcRivStack
from infra.storage.topology import RivSharedDataStores
14 |
class RivBulkLoader(Construct):
    '''
    Represents the root construct for the Bulk Loader Service.

    Wires together the S3 Batch service role and the three lambdas that make up
    the bulk-loading pipeline (inventory-created -> batch handler -> indexer).
    '''
    def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack, sharedStorage: RivSharedDataStores, subnet_group_name: str = 'Default', **kwargs) -> None:
        '''
        :param riv_stack: the deployment environment that owns this service.
        :param sharedStorage: shared data stores (image + inventory buckets).
        :param subnet_group_name: subnet group the lambdas are attached to.
        '''
        super().__init__(scope, id)

        # Amazon S3 Batch assumes this role while executing the import job.
        self.batch_service_role = iam.Role(self, 'BatchServiceRole',
            assumed_by=iam.ServicePrincipal(service='batchoperations.s3.amazonaws.com'))

        sharedStorage.images.image_bucket.grant_read(self.batch_service_role)
        sharedStorage.images.inventory_bucket.grant_read_write(self.batch_service_role)

        # The batch job determines which images qualify for processing. Only
        # applicable items are put into an SQS queue that throttles load speeds.
        self.throttled_indexer = RivBulkLoaderThrottledIndexer(self, 'BatchIndexer',
            riv_stack=riv_stack,
            sharedStorage=sharedStorage,
            subnet_group_name=subnet_group_name,
            env={
                'RIV_STACK_NAME': riv_stack.riv_stack_name,
                'USER_PORTAL_PARAM': '/riv/{}/userportal/url'.format(
                    riv_stack.riv_stack_name),
            })

        # S3 Batch iterates the inventory list and passes each item to this
        # lambda, which decides whether the S3 object (the image) qualifies for
        # RIV indexing. Skip reasons: already processed, incomplete info, etc.
        self.batch_handler = RivBulkLoaderBatchHandler(self, 'BatchHandler',
            riv_stack=riv_stack,
            sharedStorage=sharedStorage,
            subnet_group_name=subnet_group_name,
            env={
                'THROTTLED_QUEUE_URL': self.throttled_indexer.input_queue.queue_url,
            })

        self.batch_handler.function.grant_invoke(self.batch_service_role)

        # When the S3 inventory completes it raises an ObjectCreatedNotification
        # in the inventory bucket. The message forwards through an SNS topic into
        # this function, which light-filters it and creates the S3 Batch job.
        # BUGFIX: `core` was referenced below without this module importing
        # aws_cdk as core -- that import now exists at the top of the file.
        self.inventory_created_handler = RivBulkLoaderInventoryCreatedHandler(self, 'InventoryCreatedHandler',
            riv_stack=riv_stack,
            sharedStorage=sharedStorage,
            subnet_group_name=subnet_group_name,
            env={
                'ACCOUNT_ID': core.Stack.of(self).account,
                'BATCH_FUNCTION_ARN': self.batch_handler.function.function_arn,
                'BATCH_ROLE_ARN': self.batch_service_role.role_arn,
                'RIV_STACK_NAME': riv_stack.riv_stack_name,
            })
90 |
--------------------------------------------------------------------------------
/infra/frontend/amplifydeployment/index.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import boto3
4 | from botocore.exceptions import ClientError
5 |
# Module-level Amplify client: created once at cold start and reused across
# warm Lambda invocations.
client = boto3.client('amplify')
7 |
8 |
def lambda_handler(event, context):
    '''
    Waits for any in-flight Amplify deployment of the requested app/branch to
    reach a terminal state, then reports success.

    :param event: dict with 'app' (Amplify app name) and 'branch' (branch name).
    :param context: Lambda context object (unused).
    :returns: a status dict for the caller.
    '''
    import time  # function-scope import keeps the module import block untouched

    appName, appId = getApp(event['app'])
    if appName is not None:
        branch = getBranch(appId, event['branch'])
        if branch is not None:
            jobId = getJob(appId, branch)
            # BUGFIX: the original looped on `while(jobId == False)`, but jobId
            # is either None or a job-id string at this point, so the wait loop
            # was dead code and the function returned without waiting at all.
            if jobId is not None:
                while True:
                    job_status = client.get_job(
                        appId=appId,
                        branchName=branch,
                        jobId=jobId
                    )
                    status = job_status['job']['summary']['status']
                    if status == 'FAILED' or status == 'SUCCEED' or status == 'CANCELLED':
                        break
                    time.sleep(3)  # back off between polls instead of hot-looping
    return {
        "status": "Amplify App deployed Successfully."
    }
31 |
32 |
33 |
def getApp(name):
    '''
    Looks up an Amplify app by its display name.

    :param name: the Amplify app name to search for.
    :returns: (appName, appId) for the first match, or (None, None).
    '''
    listing = client.list_apps()
    match = next((app for app in listing['apps'] if app['name'] == name), None)
    if match is None:
        return None, None
    return match['name'], match['appId']
47 |
48 |
def getBranch(appId, name):
    '''
    Looks up a branch of the given Amplify app by name.

    :param appId: the Amplify application id.
    :param name: the branch name to search for.
    :returns: the matching branch name, or None when absent.
    '''
    for candidate in client.list_branches(appId=appId)['branches']:
        if candidate['branchName'] == name:
            return candidate['branchName']
    return None
57 |
58 |
def getJob(appId, branch):
    '''
    Finds the first deployment job for app/branch that is still in flight.

    :param appId: the Amplify application id.
    :param branch: the branch name whose jobs are inspected.
    :returns: the jobId of an in-flight job, or None when all have finished.
    '''
    in_flight = ('PENDING', 'PROVISIONING', 'RUNNING', 'CANCELLING')
    summaries = client.list_jobs(
        appId=appId,
        branchName=branch,
    )['jobSummaries']
    for summary in summaries:
        if summary['status'] in in_flight:
            return summary['jobId']
    return None
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/infra/frontend/cognito/topology.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.interfaces import IRivStack
3 | from constructs import Construct
4 | from os import path
5 | from aws_cdk import (
6 | aws_iam as iam,
7 | aws_cognito as cognito
8 | )
9 |
10 |
# Paths resolved relative to this module's location on disk.
# NOTE(review): `bin_directory` is not referenced anywhere in this file --
# presumably consumed by related deployment assets; verify before removing.
root_directory = path.dirname(__file__)
bin_directory = path.join(root_directory, "bin")
13 |
14 |
class RivCognitoForLivenes(Construct):
    '''
    Represents the root construct to create Amplify APP:
    the Cognito user pool, app client, and identity pool that back the
    liveness frontend, plus the unauthenticated role the identity pool assumes.

    NOTE(review): 'Livenes' in the class name looks like a typo for 'Liveness';
    renaming would break importers, so it is left as-is.
    '''

    # def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack) -> None:
    def __init__(self, scope: Construct, id: builtins.str, riv_stack: IRivStack) -> None:
        super().__init__(scope, id)

        # User pool named after the owning stack.
        self.cognito = cognito.UserPool(
            self, "RIV-Cognito-User-Pool", user_pool_name=riv_stack.stack_name)
        # self.cognito.add_client("RIV-Cogito-app", supported_identity_providers=[
        #     cognito.UserPoolClientIdentityProvider.COGNITO])

        # App client through which the frontend talks to the user pool.
        self.client = cognito.UserPoolClient(
            self, "RIV-Cognito-Client", user_pool=self.cognito, user_pool_client_name=riv_stack.stack_name)

        # Identity pool federating the user pool above.
        # NOTE(review): allow_unauthenticated_identities=True grants guests an
        # IAM role (below) -- confirm this exposure is intended for production.
        self.idp = cognito.CfnIdentityPool(self, "RIV-IdentityPool", identity_pool_name=riv_stack.stack_name, allow_unauthenticated_identities=True, cognito_identity_providers=[cognito.CfnIdentityPool.CognitoIdentityProviderProperty(
            client_id=self.client.user_pool_client_id, provider_name=self.cognito.user_pool_provider_name, server_side_token_check=None)])

        # Role assumed by unauthenticated identities from this identity pool only
        # (scoped via the aud/amr conditions on the federated principal).
        # NOTE(review): AmazonRekognitionFullAccess for guests is broad --
        # consider narrowing to the liveness-session actions actually needed.
        self.unAuthrole = iam.Role(self, 'RIVIdentityPoolUnAuthRole',
                                   assumed_by=iam.FederatedPrincipal('cognito-identity.amazonaws.com', conditions=({
                                       "StringEquals": {"cognito-identity.amazonaws.com:aud": self.idp.ref},
                                       "ForAnyValue:StringLike": {"cognito-identity.amazonaws.com:amr": "unauthenticated"}
                                   }), assume_role_action='sts:AssumeRoleWithWebIdentity'),
                                   description='role for amplify riv-prod app',

                                   managed_policies=[
                                       iam.ManagedPolicy.from_aws_managed_policy_name(
                                           managed_policy_name='AmazonRekognitionFullAccess')
                                   ])

        # Binds the unauthenticated role to the identity pool.
        self.idpAttachment = cognito.CfnIdentityPoolRoleAttachment(
            self, 'RIV-IdentityPool-Role-Attachment', identity_pool_id=self.idp.ref, roles={"unauthenticated": self.unAuthrole.role_arn})
49 |
--------------------------------------------------------------------------------
/infra/interfaces.py:
--------------------------------------------------------------------------------
1 | from typing import List, Mapping
2 | from aws_cdk import Tags, Stack
3 | from constructs import Construct
4 | from aws_cdk import (
5 | aws_ec2 as ec2,
6 | )
7 |
class IVpcEndpointsForAWSServices(Construct):
    '''
    Represents an interface for creating VPC-endpoints.

    Subclasses populate the two registries exposed by the properties below.
    '''
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Registries mapping a service name to its endpoint construct.
        self.__interfaces = {}
        self.__gateways = {}

    @property
    def interfaces(self) -> Mapping[str, ec2.IInterfaceVpcEndpoint]:
        '''Gets the mapping of service name to interface endpoint.'''
        return self.__interfaces

    @property
    def gateways(self) -> Mapping[str, ec2.IGatewayVpcEndpoint]:
        '''Gets the mapping of service name to gateway endpoint.'''
        return self.__gateways
31 |
class IRivStack(Stack):
    '''
    Represents an interface into a deployment environment.
    '''
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

    @property
    def riv_stack_name(self) -> str:
        '''Gets the name of the deployment environment; subclasses must override.'''
        raise NotImplementedError()
45 |
class IVpcNetworkingConstruct(Construct):
    '''
    Represent a networking configuration for an IRivStack.
    '''
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

    @property
    def vpc(self) -> ec2.IVpc:
        '''Gets the VPC associated with this environment; subclasses must override.'''
        raise NotImplementedError()

    @property
    def endpoints(self) -> IVpcEndpointsForAWSServices:
        '''Gets the VPC-endpoints for this environment; subclasses must override.'''
        raise NotImplementedError()
66 |
class IVpcRivStack(IRivStack):
    '''
    Represents an interface to a deployment environment with Vpc.

    Every property raises NotImplementedError; concrete stacks supply them.
    '''
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

    @property
    def cidr_block(self) -> str:
        '''Gets the environment's network block (e.g., 10.0.0.0/16).'''
        raise NotImplementedError()

    @property
    def subnet_configuration(self) -> List[ec2.SubnetConfiguration]:
        '''Gets the VPC's subnet topology.'''
        raise NotImplementedError()

    @property
    def vpc(self) -> ec2.IVpc:
        '''Gets the VPC associated with this RIV stack.'''
        raise NotImplementedError()

    @property
    def networking(self) -> IVpcNetworkingConstruct:
        '''Gets the network configuration for this environment.'''
        raise NotImplementedError()

    @property
    def security_group(self) -> ec2.SecurityGroup:
        '''Gets the default security group for this environment.'''
        raise NotImplementedError()

    @property
    def vpc_endpoints(self) -> IVpcEndpointsForAWSServices:
        '''Gets the VPC endpoints available in this environment.'''
        raise NotImplementedError()
112 |
113 |
class RivStack(IRivStack):
    '''
    Represents a deployable environment (aka CloudFormation Stack).
    '''
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Tag every resource in the stack so selection-by-tag features
        # (e.g. backup plans) can discover them.
        Tags.of(self).add('riv_stack', self.riv_stack_name)

    @property
    def riv_stack_name(self) -> str:
        '''Gets the name of this environment; subclasses must override.'''
        raise NotImplementedError()
128 |
129 |
--------------------------------------------------------------------------------
/infra/jumpbox.py:
--------------------------------------------------------------------------------
1 | from typing import List, Mapping
2 |
3 | from infra.interfaces import IVpcRivStack
4 | from constructs import Construct
import aws_cdk as core
from aws_cdk import (
    aws_ec2 as ec2,
    aws_iam as iam,
)
10 |
class JumpBoxConstruct(Construct):
    '''
    Ephemeral Windows jumpbox for interactive testing inside the RIV VPC.
    Access is via SSM Session Manager (no key pair attached by default).
    '''
    @property
    def riv_stack(self) -> IVpcRivStack:
        '''Gets the deployment environment this jumpbox belongs to.'''
        return self.__landing_zone

    def __init__(self, scope: Construct, id: str, riv_stack: IVpcRivStack, **kwargs) -> None:
        '''
        Configure an ephemeral jumpbox for testing.

        :param riv_stack: the VPC-backed environment hosting the instance.
        '''
        super().__init__(scope, id, **kwargs)
        self.__landing_zone = riv_stack

        # Only required for debugging the jumpbox
        #key_pair_name = 'nbachmei.personal.'+core.Stack.of(self).region

        # Instance role: SSM managed policies enable Session Manager access.
        # NOTE(review): ServicePrincipal's `region` parameter is deprecated in
        # CDK v2 and the service name is usually 'ec2.amazonaws.com' -- confirm
        # this synthesizes the intended trust policy.
        role = iam.Role(self, 'Role',
                        assumed_by=iam.ServicePrincipal(
                            service='ec2',
                            region=core.Stack.of(self).region),
                        managed_policies=[
                            iam.ManagedPolicy.from_aws_managed_policy_name('AmazonSSMManagedInstanceCore'),
                            iam.ManagedPolicy.from_aws_managed_policy_name('AmazonSSMDirectoryServiceAccess'),
                        ])

        # Small burstable instance in the 'Default' subnet group, using the
        # stack's shared security group.
        self.instance = ec2.Instance(self, 'Instance',
                                     role=role,
                                     vpc=riv_stack.vpc,
                                     #key_name= key_pair_name,
                                     instance_type=ec2.InstanceType.of(
                                         instance_class=ec2.InstanceClass.BURSTABLE3,
                                         instance_size=ec2.InstanceSize.SMALL),
                                     allow_all_outbound=True,
                                     user_data_causes_replacement=True,
                                     security_group=riv_stack.security_group,
                                     vpc_subnets=ec2.SubnetSelection(subnet_group_name='Default'),
                                     machine_image=self.machine_image)

    @property
    def machine_image(self) -> ec2.IMachineImage:
        '''
        Gets the Windows AMI for the instance.
        NOTE(review): AMI ids are region-pinned and go stale; deploys outside
        the four listed regions will fail to resolve an image.
        '''
        return ec2.MachineImage.generic_windows(ami_map={
            'us-east-1': 'ami-0f93c815788872c5d',
            'us-east-2': 'ami-0b697c4ae566cad55',
            'eu-west-1': 'ami-03b9a7c8f0fc1808e',
            'us-west-2': 'ami-0b7ebdd52b84c244d',
        })
56 |
--------------------------------------------------------------------------------
/infra/services/core/backup.py:
--------------------------------------------------------------------------------
1 | from infra.interfaces import IRivStack
2 | import aws_cdk as core
3 | from constructs import Construct
4 | from aws_cdk import (
5 | aws_backup as backup,
6 | aws_iam as iam,
7 | aws_kms as kms,
8 | aws_sns as sns,
9 | )
10 |
class BackupStrategyConstruct(Construct):
    '''
    AWS Backup strategy for the environment: a KMS-encrypted vault plus a
    default daily+weekly plan that selects resources by the `riv_stack` tag.
    '''
    def __init__(self, scope: Construct, id: str, riv_stack: IRivStack, **kwargs):
        '''
        Landing Zone Backup Policy.

        :param riv_stack: the environment whose tagged resources are backed up.
        '''
        super().__init__(scope, id, **kwargs)

        region = core.Stack.of(self).region

        # Customer-managed key encrypting the backup vault contents.
        self.encryption_key = kms.Key(self, 'EncryptionKey',
                                      description='Encryption Key for BackupStrategy')

        # Vault notifications are published here.
        self.topic = sns.Topic(self, 'Topic')
        # Role AWS Backup assumes to run backups and restores.
        self.role = iam.Role(self, 'Role',
                             description='Account Backup Role',
                             assumed_by=iam.ServicePrincipal(service='backup'))

        # Vault access policy permits the backup role to copy into the vault.
        # RemovalPolicy.DESTROY: vault is deleted with the stack -- confirm
        # this is acceptable for production retention requirements.
        self.vault = backup.BackupVault(self, 'Vault',
                                        encryption_key=self.encryption_key,
                                        notification_topic=self.topic,
                                        removal_policy=core.RemovalPolicy.DESTROY,
                                        #backup_vault_name='{}-Backup-Vault'.format(riv_stack.riv_stack_name),
                                        access_policy=iam.PolicyDocument(
                                            statements=[
                                                iam.PolicyStatement(
                                                    effect=iam.Effect.ALLOW,
                                                    resources=["*"],
                                                    actions=['backup:CopyIntoBackupVault'],
                                                    principals=[
                                                        iam.ArnPrincipal(arn=self.role.role_arn)
                                                    ])
                                            ]))

        # Default schedule: daily and weekly rules into the vault above.
        self.default_plan = backup.BackupPlan(self, 'DefaultPlan',
                                              backup_vault=self.vault,
                                              backup_plan_name='Default Plan {} in {}'.format(riv_stack.riv_stack_name, region),
                                              backup_plan_rules=[
                                                  backup.BackupPlanRule.daily(),
                                                  backup.BackupPlanRule.weekly(),
                                              ])

        # Select everything carrying the stack's `riv_stack` tag (applied by
        # RivStack at the stack level).
        self.default_plan.add_selection('SelectionPolicy',
                                        allow_restores=True,
                                        role=self.role,
                                        resources=[
                                            backup.BackupResource.from_tag("riv_stack", riv_stack.riv_stack_name),
                                        ])
58 |
--------------------------------------------------------------------------------
/infra/services/networking/vpc.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from infra.services.networking.vpce import VpcEndpointsForAWSServices
3 | from typing import List
4 | from constructs import Construct
5 | from aws_cdk import (
6 | aws_ec2 as ec2,
7 | )
8 |
class VpcNetworkingConstruct(Construct):
    '''
    Configure the networking layer
    '''
    def __init__(self, scope: Construct, id: str, cidr: str, subnet_configuration: List[ec2.SubnetConfiguration], **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # A single NAT gateway is provisioned only when some subnet group
        # needs outbound internet access; otherwise none are created.
        wants_egress = any(
            group.subnet_type == ec2.SubnetType.PRIVATE_WITH_EGRESS
            for group in subnet_configuration)
        nat_gateway_count = 1 if wants_egress else 0

        self.vpc = ec2.Vpc(self, 'Network',
            ip_addresses=ec2.IpAddresses.cidr(cidr),
            enable_dns_hostnames=True,
            enable_dns_support=True,
            max_azs=2,
            nat_gateways=nat_gateway_count,
            subnet_configuration=subnet_configuration)

        # Endpoint factory bound to the new VPC.
        self.endpoints = VpcEndpointsForAWSServices(self, 'Endpoints', vpc=self.vpc)
31 |
--------------------------------------------------------------------------------
/infra/services/networking/vpce.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from infra.interfaces import IVpcEndpointsForAWSServices
3 | from constructs import Construct
4 | from aws_cdk import (
5 | aws_ec2 as ec2,
6 | )
7 |
8 |
class VpcEndpointsForAWSServices(IVpcEndpointsForAWSServices):
    '''
    Represents a utility class for creating VPC endpoints.

    Each add_* helper is fluent (returns self); interface endpoints are only
    created the first time a given service name is requested.
    '''
    def __init__(self, scope: Construct, id: str, vpc: ec2.IVpc, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        self.vpc = vpc

        self.security_group = ec2.SecurityGroup(
            self, 'EndpointSecurity',
            vpc=vpc,
            allow_all_outbound=True,
            description='SG for AWS Resources in isolated subnet')

        # NOTE(review): admits any IPv4 source on all protocols. That is common
        # for endpoints reachable only from inside an isolated subnet, but
        # confirm the exposure is intended.
        self.security_group.add_ingress_rule(
            peer=ec2.Peer.any_ipv4(),
            connection=ec2.Port(
                protocol=ec2.Protocol.ALL,
                string_representation='Any source'))

    def add_gateways(self) -> IVpcEndpointsForAWSServices:
        '''Creates the S3 and DynamoDB gateway endpoints.'''
        for svc in ['s3', 'dynamodb']:
            self.gateways[svc] = ec2.GatewayVpcEndpoint(
                self, svc,
                vpc=self.vpc,
                service=ec2.GatewayVpcEndpointAwsService(
                    name=svc))
        return self

    def add_rekognition_support(self) -> IVpcEndpointsForAWSServices:
        '''Creates the Amazon Rekognition interface endpoint.'''
        return self.add_interfaces(services=[
            'rekognition'
        ])

    def add_textract_support(self) -> IVpcEndpointsForAWSServices:
        '''Creates the Amazon Textract interface endpoint.'''
        return self.add_interfaces(services=[
            'textract'
        ])

    def add_kms_support(self) -> IVpcEndpointsForAWSServices:
        '''Creates the AWS KMS interface endpoint.'''
        return self.add_interfaces(services=[
            'kms'
        ])

    def add_ssm_support(self) -> IVpcEndpointsForAWSServices:
        '''Creates the endpoints required for SSM Session Manager access.'''
        return self.add_interfaces(services=[
            'ssm', 'ec2messages', 'ec2', 'ssmmessages', 'logs'
        ])

    def add_lambda_support(self) -> IVpcEndpointsForAWSServices:
        '''Creates the endpoints required for VPC-attached Lambda functions.'''
        return self.add_interfaces(services=[
            'elasticfilesystem', 'lambda', 'states',
            'ecr.api', 'ecr.dkr'
        ])

    def add_apigateway_support(self) -> IVpcEndpointsForAWSServices:
        '''Creates the private API Gateway (execute-api) endpoint.'''
        return self.add_interfaces(services=[
            'execute-api'
        ])

    def add_storage_gateway(self) -> IVpcEndpointsForAWSServices:
        '''Creates the AWS Storage Gateway interface endpoint.'''
        return self.add_interfaces(services=[
            'storagegateway'
        ])

    def add_everything(self) -> IVpcEndpointsForAWSServices:
        '''Creates the full catalog of interface endpoints used by RIV.'''
        return self.add_interfaces(services=[
            'ssm', 'ec2messages', 'ec2',
            'ssmmessages', 'kms', 'elasticloadbalancing',
            'elasticfilesystem', 'lambda', 'states',
            'events', 'execute-api', 'kinesis-streams',
            'kinesis-firehose', 'logs', 'sns', 'sqs',
            'secretsmanager', 'config', 'ecr.api', 'ecr.dkr',
            'storagegateway'
        ])

    def add_interfaces(self, services: List[str]) -> IVpcEndpointsForAWSServices:
        '''Creates an interface endpoint per named service, skipping duplicates.'''
        for svc in services:
            # Idiom fix (PEP 8 / E713): `svc not in ...` rather than `not svc in ...`.
            if svc not in self.interfaces:
                self.interfaces[svc] = ec2.InterfaceVpcEndpoint(
                    self, svc,
                    vpc=self.vpc,
                    service=ec2.InterfaceVpcEndpointAwsService(name=svc),
                    open=True,
                    private_dns_enabled=True,
                    lookup_supported_azs=True,
                    security_groups=[self.security_group])

        return self
99 |
--------------------------------------------------------------------------------
/infra/services/rekognition/collections.py:
--------------------------------------------------------------------------------
1 | import builtins
2 |
3 | from constructs import Construct
4 | from aws_cdk import (
5 | custom_resources as cr,
6 | )
7 |
class RekognitionCollectionConstruct(Construct):
    '''
    Manages an Amazon Rekognition collection through an AwsCustomResource:
    creates the collection on stack create and deletes it on stack delete.
    '''
    def __init__(self, scope: Construct, id: builtins.str, collection_id: str) -> None:
        '''
        :param collection_id: required id of the Rekognition collection.
        :raises ValueError: when collection_id is None.
        '''
        super().__init__(scope, id)
        # BUGFIX: `assert` is stripped under `python -O`; validate explicitly so
        # a missing id always fails fast at synth time.
        if collection_id is None:
            raise ValueError("CollectionId is missing")

        '''
        AwsSdkCall expects JavaScript naming conventions.
        https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Rekognition.html#createCollection-property
        '''
        # NOTE(review): 'Rekogition' in the physical id is misspelled, but
        # changing it would replace the deployed resource -- left as-is.
        _ = cr.AwsCustomResource(self, 'RekognitionCollection',
            policy=cr.AwsCustomResourcePolicy.from_sdk_calls(
                resources=cr.AwsCustomResourcePolicy.ANY_RESOURCE),
            on_create=cr.AwsSdkCall(
                service='Rekognition',
                action='createCollection',
                physical_resource_id=cr.PhysicalResourceId.of('RekogitionCollection:' + collection_id),
                parameters={
                    'CollectionId': collection_id,
                }),
            on_delete=cr.AwsSdkCall(
                service='Rekognition',
                action='deleteCollection',
                physical_resource_id=cr.PhysicalResourceId.of('RekogitionCollection:' + collection_id),
                parameters={
                    'CollectionId': collection_id,
                })
        )
--------------------------------------------------------------------------------
/infra/services/rekognition/topology.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from os import environ
3 | from infra.configsettings import ConfigManager
4 | from infra.interfaces import IVpcRivStack
5 | from infra.services.rekognition.collections import RekognitionCollectionConstruct
6 |
7 | from constructs import Construct
8 | from aws_cdk import (
9 | aws_ssm as ssm,
10 | )
11 |
# Synth-time configuration flags shared by this module.
config_mgr = ConfigManager()
13 |
class RivRekognitionSetupConstruct(Construct):
    '''
    Provisions the Rekognition collections for a RIV stack and publishes the
    partition count to SSM so lambdas can discover it at runtime.
    '''
    def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack) -> None:
        super().__init__(scope, id)

        # Create one collection construct per configured partition.
        total_collections = config_mgr.total_collections
        for index in range(int(total_collections)):
            RekognitionCollectionConstruct(self, 'Collection_' + str(index),
                collection_id='%s-%s' % (riv_stack.stack_name, index))

        # Expose the partition count as an SSM parameter for the lambdas.
        ssm.StringParameter(self, 'PartitionCountParameter',
            parameter_name='/riv/{}/rekognition/{}'.format(riv_stack.stack_name, 'partition-count'),
            string_value=str(total_collections),
            tier=ssm.ParameterTier.STANDARD,
            description='Generated from %s' % __file__)
37 |
--------------------------------------------------------------------------------
/infra/storage/README.md:
--------------------------------------------------------------------------------
1 | # Storage Constructs
2 |
3 | The [RivSharedDataStores](topology.py) is the main module export, and creates all shared data stores.
4 |
5 | ## What is the FaceTable
6 |
7 | The [RivStorageFaceMetadata](face_metadata.py) construct contains an Amazon DynamoDB table. Customers can extend this to include custom encryption keys and additional supporting functions.
8 |
9 | ## What is the Image Store
10 |
11 | The [RivStorageImageStore](face_images.py) holds face images for bulk loading operations. An Amazon S3 Full Inventory runs daily, then raises the `InventoryCreated` notification through an Amazon SNS topic.
12 |
13 | 
--------------------------------------------------------------------------------
/infra/storage/Storage.drawio:
--------------------------------------------------------------------------------
1 | 5Zhdb5swFIZ/DZeZAoQ0vVySdpvUbpVSqe2lA6dg1eDImIbs1+8Qm0/TJqtKla1XsV+MjZ/z4eNY7iLOvwmyia55AMxyxkFuuUvLcWzHnuBPoeyUMp3NlBAKGuhBtbCiv0GLY61mNIC0NVByziTdtEWfJwn4sqURIfi2PeyRs/aqGxKCIax8wkz1jgYyUurMG9f6d6BhVK5sj/WTmJSDtZBGJODbhuReWO5CcC5VK84XwAp4JRf13uULT6sPE5DIY14Ip6OH+R3kG5Isyc+EXF7dj0aOmuWZsExvWH+s3JUEBM+SAIpJxpY730ZUwmpD/OLpFm2OWiRjhj0bm3o6EBLyF7/TrnaPbgM8Bil2OCSvEKpXtMfYJettzd+baS1qsHdLkWibh9XcNRZsaDJ/QWlyepTc8w6lmUnJ8XooOdOhKLmnR8meHEHJ6aFkD0bJMyj9iIsc5EwZrj1fC2yFRWue+U8gDYSYRjZF098xiiyFexjkWlG/WlcC8Z/CvS1+ZRKnAa2nKv3a3vvQn3QiuUrqDfrTHviDxfHUZJ8842Z4Mff/x9/zToz/mcF/SSjbGYwhCb4WZ3eBmZE0pX4bKORU3jfaD0VC+eLp3jLX+WXf2TU6NyAo7gSE1tTCEBglQIcwfhzPhA+Hw1oSEYI85IKmxZpnW49FSk0AI5I+tz+3z0x6hRtOcSN1QI7bDlH1yynUNvVbzVqiO1HHs1yvM5HiYEy095pq2293pFnPUaPiN7LL+N2n1bTUcZn6keFwGGOy7WKpFPwJFpzxwlkSXsTo/JEy1pFSjHqahCh4de+WYwZYjpyXDjmOYf3I9v4d0SCABLX20ThAhVAZu+FrVbXadLbJUOF/bljtssBiZN5bskbD/MOJ1z47scRbrv9qwFyDJAGR5POGTLdcdPtCpi8/DxYytn2E4VYR7jBYoulWWMp85pzXvTtOznvq/Y81oHnDfqXmXAggEoEY96aIx+ss/ZA7k9cNAsdk2Hf9Hi55HXGxPFQxWu9X6elLxMFKT1n+VEo9t3uhfXOpZ79XqYfd+t8vNbz+D9G9+AM=
--------------------------------------------------------------------------------
/infra/storage/diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/infra/storage/diagram.png
--------------------------------------------------------------------------------
/infra/storage/face_images.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.interfaces import IVpcRivStack
3 | import aws_cdk as core
4 | from constructs import Construct
5 | from aws_cdk import (
6 | aws_s3 as s3,
7 | aws_sqs as sqs,
8 | aws_sns as sns,
9 | aws_s3_notifications as s3n,
10 | aws_sns_subscriptions as subs,
11 | )
12 | from infra.configsettings import ConfigManager
13 |
# Synth-time configuration flags (e.g. use_inventory_bucket) for this module.
config_mgr = ConfigManager()
15 |
class RivStorageImageStore(Construct):
    '''
    Represents the ImageStore construct.

    Owns the image bucket and, when `use_inventory_bucket` is enabled, an
    inventory bucket with a daily S3 Inventory report whose completion is
    broadcast via the InventoryCreated SNS topic.
    '''
    @property
    def image_bucket(self) -> s3.IBucket:
        '''
        Gets the bucket holding the images.
        '''
        return self.__image_bucket

    @image_bucket.setter
    def image_bucket(self, value: s3.IBucket) -> None:
        self.__image_bucket = value

    # NOTE(review): the inventory-related properties are defined at class body
    # scope behind a config flag, so the attributes simply do not exist when
    # `use_inventory_bucket` is off. The flag is evaluated once at import time;
    # callers must be compiled against the same configuration.
    if config_mgr.use_inventory_bucket:
        @property
        def inventory_bucket(self) -> s3.IBucket:
            '''
            Gets the inventory bucket associated with the image bucket
            '''
            return self.__inventory_bucket

        @inventory_bucket.setter
        def inventory_bucket(self, value: s3.IBucket) -> None:
            self.__inventory_bucket = value

        @property
        def inventory_created(self) -> sns.ITopic:
            '''
            Gets the notification topic that an Amazon S3 Inventory finished.
            '''
            return self.__inventory_created

        @inventory_created.setter
        def inventory_created(self, value: sns.ITopic) -> None:
            self.__inventory_created = value

    def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack) -> None:
        super().__init__(scope, id)

        # Image data is retained even if the stack is deleted.
        self.image_bucket = s3.Bucket(self, 'ImageBucket',
            removal_policy=core.RemovalPolicy.RETAIN)

        if config_mgr.use_inventory_bucket:

            # Create the inventory bucket...
            self.inventory_bucket = s3.Bucket(self, 'InventoryBucket',
                removal_policy=core.RemovalPolicy.DESTROY)

            # Daily full inventory of current object versions, written as CSV
            # into the inventory bucket.
            self.image_bucket.add_inventory(
                #objects_prefix='images/',
                inventory_id='{}-InventoryReport'.format('Full'),
                format=s3.InventoryFormat.CSV,
                frequency=s3.InventoryFrequency.DAILY,
                include_object_versions=s3.InventoryObjectVersion.CURRENT,
                destination=s3.InventoryDestination(
                    bucket=self.inventory_bucket,
                    bucket_owner=core.Aws.ACCOUNT_ID,
                    prefix=None))

            # Broadcast inventory creation events...
            self.inventory_created = sns.Topic(self, 'InventoryCreated',
                display_name='{}-ImageStore-InventoryCreated'.format(riv_stack.riv_stack_name),
                topic_name='{}-ImageStore-InventoryCreated'.format(riv_stack.riv_stack_name))

            # Only the manifest.json (written last) signals a complete report.
            self.inventory_bucket.add_event_notification(
                s3.EventType.OBJECT_CREATED,
                s3n.SnsDestination(topic=self.inventory_created),
                s3.NotificationKeyFilter(suffix='manifest.json'))

            # Persist the notification in an SQS topic to simplify debugging.
            self.inventory_created_debug_queue: sqs.IQueue = sqs.Queue(self, 'InventoryCreatedDebugQueue',
                retention_period=core.Duration.days(14))

            self.inventory_created.add_subscription(subs.SqsSubscription(
                queue=self.inventory_created_debug_queue,
                raw_message_delivery=True))
94 |
--------------------------------------------------------------------------------
/infra/storage/face_metadata.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.interfaces import IVpcRivStack
3 | from constructs import Construct
4 | import aws_cdk as core
5 | from aws_cdk import (
6 | aws_dynamodb as ddb,
7 | )
8 |
class RivStorageFaceMetadata(Construct):
  '''
  Construct that provisions the shared DynamoDB table for facial metadata.
  '''

  def __init__(self, scope: Construct, id: builtins.str, riv_stack:IVpcRivStack) -> None:
    '''
    Creates the FaceTable with on-demand billing and point-in-time recovery.

    :param scope: Parent CDK construct.
    :param id: Logical id of this construct.
    :param riv_stack: Owning stack (currently unused beyond the interface).
    '''
    super().__init__(scope, id)

    # Composite key: generic PartitionKey/SortKey string attributes.
    hash_key = ddb.Attribute(name='PartitionKey', type=ddb.AttributeType.STRING)
    range_key = ddb.Attribute(name='SortKey', type=ddb.AttributeType.STRING)

    self.face_table = ddb.Table(self,'FaceTable',
      removal_policy=core.RemovalPolicy.DESTROY,
      partition_key=hash_key,
      sort_key=range_key,
      billing_mode=ddb.BillingMode.PAY_PER_REQUEST,
      point_in_time_recovery=True)

  @property
  def face_table(self)->ddb.ITable:
    '''
    Gets the DynamoDB Face metadata table.
    '''
    return self.__face_table

  @face_table.setter
  def face_table(self,value:ddb.ITable)->None:
    # Name-mangled backing field keeps the attribute private to this class.
    self.__face_table = value
38 |
--------------------------------------------------------------------------------
/infra/storage/topology.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.storage.face_images import RivStorageImageStore
3 | from infra.storage.face_metadata import RivStorageFaceMetadata
4 | from infra.interfaces import IVpcRivStack
5 | from constructs import Construct
6 |
class RivSharedDataStores(Construct):
  '''
  Root construct that provisions every data store shared across the stack.
  '''
  def __init__(self, scope: Construct, id: builtins.str, riv_stack:IVpcRivStack) -> None:
    '''
    Declares the face metadata table, the image store, and the
    gateway endpoints isolated subnets need to reach them.
    '''
    super().__init__(scope, id)

    # Face metadata (DynamoDB) and face images (S3) live side by side here.
    self.face_metadata = RivStorageFaceMetadata(self,'Metadata', riv_stack=riv_stack)
    self.images = RivStorageImageStore(self,'Images', riv_stack=riv_stack)

    # Gateway VPC endpoints are required for isolated-subnet networking.
    riv_stack.networking.endpoints.add_gateways()
24 |
--------------------------------------------------------------------------------
/infra/userportal/README.md:
--------------------------------------------------------------------------------
1 | # User Portal
2 |
3 | Represents the public interface for end-user registration, profile updates, and authentication requests.
4 |
## Where is the AWS Lambda function code
6 |
7 | - [src](../../src) is the root source folder
8 | - [rekognition](../../src/rekognition) holds the functions for the user flow
9 |
10 | ## How is the infrastructure organized
11 |
12 | - [topology.py](topology.py) represents the main export construct
- [gateway](gateway) declares the Amazon API Gateway
- [states](states) declares the user flows using AWS Step Functions Express
- [functions](functions) declares the AWS Lambda tasks that support the user flows
16 |
17 | 
18 |
--------------------------------------------------------------------------------
/infra/userportal/diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/infra/userportal/diagram.png
--------------------------------------------------------------------------------
/infra/userportal/states/README.md:
--------------------------------------------------------------------------------
1 | # UserPortal Step Functions
2 |
When users interact with the [UserPortal Gateway](../gateway/topology.py), the request processing happens with AWS Step Functions Express.
4 |
5 | Step Functions is a serverless orchestration service that lets you combine AWS Lambda functions and other AWS services to build business-critical applications. Through Step Functions' graphical console, you see your application's workflow as a series of event-driven steps. Step Functions is based on state machines and tasks.
6 |
7 | ## What input do the state machines expect
8 |
Each Step Functions state machine initializes with the example state document below. Callers can place arbitrary metadata in the **Properties** property bag. The Image must be either **PNG** or **JPG** format, encoded as `utf8(base64(image_bytes))`.
10 |
11 | ```json
12 | {
13 | "inputRequest": {
14 | "UserId": "string",
15 | "Image": "base64(bytes) with utf8 encoding",
16 | "Properties": {
17 | "Key1": "Value1",
18 | "Key2": "Value2"
19 | }
20 | }
21 | }
22 | ```
23 |
24 | ## How do I use the state document
25 |
26 | Any state other than a **Fail** state can include **InputPath**, **ResultPath**, or **OutputPath**. These allow you to use a path to filter the JSON as it moves through your workflow.
27 |
28 | For [example](https://docs.aws.amazon.com/step-functions/latest/dg/input-output-example.html), start with the AWS Lambda function and state machine described in the Creating a Step Functions State Machine That Uses Lambda tutorial. Modify the state machine so that it includes the following **InputPath**, **ResultPath**, and **OutputPath**.
29 |
30 | | Parameter| Usage | Example|
31 | |----------|------|----------|
32 | | InputPath | Select the task's input | `$.inputRequest` |
| OutputPath | Specify a subset of the task's output | `$.Payload.FaceDetails` |
34 | | ResultPath | Specify where to persist the output | `$.myStepName` |
35 |
--------------------------------------------------------------------------------
/infra/userportal/states/auth.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.userportal.functions.topology import RivUserPortalFunctionSet
3 | from infra.userportal.states.interfaces import RivStateMachineConstruct
4 | from infra.interfaces import IVpcRivStack
5 | import aws_cdk as core
6 | from constructs import Construct
7 | from aws_cdk import (
8 | aws_stepfunctions as sf,
9 | aws_stepfunctions_tasks as sft,
10 | )
11 |
class AuthStateMachine(RivStateMachineConstruct):
  '''
  State machine that authenticates an existing user from a submitted photo:
  detect faces, compare against cached faces, and fall back to a collection
  search when no cached match exists.
  '''
  def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack, functions: RivUserPortalFunctionSet,state_machine_type:sf.StateMachineType) -> None:
    '''
    Wires detect -> compare -> (search) -> Auth-Complete and registers the
    machine as '{stack}-UserPortal-Auth'.
    '''
    super().__init__(scope, id, riv_stack, functions, state_machine_type=state_machine_type)

    '''
    Check if this is a valid image...
    '''
    detect = sft.LambdaInvoke(self,'Check-ImageQuality',
      lambda_function=functions.detect_faces.function,
      input_path='$.inputRequest',
      result_path='$.detection',
      output_path='$',
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    '''
    Check if the user exists already within DynamoDB table
    '''
    compare = sft.LambdaInvoke(self,'Compare-CachedFaces',
      input_path='$.inputRequest',
      result_path='$.compare',
      output_path='$',
      lambda_function=functions.compare_faces.function,
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    detect.next(compare)

    '''
    Format response
    '''
    auth_completed = sf.Pass(self,'Auth-Complete',
      parameters={
        'UserId.$': '$.inputRequest.UserId',
        'Status': 'Verified'
      })

    '''
    Use output of compare as Match/No-Match.
    '''
    user_exists = sf.Choice(self,'CompareFaces-IsMatches')
    user_exists.when(
      condition= sf.Condition.boolean_equals('$.compare.Payload.IsMatch', True),
      next=auth_completed)

    compare.next(user_exists)

    '''
    If not in Dynamo, Search collection to authenticate the users
    '''
    search = sft.LambdaInvoke(self,'Search-ExistingFaces',
      input_path='$.inputRequest',
      result_path='$.search',
      output_path='$',
      lambda_function=functions.search_faces_by_image.function,
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    user_exists.otherwise(search)

    '''
    Confirm the caller's has the correct picture
    '''
    is_calleruser = sf.Choice(self,'Check-SearchResults')
    # NOTE(review): this Choice runs only on the branch where the previous
    # Choice already found '$.compare.Payload.IsMatch' was not True, yet it
    # re-reads the same compare path instead of the Search-ExistingFaces
    # output under '$.search'. Presumably it should inspect the search
    # results — confirm the Lambda payload schema before changing.
    is_calleruser.when(
      condition= sf.Condition.boolean_equals('$.compare.Payload.IsMatch',False),
      next= sf.Fail(self,'InvalidCredentials',
        error='UserAccessDenied',
        cause='The wrong person is in the photo.'))

    is_calleruser.otherwise(auth_completed)
    search.next(is_calleruser)

    '''
    Definition is complete, route it.
    '''
    self.set_state_machine(
      state_machine_name='{}-UserPortal-Auth'.format(self.riv_stack.riv_stack_name),
      definition=detect)
88 |
--------------------------------------------------------------------------------
/infra/userportal/states/register_user.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.userportal.functions.topology import RivUserPortalFunctionSet
3 | from infra.userportal.states.interfaces import RivStateMachineConstruct
4 | from infra.interfaces import IVpcRivStack
5 | from constructs import Construct
6 | from aws_cdk import (
7 | aws_stepfunctions as sf,
8 | aws_stepfunctions_tasks as sft,
9 | )
10 |
class RegisterStateMachine(RivStateMachineConstruct):
  '''
  State machine that enrolls a new user's face into the collection.
  '''

  def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack, functions: RivUserPortalFunctionSet, state_machine_type:sf.StateMachineType) -> None:
    '''
    Wires detect -> search -> (index | fail) -> Registration-Complete and
    registers the machine as '{stack}-UserPortal-Register_User'.
    '''
    super().__init__(scope, id, riv_stack, functions, state_machine_type=state_machine_type)

    # Step 1: reject invalid or low-quality photos up front.
    check_quality = sft.LambdaInvoke(self,'Check-ImageQuality',
      lambda_function=functions.detect_faces.function,
      input_path='$.inputRequest',
      result_path='$.detection',
      output_path='$',
      invocation_type=sft.LambdaInvocationType.REQUEST_RESPONSE)

    # Step 2: look for an existing face matching the submitted photo.
    find_existing = sft.LambdaInvoke(self,'Search-ExistingFaces',
      lambda_function=functions.search_faces_by_image.function,
      input_path='$.inputRequest',
      result_path='$.search',
      output_path='$',
      invocation_type=sft.LambdaInvocationType.REQUEST_RESPONSE)

    # Step 3: index the new face into the collection.
    index_face = sft.LambdaInvoke(self,'Index-FaceInfo',
      lambda_function=functions.index_faces.function,
      input_path='$.inputRequest',
      result_path='$.index',
      output_path='$',
      invocation_type=sft.LambdaInvocationType.REQUEST_RESPONSE)

    # Route: index only when no match exists or the top match is the caller;
    # any other match means the face is already registered.
    route_on_search = sf.Choice(self,'Check-SearchResults')
    route_on_search.when(
      condition=sf.Condition.string_equals('$.search.Payload.TopMatch.Face.ExternalImageId',"Special:RIV_NO_FACE_MATCH"),
      next=index_face)
    route_on_search.when(
      condition=sf.Condition.boolean_equals('$.search.Payload.TopMatch.Face.IsCallerUser',True),
      next=index_face)
    route_on_search.otherwise(
      sf.Fail(self,'UserAlreadyExistsError',
        error='UserAlreadyExists',
        cause='Cannot register double faces in same collections.'))

    # Shape the success payload to the API Gateway response model.
    registration_done = sf.Pass(self,'Registration-Complete',
      parameters={
        'UserId.$': '$.inputRequest.UserId',
        'ImageId.$': '$.index.Payload.FaceRecord.Face.ImageId',
        'Status': 'Registered'
      })

    # Chain the states and publish the machine.
    check_quality.next(find_existing)
    find_existing.next(route_on_search)
    index_face.next(registration_done)

    self.set_state_machine(
      state_machine_name='{}-UserPortal-Register_User'.format(self.riv_stack.riv_stack_name),
      definition=check_quality)
76 |
--------------------------------------------------------------------------------
/infra/userportal/states/topology.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.userportal.states.interfaces import IRivUserPortalStateMachines
3 | from infra.userportal.functions.topology import RivUserPortalFunctionSet
4 | from infra.userportal.states.register_user import RegisterStateMachine
5 | from infra.userportal.states.register_idcard import RegisterIdCardStateMachine
6 | from infra.userportal.states.update import UpdateStateMachine
7 | from infra.userportal.states.auth import AuthStateMachine
8 | from infra.interfaces import IVpcRivStack
9 | from constructs import Construct
10 | from aws_cdk import (
11 | aws_stepfunctions as sf,
12 | )
13 |
class RivUserPortalStateMachines(IRivUserPortalStateMachines):
  '''
  Bundles every UserPortal state machine under one construct.
  '''
  def __init__(self, scope: Construct, id: builtins.str, riv_stack:IVpcRivStack,functions:RivUserPortalFunctionSet, state_machine_type:sf.StateMachineType) -> None:
    '''
    Builds one state machine per user flow: register, register-with-idcard,
    update, and auth.
    '''
    super().__init__(scope, id)

    # Every flow shares the same stack, function set, and machine type.
    shared_kwargs = dict(
      riv_stack=riv_stack,
      functions=functions,
      state_machine_type=state_machine_type)

    self.register_new_user = RegisterStateMachine(self,'Register', **shared_kwargs)
    self.register_with_idcard = RegisterIdCardStateMachine(self,'Register-IdCard', **shared_kwargs)
    self.update_existing_user = UpdateStateMachine(self,'Update', **shared_kwargs)
    self.auth_existing_user = AuthStateMachine(self,'Auth', **shared_kwargs)
46 |
--------------------------------------------------------------------------------
/infra/userportal/states/update.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.userportal.functions.topology import RivUserPortalFunctionSet
3 | from infra.userportal.states.interfaces import RivStateMachineConstruct
4 | from infra.interfaces import IVpcRivStack
5 | from constructs import Construct
6 | from aws_cdk import (
7 | aws_stepfunctions as sf,
8 | aws_stepfunctions_tasks as sft,
9 | )
10 |
class UpdateStateMachine(RivStateMachineConstruct):
  '''
  State machine that updates an existing user's profile after confirming the
  submitted photo belongs to that user.
  '''
  def __init__(self, scope: Construct, id: builtins.str, riv_stack: IVpcRivStack, functions:RivUserPortalFunctionSet, state_machine_type:sf.StateMachineType) -> None:
    '''
    Wires detect -> compare -> (search) -> index -> Update-Complete and
    registers the machine as '{stack}-UserPortal-Update'.
    '''
    super().__init__(scope, id, riv_stack, functions, state_machine_type=state_machine_type)

    '''
    Check if this is a valid image...
    '''
    detect = sft.LambdaInvoke(self,'Check-ImageQuality',
      lambda_function=functions.detect_faces.function,
      input_path='$.inputRequest',
      result_path='$.detection',
      output_path='$',
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    '''
    Check if the user exists already within DynamoDB table
    '''
    compare = sft.LambdaInvoke(self,'Compare-CachedFaces',
      input_path='$.inputRequest',
      result_path='$.compare',
      output_path='$',
      lambda_function=functions.compare_faces.function,
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    detect.next(compare)

    '''
    Update user's profile
    '''
    index = sft.LambdaInvoke(self,'Index-FaceInfo',
      input_path='$.inputRequest',
      output_path='$',
      result_path='$.index',
      lambda_function=functions.index_faces.function,
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    '''
    Use output of compare as Match/No-Match.
    '''
    user_exists = sf.Choice(self,'CompareFaces-IsMatches')
    user_exists.when(
      condition= sf.Condition.boolean_equals('$.compare.Payload.IsMatch', True),
      next=index)

    compare.next(user_exists)

    '''
    If not in Dynamo, Search collection to authenticate the users
    '''
    search = sft.LambdaInvoke(self,'Search-ExistingFaces',
      input_path='$.inputRequest',
      result_path='$.search',
      output_path='$',
      lambda_function=functions.search_faces_by_image.function,
      invocation_type= sft.LambdaInvocationType.REQUEST_RESPONSE)

    user_exists.otherwise(search)

    '''
    Confirm the caller's has the correct picture
    '''
    is_calleruser = sf.Choice(self,'Check-SearchResults')
    # NOTE(review): this branch runs after Search-ExistingFaces, yet it reads
    # '$.compare.Payload.TopMatch.IsCallerUser'. The result_path above writes
    # search output to '$.search', which suggests this was meant to be
    # '$.search.Payload.TopMatch.IsCallerUser' — confirm the Lambda payload
    # schema before changing it.
    is_calleruser.when(
      condition= sf.Condition.boolean_equals('$.compare.Payload.TopMatch.IsCallerUser',False),
      next= sf.Fail(self,'InvalidCredentials',
        error='UserAccessDenied',
        cause='The wrong person is in the photo.'))

    is_calleruser.otherwise(index)
    search.next(is_calleruser)

    # Format the message into API Gateway Model
    index.next(sf.Pass(self,'Update-Complete',
      parameters={
        'UserId.$': '$.inputRequest.UserId',
        'ImageId.$': '$.index.Payload.FaceRecord.Face.ImageId',
        'Status': 'Updated'
      }))

    '''
    And we're finished.
    '''
    self.set_state_machine(
      definition=detect,
      state_machine_name='{}-UserPortal-Update'.format(
        self.riv_stack.riv_stack_name))
97 |
--------------------------------------------------------------------------------
/infra/userportal/topology.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | from infra.configsettings import ConfigManager
3 |
4 | from aws_cdk.aws_stepfunctions import StateMachineType
5 | from infra.storage.topology import RivSharedDataStores
6 | from infra.userportal.functions.topology import RivUserPortalFunctionSet
7 | from infra.userportal.states.topology import RivUserPortalStateMachines
8 | from infra.userportal.gateway.topology import RivUserPortalGateway
9 | from json import dumps
10 | from infra.interfaces import IVpcRivStack
11 | from constructs import Construct
12 |
13 | config = ConfigManager()
class RivUserPortal(Construct):
  '''
  Top-level construct for the public UserPortal: backend functions, the API
  Gateway, and the Step Functions flows behind it.
  '''
  def __init__(self, scope: Construct, id: builtins.str, riv_stack:IVpcRivStack, sharedStorage, subnet_group_name:str='Default') -> None:
    '''
    :param riv_stack: Owning stack providing networking endpoints.
    :param sharedStorage: Shared data stores handed to the function set.
    :param subnet_group_name: Subnet group hosting the Lambda functions.
    '''
    super().__init__(scope, id)

    # Isolated subnets require explicit VPC endpoints for these services.
    if config.use_isolated_subnets:
      riv_stack.networking.endpoints.add_lambda_support()
      riv_stack.networking.endpoints.add_apigateway_support()
      riv_stack.networking.endpoints.add_rekognition_support()

    # Backend Lambda function set that powers every flow.
    self.functions = RivUserPortalFunctionSet(self,'Functions',
      riv_stack=riv_stack,
      subnet_group_name=subnet_group_name,
      sharedStorage=sharedStorage)

    # API Gateway fronting Step Functions Express integrations.
    self.api_gateway = RivUserPortalGateway(self,'Gateway', riv_stack=riv_stack)
    self.state_machines = RivUserPortalStateMachines(self,'States',
      riv_stack=riv_stack,
      functions=self.functions,
      state_machine_type=StateMachineType.EXPRESS)

    self.api_gateway.bind_state_machines(self.state_machines)

    # Function-backed routes, registered in a fixed order.
    for bind_route in (
        self.api_gateway.bind_reset_user,
        self.api_gateway.bind_start_liveness_session,
        self.api_gateway.bind_liveness_session_result,
        self.api_gateway.bind_check_userid,
        self.api_gateway.bind_extract_id_card):
      bind_route(self.functions)

    # Optional STANDARD machines retain execution history for debugging.
    if config.use_debug_state:
      self.debug_state_machines = RivUserPortalStateMachines(self,'DebugStates',
        riv_stack=riv_stack,
        functions=self.functions,
        state_machine_type=StateMachineType.STANDARD)
63 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
4 | *.md
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "configurations": [
3 | {
4 | "name": "Python: Lambda function",
5 | "type": "python",
6 | "request": "launch",
7 | "program": "handler.py",
8 | "console": "integratedTerminal",
9 | "env": {
10 | "REGION":"ca-central-1",
11 | "THROTTLED_QUEUE_URL": "https://sqs.ca-central-1.amazonaws.com/1234567890/RIV-Riv-Prod-BulkLoaderBatchIndexerInputQueue"
12 | }
13 | }
14 | ]
15 | }
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/Dockerfile:
--------------------------------------------------------------------------------
# Define function directory
ARG FUNCTION_DIR="/var/task"

# --- Stage 1 (build-image): install dependencies using the full image ---
FROM python:3.8 as build-image

# Install aws-lambda-cpp build dependencies
# RUN apt-get update && \
#   apt-get install -y \
#   g++ \
#   make \
#   cmake \
#   unzip \
#   libcurl4-openssl-dev

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Create function directory
RUN mkdir -p ${FUNCTION_DIR}

# Copy function code
COPY . ${FUNCTION_DIR}

# Install the runtime interface client
RUN pip install \
        --target ${FUNCTION_DIR} \
        awslambdaric

RUN pip install \
        --target ${FUNCTION_DIR} \
        -r ${FUNCTION_DIR}/requirements.txt

# --- Stage 2: multi-stage build, grab a fresh copy of the Lambda base image ---
FROM public.ecr.aws/lambda/python:3.8

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Set working directory to function root directory
WORKDIR ${FUNCTION_DIR}

# Copy in the build image dependencies
COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}

# The Lambda base image supplies the runtime entrypoint; only the handler
# (module.function) is specified here.
#ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/README.md:
--------------------------------------------------------------------------------
1 | # Bulk Loader Batch Handler
2 |
3 | Amazon S3 Batch enumerates the Amazon S3 Inventory and calls this function once per S3 object.
4 |
5 | This function will read the specified image file (e.g., **foo.png** or **bar.jpeg**) and determine if it qualifies for processing.
6 |
7 | Images that qualify require additional metadata, which the [IRegistrationDataProvider](lib/models.py) must extract. This information can come from RDS or some other external system. The [S3TagUserRegistrationInfo](lib/registrationproviders.py) is an example that uses Amazon S3 Object Tags. Most likely, customers will need to implement their own class to describe their specific business rules.
8 |
9 | ## What is the IRegistrationDataProvider
10 |
11 | This interface receives a reference to the current Amazon S3 Batch Task, and must return the UserRegistrationInfo. The **task** contains properties for determining the image's location (e.g., `s3://bucket/path/to/file.jpeg`).
12 |
13 | Presently, the **UserRegistrationInfo.user_id** property is the only required value. It is recommended that customers include additional metadata in the **properties** dictionary.
14 |
15 | ```python
16 | class IRegistrationDataProvider:
17 | '''
18 | Represents an interface for querying user registration data.
19 | '''
    def get_registration_data(self, task:BatchTask)->UserRegistrationInfo:
21 | raise NotImplementedError()
22 |
23 | class MyCustomProvider(IRegistrationDataProvider):
24 | ... omitted ...
25 |
    def get_registration_data(self, task:BatchTask)->UserRegistrationInfo:
        info = UserRegistrationInfo()
        info.user_id = get_userid_from_task(task)
        info.properties = get_properties_from_rds(info.user_id)
        return info
31 |
32 | ... omitted ...
33 | ```
34 |
35 | ## How do I use MyCustomProvider
36 |
37 | In **handler.py** specify the configuration toward the top of the file.
38 |
39 | ```python
40 | '''
41 | Important: Configure your registration source here
42 | '''
43 | #registration_data_provider = S3TagUserRegistrationInfo(region_name=environ.get('REGION'))
44 | registration_data_provider = MyCustomProvider(region_name=environ.get('REGION'))
45 | ```
46 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/examples/payload.json:
--------------------------------------------------------------------------------
1 | {
2 | "invocationSchemaVersion": "1.0",
3 | "invocationId": "AAAAAAAAAAFDxo8OYMmopPjs19/Fc206fNzPzCgiMwvdBm2SoKGP6+M7qWyEH3VY/aNewY4Be0fabVCK470NDLAWnmB+YZR8JM23lN5vgxXsC9ptAvELtQLnaQRfflvkZxL0MSIH3XoIMDGEZ9kaWKZ/LNh/RVxX69EMNTJvtlXiZ8XujylZLI790Uh4Ld8SokM2S65B9H4g1yWJeD7B+rE7rtqYsfuK9a1tTU3wi55OvwXSIL1QM+zGOxx56vGJv8aksXBbWeSje2r+yWbop5DjQEFxvd+qZdGt+IoIRLGYJJ18L8pvxMdHU0dz9N+yoOa9m2MWfLaoC4D8WT2jwIz528BR5Pu/duPQRS5mlc2FOiz4FB5BCyiivHdbnz4IbEI5y/skTY4IMPi9p/knDdW1e2wuYUVITpiMz72bknkHOl8SFGOPXRcSf4Z+Ar2oC4Y9i11ppPKAJcCr86R43Smaxxr1aI4gehIXDEpWFHIbFDaMrmyCMBQGrlK8bTGec8tVgtqaN38rOfqU6KKf/Qx44vmZxxz2Kri1hT7oN3T1veG0AoB66hw2SeZJ+BuLWNoQvWonxA25HiXk+b15cnyLJg7YMAhy8kT2kbJwW5X0iCr6MBZem8Y82ynCX5cCyrjjljv6xtOQLrIzxQ6Tth/jziSHJZVUFSqY0OJo3AXd6ZFfLjizTqLs3MS9a+K+ipGWgdADa/qlyEVzsTSvKDJnOkHv6HhBZE5yhZcbchCdnN2Pt0NXCfVKFWyIIczPFZF7+TPNdPqvQ/rSKE5eWrrbYCUn9EKx4HTqg/5LBvMWPK4KzvJF9sXWU1lisu//chaJmy654UBh5SomD2vsLH4W4T3EUh1WN2YPUra2j7tnD9IqDW1hvk/vsar4gjoqyp3xPIL69bff9hpN3LQHPxPYtwqq6w2JFK4oLrp8TTSjAtIgGpsMM6CvaivZ+HO560c7lUdQNjcdDkHBrSa5t25Oa3TxcMvrQ+JC8FULwCPBborZtpacolijL0jx4Wo0hy8ztpj8xSAVvZOWMsg2jFUgUFhdlGSXFtV6l6HZ2qm2+c/nhNfKtO5O0YAyRUKKbVp5n5jzhj+zj48CeIFWuxfuj7ULsVaZrSi7qxdp0ceI5fEnGBrK9IPWTz10zbN7Ye+svdaXVqQ6DtnA/gPx2Vt+ivFW8hMP+mV8WxVYe6pDtHObHwXDqLSp1soaqCUjOjvZBNQokiAZflbhER8qeQOlocGQjiBKG/xPy0/KjAVTmq2rcZwwA2LC1CPg47dplIJjEkYUiwxVSHxYvrfT2kyOWi3rT4GzhZingTNPcZfOc9oxVT0WwoIOlkdHvL2qHhwzzrnrG05SVjSar/FVh/QsXhczk3HiE4mydEckiST7ARW7vaiSzI2RGsp4PgyApp1dzRNVJ998vGytkq3OgfXkMflGnpdTds7JWRiaUsw+e1EtU7bnWmBpPnwj1xyWYOuM1DfTd2WQvWf/2w4xgw/AHfyfX1c52VoeBsU5A+pvyFIWjQhr32EqwDYb2/8jt0ZrZY6ggkeD4VrbpSAXV8tcPwT/TQMH8AuvTsu5l9drQxc/9vEw5Ww3MoVCTmNGih/xwLqZ5WkZcT0VTsJeVhSztj6wD+mHCHz7hp3WKgi/bXgg1Epft0hulo1oUkBhvzswQga8Xo86WfGDL+ihtZxoMozrcWdzWAbIeGHdwzQPQ+QiCQMJTfNwPhzrlG22AhcMvYesFOF5c/Z+H23apN5UzhLpLKPlBmF0c6q54mOd6WOTDPqvERlfHlFvcOOhvUFbFODBlV1L5aZE3WB/um2yYRg/V5zDyBZ+cONZ9fC3U3TqSD5JDkeaxZadojQ239XLQNKd8EM=",
4 | "job": {
5 | "id": "f305ba72-b1b1-4792-b7b6-a0ede5da5b8d"
6 | },
7 | "tasks": [
8 | {
9 | "taskId": "AAAAAAAAAAEpIhst0R2gt5vVTJe4V+jSyr7aq7OiiZhrnDs+02A3EkbfnISXDnpNK2jHE4wV6QvoS6sKHnsca0HEM++Ljqxp3wAXD5O9Y7tC6yyVuj8EpW9v1abl8xnVkAJ1gaKkakCt/jDMlFYIg2I6V29IvCtKzChz8pjmKOhxULv7aij0yye70uz71cvLh3afZ3y6YMa49ElG3VJb5C1GTg8+SzzaKguSmvl3hzbknbHqP8NLgwzwAoAnpybvoE+NQsMrbPdH015sJLt86Y82nXCk7Qono2Y27Qyd2sgoBYJGKx/2JamgXXtK9fKVu4r8avQkJKItwUNFzfDRCRsWZTTl0MHIwC+UEjOTT4l0iKXxm9ozafs6/oe+wz1WmoO36nApMgoAdxpVN82P7iYrIbAp6rG8OlWoaZbvyTse8Pj8MRZ6bkZNsEGjmwvJhV9tBGohqA+YT64gqCDjvaXjpG+SkRmzjfpTa6YTgUcIDDDykRTa1zDVAG0nPtlmpkzr89hlAMzx/S/Tut5w65ZCdL1mgru97XSms+cxUecuJYlfSvws0BJPzk3vn5r0dEjy/cKpftmLt7RTsBm5wcHyKGfweAJ/zEkzZbz8BvxY4t2LfHsqAnkyaw46kzW3hHrqJr8rLnkI/jATIBtomjlYZMTTjTwoqoIUG5aYCqABZ6NHCvHVoThYLunR1QThU2CSVJO9IBnBRJZu4mqykuOFinD8kCzKaAe8FbLOxClovzUPG1U8hRU+tuWpoGiQzYYobNKDV7Jgu8LcdYGCIQ/ToyOxATMU4jjD/KqE14/S1OoHyv8WTq0S/Qq1Stdd5zF6XtOUjGy8Sg79dmqW/DO25FhSFfrqGiiEYwr8pM6axKYkJDbZkV6QGbpdtUCa+UIfyMcVOsw4T27JZPYsjwHXbHSyQraBIwgAJR8IneErbt3gAPje7LDgH0CWXkMcnh9GhYN3v2a+SEcWJGaNekSQ7FeP9KxOcPP3bCIt0TBxg+ofk8Ijc3FA9A==",
10 | "s3BucketArn": "arn:aws:s3:::riv-simple-sharedstorageimagesimagebucket7de5900f-sibzosvqtetl",
11 | "s3Key": "lemull.jpg",
12 | "s3VersionId": null
13 | }
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/handler.py:
--------------------------------------------------------------------------------
1 | from lib.importhistory import ImportHistoryManager
2 | from lib.registrationproviders import S3TagUserRegistrationInfo
3 | from lib.models import BatchRequest, BatchResponse, BatchTask, BatchTaskResult, TaskResultCode
4 | import boto3
5 | from os import environ, path
6 | from typing import Any, List, Mapping
7 | from json import dumps, loads
8 | from logging import Logger
9 |
10 | '''
11 | Initialize the environment
12 | '''
13 | logger = Logger(name='LambdaFunction')
14 | THROTTLED_QUEUE_URL = environ.get('THROTTLED_QUEUE_URL')
15 |
16 | assert THROTTLED_QUEUE_URL is not None, "THROTTLED_QUEUE_URL is missing"
17 |
18 | '''
19 | Prepare XRAY, if available.
20 | '''
21 | try:
22 | from aws_xray_sdk.core import xray_recorder, patch_all
23 | patch_all() # Instrument all AWS methods.
24 | except:
25 | print('AWS XRAY support not available.')
26 |
27 | '''
28 | Initialize any clients (... after xray!)
29 | '''
30 | sqs = boto3.client('sqs', region_name=environ.get('REGION'))
31 | import_table = ImportHistoryManager(region_name=environ.get('REGION'))
32 |
33 | '''
34 | Important: Configure your registration source here
35 | '''
36 | registration_data_provider = S3TagUserRegistrationInfo(region_name=environ.get('REGION'))
37 |
38 | #@xray_recorder.capture('process_task')
39 | def process_task(task:BatchTask) ->BatchTaskResult:
40 | '''
41 | Processes an individual Amazon S3 Batch Task.
42 | '''
43 | user_data = registration_data_provider.get_registration_data(task)
44 | if not user_data.is_valid:
45 | return BatchTaskResult(task.taskId, TaskResultCode.SUCCEEDED, 'Skipping invalid object.')
46 |
47 | '''
48 | Confirm this task qualifies for processing.
49 | '''
50 | if not import_table.qualifies_for_processing(
51 | user_id=user_data.user_id,
52 | task= task):
53 | return BatchTaskResult(task.taskId, TaskResultCode.SUCCEEDED, 'Skipping non-qualified object')
54 |
55 | '''
56 | Write the message into the throttling queue.
57 | The throttled-indexer will pull these messages and process them.
58 | '''
59 | response = sqs.send_message(
60 | QueueUrl= THROTTLED_QUEUE_URL,
61 | MessageBody = dumps({
62 | 'BucketArn': task.s3BucketArn,
63 | 'ObjectKey': task.s3Key,
64 | 'InputRequest': user_data.to_dict(),
65 | })
66 | )
67 | return BatchTaskResult(task.taskId, TaskResultCode.SUCCEEDED, 'Queued message {}'.format(response['MessageId']))
68 |
def function_main(event:Mapping[str,Any],_=None):
    '''
    Main Lambda Function entry point for Amazon S3 Batch invocations.
    https://docs.aws.amazon.com/lambda/latest/dg/services-s3-batch.html

    :param event: An Amazon S3 batch request.
    :returns: The batch response as a plain dict.
    '''
    print(dumps(event))
    batch_request = BatchRequest(event)

    # Process every task and collect the per-task results.
    batch_response = BatchResponse(invocationId=batch_request.invocationId)
    batch_response.results.extend(process_task(item) for item in batch_request.tasks)

    return batch_response.to_dict()
87 |
def read_example_file(filename:str)->Mapping[str,Any]:
    '''
    Loads a JSON document from the examples/ directory beside this module.

    :param filename: Name of the example file (e.g. 'payload.json').
    :returns: The parsed JSON content.
    '''
    example_file = path.join(path.dirname(__file__), 'examples', filename)
    with open(example_file, 'r') as handle:
        return loads(handle.read())
94 |
if __name__ == '__main__':
    # Local debug entry point. `xray_recorder` is only bound when the optional
    # aws_xray_sdk import above succeeded; previously this raised NameError
    # when the SDK was absent, so each use is guarded.
    has_xray = 'xray_recorder' in globals()
    if has_xray:
        xray_recorder.begin_segment('LocalDebug')
    batchRequest = read_example_file('payload.json')
    function_main(batchRequest)
    if has_xray:
        xray_recorder.end_segment()
100 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/lib/importhistory.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | from lib.models import BatchTask
3 | #from aws_xray_sdk.core import xray_recorder
4 |
class ImportHistoryManager:
    '''
    Client for the import-history metadata kept as Amazon S3 object tags.
    '''

    def __init__(self, region_name:str) -> None:
        '''
        :param region_name: AWS region hosting the S3 bucket.
        '''
        assert region_name is not None, "No region_name available"
        # Tag lookups go straight to S3; no other state is kept.
        self.s3 = boto3.client('s3', region_name=region_name)

    def qualifies_for_processing(self, user_id:str, task:BatchTask)->bool:
        '''
        Checks if the task requires RIV processing.
        :param user_id: The desired user identity.
        :param task: The current Amazon S3 Batch Task.
        :returns: False when the object is tagged as indexed or ignored.
        '''
        assert user_id is not None, "No user_id is available"
        assert task is not None, "No task is available"

        # Fetch the object's tag set for inspection.
        tagging = self.s3.get_object_tagging(
            Bucket=task.bucket_name,
            Key=task.s3Key)

        # Case-insensitive scan: either disqualifying tag set to 'true'
        # means the object should be skipped.
        for tag in tagging['TagSet']:
            tag_key = str(tag['Key']).lower()
            tag_value = str(tag['Value']).lower()
            if tag_key in ('indexed', 'ignore') and tag_value == 'true':
                return False

        return True
46 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/lib/models.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | from typing import List
3 |
class BatchJob:
    '''
    Represents the Amazon S3 Batch Job metadata.
    '''
    def __init__(self, props:dict) -> None:
        # Only the job identifier is consumed downstream.
        self.id:str = props['id']
10 |
class BatchTask:
    '''
    Represents an individual task within the BatchRequest message.
    '''
    def __init__(self, props:dict) -> None:
        self.taskId:str = props['taskId']
        self.s3Key:str = props['s3Key']
        self.s3BucketArn:str = props['s3BucketArn']
        self.s3VersionId:str = props['s3VersionId']

    @property
    def object_arn(self)->str:
        '''
        Gets the S3 objects fully qualified Amazon Resource Name.
        '''
        return f'{self.s3BucketArn}/{self.s3Key}'

    @property
    def bucket_name(self)->str:
        '''
        Gets the name of the bucket holding the object.
        '''
        arn = self.s3BucketArn
        if not arn.startswith('arn'):
            # Already a plain bucket name; pass it through untouched.
            return arn
        # e.g. arn:aws:s3:::bucket-name -> text after the last ':' is the name.
        return arn.rpartition(':')[2]
37 |
class TaskResultCode(Enum):
    '''
    Represents the completion flag of a BatchTask.

    The member values are the literal strings required by the
    S3 Batch Operations Lambda response contract.
    '''
    SUCCEEDED = 'Succeeded'
    TEMPORARY_FAILURE = 'TemporaryFailure'
    PERMANENT_FAILURE = 'PermanentFailure'
45 |
class BatchTaskResult:
    '''
    Represents the result of an individual BatchTask
    '''
    def __init__(self, taskId:str, resultCode:TaskResultCode, resultString:str='') -> None:
        self.taskId:str = taskId
        self.resultCode = resultCode
        self.resultString = resultString

    def to_dict(self)->dict:
        '''
        Serializes this result into the S3 Batch response wire format.
        '''
        return dict(
            taskId=self.taskId,
            resultCode=self.resultCode.value,
            resultString=self.resultString,
        )
61 |
class BatchResponse:
    '''
    Represents the response to the storage operation.
    '''
    def __init__(self, invocationId:str) -> None:
        self.__invocationId:str = invocationId
        self.__results:List[BatchTaskResult] = []

    @property
    def invocationId(self)->str:
        '''Gets the invocation this response answers.'''
        return self.__invocationId

    @property
    def results(self)->List[BatchTaskResult]:
        '''Gets the mutable list of per-task results.'''
        return self.__results

    def to_dict(self)->dict:
        '''
        Serializes the response into the S3 Batch wire format.
        '''
        serialized = [entry.to_dict() for entry in self.__results]
        return {
            'invocationSchemaVersion': '1.0',
            'treatMissingKeysAs': 'PermanentFailure',
            'invocationId': self.__invocationId,
            'results': serialized,
        }
85 |
class BatchRequest:
    '''
    Represents an incoming event from Amazon S3 Batch.
    '''
    def __init__(self, event:dict) -> None:
        # Top-level invocation metadata...
        self.invocationId = event['invocationId']
        self.job = BatchJob(event['job'])
        # ...and the individual objects to process.
        self.tasks = [BatchTask(item) for item in event['tasks']]
94 |
class UserRegistrationInfo:
    '''
    Represents the user registration metadata.
    '''
    def __init__(self) -> None:
        self.user_id = None
        self.properties = {}

    @property
    def user_id(self)->str:
        '''The identity to register; None until discovered.'''
        return self.__user_id

    @user_id.setter
    def user_id(self,value:str)->None:
        self.__user_id = value

    @property
    def properties(self)->dict:
        '''Arbitrary key/value metadata attached to the registration.'''
        return self.__properties

    @properties.setter
    def properties(self,value:dict)->None:
        self.__properties = value

    @property
    def is_valid(self)->bool:
        '''A registration is actionable once a user_id has been found.'''
        return self.__user_id is not None

    def to_dict(self)->dict:
        '''Serializes into the UserPortal registration request shape.'''
        return {
            "UserId": self.__user_id,
            "Properties": self.__properties,
        }
128 |
class IRegistrationDataProvider:
    '''
    Represents an interface for querying user registration data.
    '''
    def get_registration_data(self, task:BatchTask)->UserRegistrationInfo:
        '''
        Fetches the registration metadata for the given task.
        :param task: The current Amazon S3 Batch Task.
        :raises NotImplementedError: always; concrete providers must override.
        '''
        raise NotImplementedError()

    def get_registration_date(self, task:BatchTask)->UserRegistrationInfo:
        '''
        Backward-compatible alias. The original interface method name carried
        a typo ("date" instead of "data") while the concrete provider
        (S3TagUserRegistrationInfo) implements get_registration_data; delegate
        so either spelling behaves the same.
        '''
        return self.get_registration_data(task)
135 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/lib/registrationproviders.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | from json import loads
3 | from json.decoder import JSONDecodeError
4 | from logging import Logger
5 | from urllib.parse import urlparse
6 | from lib.models import BatchTask, IRegistrationDataProvider, UserRegistrationInfo
7 | #from aws_xray_sdk.core import xray_recorder
8 |
class PropertyFileLoadError(Exception):
    '''
    Represents an error while fetching the user's properties file.
    '''
13 |
14 | logger = Logger('S3TagUserRegistrationInfo')
class S3TagUserRegistrationInfo(IRegistrationDataProvider):
    '''
    Represents a Registration Data Provider that extracts data from Amazon S3 Tagging data.
    '''
    def __init__(self, region_name:str) -> None:
        '''
        :param region_name: The AWS region hosting the tagged objects.
        '''
        super().__init__()
        assert region_name is not None, "No region_name available"
        self.s3 = boto3.client('s3', region_name=region_name)

    #@xray_recorder.capture('S3TagUserRegistrationInfo::get_registration_data')
    def get_registration_data(self, task:BatchTask)->UserRegistrationInfo:
        '''
        Builds the registration metadata from the object's S3 tags.
        :param task: The current Amazon S3 Batch Task.
        '''
        assert task is not None, "No task available."

        response = self.s3.get_object_tagging(
            Bucket = task.bucket_name,
            Key= task.s3Key
        )

        '''
        Convert the Object Tags into Registration metadata.
        '''
        registration = UserRegistrationInfo()
        for tag in response['TagSet']:
            key:str = tag['Key']
            compare_key = key.lower()
            value:str = tag['Value']

            if compare_key == "userid":
                registration.user_id = value
            elif compare_key in ['indexed', 'ignore']:
                # Bookkeeping markers; not part of the registration payload.
                continue
            elif compare_key == 'properties':
                # The tag value points at an s3://.../*.json property bag.
                registration.properties = self.get_property_bag(value)

        return registration

    def get_property_bag(self, object_path:str)->dict:
        '''
        Gets the user's property bag from an existing Amazon S3 Object.
        :param object_path: An s3://bucket/key.json style address.
        :raises PropertyFileLoadError: when the path is malformed, the object
            is missing, or the content is not valid JSON.
        '''
        if not object_path.startswith('s3://'):
            raise PropertyFileLoadError('Invalid Path %s ' % object_path)

        if not object_path.endswith('.json'):
            raise PropertyFileLoadError('Invalid Path %s ' % object_path)

        parsed = urlparse(object_path)
        # NOTE: urlparse.hostname lowercases the bucket segment; S3 bucket
        # names are lowercase by rule, so this is safe.
        bucket = parsed.hostname
        key = parsed.path.lstrip('/')
        try:
            response = self.s3.get_object(Bucket=bucket,Key=key)
            content = response['Body'].read()
            return loads(content)
        except self.s3.exceptions.NoSuchKey as error:
            # Bug fix: the original passed '%' only object_path for a two-
            # placeholder format ('... %s due to %s' % object_path, str(error)),
            # which raised TypeError instead of logging. Use logging's lazy
            # %-args form.
            logger.error('Unable to fetch %s due to %s', object_path, str(error))
            raise PropertyFileLoadError('Unable to fetch %s ' % object_path) from error
        except JSONDecodeError as error:
            logger.error('Unable to deserialize %s' % object_path)
            raise PropertyFileLoadError('Unable to deserialize %s' % object_path) from error
        except Exception as error:
            # Deliberately loud: unexpected failure modes should be noticed
            # and handled explicitly rather than silently retried.
            raise NotImplementedError('Unknown Error %s' % error.__class__.__name__) from error
76 |
--------------------------------------------------------------------------------
/src/bulk-loader/batch-handler/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/bulk-loader/inventory-created-handler/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
4 | *.md
--------------------------------------------------------------------------------
/src/bulk-loader/inventory-created-handler/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/bulk-loader/inventory-created-handler/README.md:
--------------------------------------------------------------------------------
1 | # Inventory Created Handler
2 |
3 | The **image Bucket** will periodically run an S3 Inventory Report. When that report writes into the **inventory bucket**, this operation triggers the Inventory Created Handler function.
4 |
5 | Next, the function does a light-weight filter while initializing an Amazon S3 Batch Job. The Batch job performs a fan-out behavior to assess which files qualify for indexing.
--------------------------------------------------------------------------------
/src/bulk-loader/inventory-created-handler/examples/ValidJob.json:
--------------------------------------------------------------------------------
1 | {
2 | "Job": {
3 | "JobId": "21df778a-68c8-4312-ab57-4bdbf241a77e",
4 | "Description": "2021-08-17 - Invoke AWS Lambda function",
5 | "JobArn": "arn:aws:s3:ca-central-1:581361757134:job/21df778a-68c8-4312-ab57-4bdbf241a77e",
6 | "Status": "Suspended",
7 | "Manifest": {
8 | "Spec": {
9 | "Format": "S3BatchOperations_CSV_20180820",
10 | "Fields": [
11 | "Bucket",
12 | "Key"
13 | ]
14 | },
15 | "Location": {
16 | "ObjectArn": "arn:aws:s3:::riv-simple-sharedstorageimagesinventorybucketab8f-qlx62dgkgypf/input/Full-InventoryReport/2021-08-16T01-00Z.csv",
17 | "ETag": "4854ce0896d28c3cac1aa663b5821601"
18 | }
19 | },
20 | "Operation": {
21 | "LambdaInvoke": {
22 | "FunctionArn": "arn:aws:lambda:ca-central-1:581361757134:function:RivSimple-BulkLoading-BatchHandler"
23 | }
24 | },
25 | "Priority": 10,
26 | "ProgressSummary": {
27 | "TotalNumberOfTasks": 4,
28 | "NumberOfTasksSucceeded": 0,
29 | "NumberOfTasksFailed": 0
30 | },
31 | "FailureReasons": [],
32 | "Report": {
33 | "Bucket": "arn:aws:s3:::riv-simple-sharedstorageimagesinventorybucketab8f-qlx62dgkgypf",
34 | "Format": "Report_CSV_20180820",
35 | "Enabled": true,
36 | "ReportScope": "AllTasks"
37 | },
38 | "CreationTime": "2021-08-17T21:15:29.624000+00:00",
39 | "RoleArn": "arn:aws:iam::581361757134:role/RIV-Riv-Prod-BulkLoaderBatchServiceRoleEED69548-TCLICRLXS3R6",
40 | "SuspendedDate": "2021-08-17T21:15:31.370000+00:00",
41 | "SuspendedCause": "AwaitingConfirmation"
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/src/bulk-loader/inventory-created-handler/examples/sqs_message.json:
--------------------------------------------------------------------------------
1 | {
2 | "Records": [
3 | {
4 | "eventVersion": "2.1",
5 | "eventSource": "aws:s3",
6 | "awsRegion": "ca-central-1",
7 | "eventTime": "2021-08-16T19:16:37.614Z",
8 | "eventName": "ObjectCreated:Put",
9 | "userIdentity": {
10 | "principalId": "AWS:AROAII6QBJF3CVCYZA6A4:i-08d1868d6c9b325c6"
11 | },
12 | "requestParameters": {
13 | "sourceIPAddress": "10.0.139.156"
14 | },
15 | "responseElements": {
16 | "x-amz-request-id": "N8BHVF8Q8JA0SNYS",
17 | "x-amz-id-2": "Q+99XHVWAQEuatPgNHPDQ2hkgEPtz4ZTSjjw6MEGY1FwkgIoXhf3DXQ7F6WXdSRVmPwZqaJhYPFxj51VanY+TvpYl/2YvMjY"
18 | },
19 | "s3": {
20 | "s3SchemaVersion": "1.0",
21 | "configurationId": "ZjJhNWUwNjgtN2FmYi00ZDJjLWEwMGQtY2MxODlmNjU3ZDBh",
22 | "bucket": {
23 | "name": "riv-simple-sharedstorageimagesinventorybucketab8f-qlx62dgkgypf",
24 | "ownerIdentity": {
25 | "principalId": "ALH4O32NROZP1"
26 | },
27 | "arn": "arn:aws:s3:::riv-simple-sharedstorageimagesinventorybucketab8f-qlx62dgkgypf"
28 | },
29 | "object": {
30 | "key": "riv-simple-sharedstorageimagesimagebucket7de5900f-sibzosvqtetl/Full-InventoryReport/2021-08-16T01-00Z/manifest.json",
31 | "size": 560,
32 | "eTag": "a244b34689a818870290a93826634a4c",
33 | "sequencer": "00611AB997C4413255"
34 | }
35 | }
36 | }
37 | ]
38 | }
--------------------------------------------------------------------------------
/src/bulk-loader/inventory-created-handler/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
4 | *.md
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "configurations": [
3 | {
4 | "name": "Python: Lambda function",
5 | "type": "python",
6 | "request": "launch",
7 | "program": "handler.py",
8 | "console": "integratedTerminal",
9 | "env": {
10 | "RIV_STACK_NAME": "Riv-Prod",
11 | "USER_PORTAL_PARAM":"/riv/Riv-Prod/userportal/url",
12 | "IMPORT_TABLE_NAME":"RIV-Riv-Prod-BulkLoaderImportTable",
13 | "REGION": "ca-central-1"
14 | }
15 | }
16 | ]
17 | }
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/README.md:
--------------------------------------------------------------------------------
1 | # Bulk Loader Throttled Indexer
2 |
3 | This function reads from an SQS Queue and indexes the messages into the **UserPortal** gateway.
4 |
5 | Afterward, it will update the **ImportHistoryTable** DynamoDB Table, avoiding reprocessing the data.
6 |
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/examples/payload.json:
--------------------------------------------------------------------------------
1 | {
2 | "Records": [
3 | {
4 | "messageId": "19dd0b57-b21e-4ac1-bd88-01bbb068cb78",
5 | "receiptHandle": "MessageReceiptHandle",
6 | "body": "{\n \"BucketArn\": \"arn:aws:s3:::riv-simple-sharedstorageimagesimagebucket7de5900f-sibzosvqtetl\",\n \"ObjectKey\": \"lemull.jpg\",\n \"InputRequest\": {\n \"UserId\": \"lemull\",\n \"Properties\": {}\n }\n}",
7 | "attributes": {
8 | "ApproximateReceiveCount": "1",
9 | "SentTimestamp": "1523232000000",
10 | "SenderId": "123456789012",
11 | "ApproximateFirstReceiveTimestamp": "1523232000001"
12 | },
13 | "messageAttributes": {},
14 | "md5OfBody": "{{{md5_of_body}}}",
15 | "eventSource": "aws:sqs",
16 | "eventSourceARN": "arn:aws:sqs:ca-central-1:123456789012:MyQueue",
17 | "awsRegion": "ca-central-1"
18 | }
19 | ]
20 | }
21 |
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/handler.py:
--------------------------------------------------------------------------------
1 | from lib.storage import StorageClient
2 | import boto3
3 | from requests import post
4 | from lib.models import SQSMessage, SQSMessageRecord
5 | from os import environ, path
6 | from typing import Any, List, Mapping
7 | from json import dumps, loads
8 | from logging import Logger
9 |
'''
Initialize the environment
'''
# Module-level configuration, resolved once per Lambda cold-start.
logger = Logger(name='LambdaFunction')
USER_PORTAL_PARAM = environ.get('USER_PORTAL_PARAM')
REGION_NAME = environ.get('REGION')
RIV_STACK_NAME =environ.get('RIV_STACK_NAME')

# NOTE(review): assert statements are stripped under `python -O`; these act as
# fail-fast configuration checks only in unoptimized runs.
assert USER_PORTAL_PARAM is not None, "USER_PORTAL_PARAM is missing"
assert REGION_NAME is not None, "REGION_NAME is missing"
# NOTE(review): RIV_STACK_NAME is required by get_user_portal_url() below but
# has no matching assert — confirm it is always configured.

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # Best-effort: any import/patch failure simply disables tracing.
    # NOTE(review): xray_recorder stays undefined on this path; any later
    # unguarded reference to it would raise NameError.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
# Clients are created after patch_all() so boto3 calls are instrumented.
storage_client = StorageClient(region_name=REGION_NAME)
ssm_client = boto3.client('ssm', region_name=REGION_NAME)
35 |
36 | '''
37 | Discover the UserPortal Address.
38 | '''
def get_user_portal_url()->str:
    '''
    Gets the UserPortal public endpoint.

    Reads the SSM parameter /riv/{RIV_STACK_NAME}/userportal/url and
    validates that it looks like an http(s) address.

    :raises ValueError: when the parameter is empty or malformed.
    '''
    try:
        parameter_name = '/riv/{}/userportal/url'.format(RIV_STACK_NAME)

        response = ssm_client.get_parameter(Name=parameter_name)
        value:str = response['Parameter']['Value']

        if value is None:
            raise ValueError('No userportal url available.')
        if not value.startswith('http'):
            # Bug fix: the original format string had no '{}' placeholder,
            # so the offending value never appeared in the error message.
            raise ValueError('UserPortalUrl in unexpected format: {}'.format(value))

        return value
    except Exception:
        logger.error('Unable to get_user_portal_url.')
        # Bare raise preserves the original traceback.
        raise
63 |
64 | USER_PORTAL_URL = get_user_portal_url()
65 |
#@xray_recorder.capture('process_sqs_record')
def process_sqs_record(record:SQSMessageRecord)->None:
    '''
    Processes one SQS record: attach the image bytes, register the user
    against the UserPortal, then mark the object as indexed.
    '''
    enriched = storage_client.attach_image(record.payload)
    register_url = '{}/register'.format(USER_PORTAL_URL)
    post(url=register_url, json=enriched.input_request)
    storage_client.write_index_complete(enriched)
74 |
def function_main(event:Mapping[str,Any],_=None):
    '''
    Main Lambda Function entry point.
    https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html

    :param event: An SQS message event.
    '''
    logger.debug(dumps(event))

    # Records are handled one at a time, in order, to honor the
    # throttling intent of this consumer.
    for record in SQSMessage(event).records:
        process_sqs_record(record)
90 |
def read_example_file(filename:str)->Mapping[str,Any]:
    '''
    Loads a JSON fixture from the local examples directory (debug helper).
    :param filename: The file name within ./examples, e.g. 'payload.json'.
    '''
    fixture = path.join(path.dirname(__file__), 'examples', filename)
    with open(fixture, 'r') as handle:
        return loads(handle.read())
97 |
if __name__ == '__main__':
    # Local debugging entry point: replays the canned example payload.
    # Bug fix: xray_recorder is only defined when the guarded aws_xray_sdk
    # import above succeeded; look it up defensively so local debugging
    # also works without the SDK installed (previously: NameError).
    recorder = globals().get('xray_recorder')
    if recorder is not None:
        recorder.begin_segment('LocalDebug')
    sqs_message = read_example_file('payload.json')
    function_main(sqs_message)
    if recorder is not None:
        recorder.end_segment()
103 |
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/lib/models.py:
--------------------------------------------------------------------------------
1 | from json import loads
2 | from typing import List, Mapping
3 |
class Payload:
    '''
    The throttling-queue message body: where the image lives, plus the
    registration request to forward to the UserPortal.
    '''
    def __init__(self, props:dict) -> None:
        self.bucket_arn:str = props['BucketArn']
        self.object_key:str = props['ObjectKey']
        self.input_request:dict = props['InputRequest']

    @property
    def bucket_name(self)->str:
        '''
        Gets the name of the bucket holding the image.
        '''
        # An ARN's final ':' segment is the bucket name; plain names pass through.
        arn = self.bucket_arn
        return arn.rpartition(':')[2] if arn.startswith('arn') else arn
19 |
class SQSMessageRecord:
    '''
    Represents an individual SQS message.
    '''
    def __init__(self, props:dict) -> None:
        self.messageId:str = props['messageId']
        self.body:str = props['body']
        self.attributes:Mapping[str,str] = props['attributes']
        self.message_attributes = props['messageAttributes']
        # The body is a JSON document produced by the batch handler.
        self.payload:Payload = Payload(loads(self.body))
31 |
32 |
class SQSMessage:
    '''
    Represents a batch of SQSMessageRecords
    '''
    def __init__(self, event:dict) -> None:
        raw_records = event['Records']
        self.records:List[SQSMessageRecord] = [SQSMessageRecord(entry) for entry in raw_records]
39 |
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/lib/storage.py:
--------------------------------------------------------------------------------
1 | from typing import List, Mapping
2 | import boto3
3 | from lib.models import Payload
4 | from base64 import b64encode
5 | #from aws_xray_sdk.core import xray_recorder
6 |
class StorageClient:
    '''
    Represents a utility for interacting with various storage services.
    '''
    def __init__(self, region_name:str) -> None:
        assert region_name is not None, "No region_name available"

        self.s3 = boto3.client('s3', region_name=region_name)

    #@xray_recorder.capture('attach_image')
    def attach_image(self, payload:Payload)->Payload:
        '''
        Downloads the image referenced by the payload and embeds it,
        base64-encoded, as payload.input_request['Image'].
        :param payload: The deserialized body of the SQSMessageRecord.
        '''
        assert payload is not None, "No payload is available"

        fetched = self.s3.get_object(
            Bucket = payload.bucket_name,
            Key= payload.object_key
        )

        raw_bytes = fetched['Body'].read()
        payload.input_request['Image'] = str(b64encode(raw_bytes), 'utf-8')
        return payload

    #@xray_recorder.capture('write_index_complete')
    def write_index_complete(self, payload:Payload)->None:
        '''
        Records completion by upserting an Indexed=True object tag while
        preserving any tags already present on the object.
        :param payload: The deserialized body of the SQSMessageRecord.
        '''
        assert payload is not None, "No payload is available"

        # Read the existing tags so the full-set rewrite below keeps them.
        existing = self.s3.get_object_tagging(
            Bucket=payload.bucket_name,
            Key=payload.object_key)

        merged:Mapping[str,dict] = {tag['Key']: tag for tag in existing['TagSet']}

        # Upsert the Indexed marker before writing the set back.
        merged['Indexed'] = {'Key':'Indexed', 'Value':'True'}

        self.s3.put_object_tagging(
            Bucket=payload.bucket_name,
            Key=payload.object_key,
            Tagging={ 'TagSet': list(merged.values()) })
62 |
--------------------------------------------------------------------------------
/src/bulk-loader/throttled-indexer/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
2 | requests
--------------------------------------------------------------------------------
/src/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # production
12 | /build
13 |
14 | # misc
15 | .DS_Store
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 |
21 | npm-debug.log*
22 | yarn-debug.log*
23 | yarn-error.log*
24 |
25 | #amplify-do-not-edit-begin
26 | amplify/\#current-cloud-backend
27 | amplify/.config/local-*
28 | amplify/logs
29 | amplify/mock-data
30 | amplify/mock-api-resources
31 | amplify/backend/amplify-meta.json
32 | amplify/backend/.temp
33 | build/
34 | dist/
35 | node_modules/
36 | aws-exports.js
37 | awsconfiguration.json
38 | amplifyconfiguration.json
39 | amplifyconfiguration.dart
40 | amplify-build-config.json
41 | amplify-gradle-config.json
42 | amplifytools.xcconfig
43 | .secret-*
44 | **.sample
45 | #amplify-do-not-edit-end
46 |
--------------------------------------------------------------------------------
/src/frontend/README.md:
--------------------------------------------------------------------------------
1 | # Getting Started with Create React App
2 |
3 |
4 | ## Available Scripts
5 |
6 | In the project directory, you can run:
7 |
8 | `npm start`
9 |
10 | Runs the app in the development mode.\
11 | Open [http://localhost:3000](http://localhost:3000) to view it in your browser.
12 |
13 | The page will reload when you make changes.\
14 | You may also see any lint errors in the console.
15 |
16 | `npm test`
17 |
18 | Launches the test runner in the interactive watch mode.\
19 | See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
20 |
21 | `npm run build`
22 |
23 | Builds the app for production to the `build` folder.\
24 | It correctly bundles React in production mode and optimizes the build for the best performance.
25 |
26 | The build is minified and the filenames include the hashes.\
27 | Your app is ready to be deployed!
28 |
29 | See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
30 |
31 | `npm run eject`
32 |
33 | **Note: this is a one-way operation. Once you `eject`, you can't go back!**
34 |
35 | If you aren't satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
36 |
37 | Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you're on your own.
38 |
39 | You don't have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn't feel obligated to use this feature. However we understand that this tool wouldn't be useful if you couldn't customize it when you are ready for it.
40 |
41 |
--------------------------------------------------------------------------------
/src/frontend/amplify.yml:
--------------------------------------------------------------------------------
1 | version: 1
2 | frontend:
3 | phases:
4 | preBuild:
5 | commands:
6 | - npm i
7 | build:
8 | commands:
9 | - npm run build
10 | artifacts:
11 | files:
12 | - '**/*'
13 | baseDirectory: /build
14 | cache: null
15 |
16 |
--------------------------------------------------------------------------------
/src/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "riv",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "@aws-amplify/ui-react": "^4.6.0",
7 | "@aws-amplify/ui-react-liveness": "^1.0.0",
8 | "@awsui/components-react": "^3.0.788",
9 | "@testing-library/jest-dom": "^5.11.9",
10 | "@testing-library/react": "^11.2.5",
11 | "@testing-library/user-event": "^12.8.3",
12 | "aws-amplify": "^5.1.1",
13 | "react": "^18.2.0",
14 | "react-dom": "^18.2.0",
15 | "react-icons": "^4.7.1",
16 | "react-json-tree": "^0.18.0",
17 | "react-responsive-modal": "^6.4.1",
18 | "react-router-dom": "^6.5.0",
19 | "react-scripts": "^5.0.1",
20 | "react-youtube": "^10.1.0",
21 | "use-awsui": "^1.0.8",
22 | "use-awsui-router": "^2.0.1",
23 | "web-vitals": "^2.1.4"
24 | },
25 | "scripts": {
26 | "start": "react-scripts start",
27 | "build": "react-scripts build",
28 | "test": "react-scripts test",
29 | "eject": "react-scripts eject"
30 | },
31 | "eslintConfig": {
32 | "extends": [
33 | "react-app",
34 | "react-app/jest"
35 | ]
36 | },
37 | "browserslist": {
38 | "production": [
39 | ">0.2%",
40 | "not dead",
41 | "not op_mini all"
42 | ],
43 | "development": [
44 | "last 1 chrome version",
45 | "last 1 firefox version",
46 | "last 1 safari version",
47 | ">0.2%",
48 | "not dead",
49 | "not op_mini all"
50 | ]
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/frontend/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/frontend/public/favicon.ico
--------------------------------------------------------------------------------
/src/frontend/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
12 |
13 |
17 |
18 |
27 | Identity Verification using Amazon Rekognition Demo App
28 |
29 |
30 |
31 |
32 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/frontend/public/logo192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/frontend/public/logo192.png
--------------------------------------------------------------------------------
/src/frontend/public/logo512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/frontend/public/logo512.png
--------------------------------------------------------------------------------
/src/frontend/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "Rekognition Identity Verification",
3 | "name": "Rekognition Identity Verification App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "64x64 32x32 24x24 16x16",
8 | "type": "image/x-icon"
9 | },
10 | {
11 | "src": "logo192.png",
12 | "type": "image/png",
13 | "sizes": "192x192"
14 | },
15 | {
16 | "src": "logo512.png",
17 | "type": "image/png",
18 | "sizes": "512x512"
19 | }
20 | ],
21 | "start_url": ".",
22 | "display": "standalone",
23 | "theme_color": "#000000",
24 | "background_color": "#ffffff"
25 | }
26 |
--------------------------------------------------------------------------------
/src/frontend/public/robots.txt:
--------------------------------------------------------------------------------
1 | # https://www.robotstxt.org/robotstxt.html
2 | User-agent: *
3 | Disallow:
4 |
--------------------------------------------------------------------------------
/src/frontend/src/App.css:
--------------------------------------------------------------------------------
1 | .App {
2 | text-align: center;
3 | }
4 |
5 | .App-logo {
6 | height: 40vmin;
7 | pointer-events: none;
8 | }
9 |
10 | @media (prefers-reduced-motion: no-preference) {
11 | .App-logo {
12 | animation: App-logo-spin infinite 20s linear;
13 | }
14 | }
15 |
16 | .App-header {
17 | background-color: #282c34;
18 | min-height: 100vh;
19 | display: flex;
20 | flex-direction: column;
21 | align-items: center;
22 | justify-content: center;
23 | font-size: calc(10px + 2vmin);
24 | color: white;
25 | }
26 |
27 | .App-link {
28 | color: #61dafb;
29 | }
30 |
31 | @keyframes App-logo-spin {
32 | from {
33 | transform: rotate(0deg);
34 | }
35 | to {
36 | transform: rotate(360deg);
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/src/frontend/src/App.js:
--------------------------------------------------------------------------------
1 | import './App.css';
2 | import { API, Auth } from 'aws-amplify';
3 | import React from "react";
4 | import RIVHeader from './Components/Header'
5 | import AppLayout from "@awsui/components-react/app-layout";
6 | import { AppLayoutProvider } from "./Components/context/AppLayoutContext"
7 | import {
8 | ThemeProvider
9 | } from '@aws-amplify/ui-react';
10 | import { useAppLayoutContext } from "./Components/context/AppLayoutContext"
11 | import { useAppLayout } from "use-awsui"
12 | import { BrowserRouter } from "react-router-dom"
13 | import Routes from "./Components/Routes"
14 | import SideNavigation from './Components/SideNavigation'
15 |
16 |
17 | Auth.configure({
18 | "Auth": {
19 | "identityPoolId": process.env.REACT_APP_IDENTITYPOOL_ID,
20 | "region": process.env.REACT_APP_REGION,
21 | "userPoolId": process.env.REACT_APP_USERPOOL_ID,
22 | "mandatorySignIn": false,
23 | "userPoolWebClientId": process.env.REACT_APP_WEBCLIENT_ID
24 | // "identityPoolId": "us-east-1:763cb669-b748-498a-ae6b-4cabfb0e457e",
25 | // "region": "us-east-1",
26 | // "userPoolId": "us-east-1_dJkZVBJIN",
27 | // "mandatorySignIn": false,
28 | // "userPoolWebClientId": "6uvfrd4dllnh1h2rck8utka07d"
29 | }
30 | })
31 |
32 | API.configure({
33 | API: {
34 | endpoints: [
35 | {
36 | name: "identityverification",
37 | endpoint: process.env.REACT_APP_ENV_API_URL
38 | // endpoint: "https://bu3wsszai5.execute-api.us-east-1.amazonaws.com/prod/"
39 | },
40 | ],
41 | },
42 | })
43 |
44 | function App() {
45 | const theme = {
46 | name: 'my-theme',
47 | tokens: {
48 | components: {
49 | text: {
50 | color: { value: '232f3e' },
51 | },
52 |
53 | card: {
54 | backgroundColor: { value: '#fff' },
55 | // Variations
56 | outlined: {
57 | },
58 | elevated: {
59 | boxShadow: { value: '{shadows.large}' },
60 | },
61 |
62 | }
63 | },
64 | colors: {
65 | background: {
66 | primary: { value: '#0F1111' },
67 | secondary: { value: '#fff' },
68 | tertiary: { value: '#0F1111' },
69 | },
70 | border: {
71 | primary: { value: '#0F1111' },
72 | secondary: { value: '#0F1111' },
73 | tertiary: { value: '#0F1111' },
74 | },
75 | },
76 | },
77 | };
78 | const { handleNavigationChange, navigationOpen } = useAppLayout({
79 | defaultNavigationOpen: true,
80 | defaultToolsOpen: true
81 | })
82 | let {
83 | state: { navigationHide, contentType }
84 | } = useAppLayoutContext()
85 |
86 | return (
87 |
88 |
89 |
90 |
91 |
92 | }
98 | toolsHide
99 | contentType={contentType}
100 | content={
101 |
102 |
103 |
104 | }
105 | />
106 |
107 |
108 |
109 |
110 | );
111 | }
112 |
113 | export default App;
114 |
--------------------------------------------------------------------------------
/src/frontend/src/App.test.js:
--------------------------------------------------------------------------------
1 | import { render, screen } from '@testing-library/react';
2 | import App from './App';
3 |
4 | test('renders learn react link', () => {
5 | render();
6 | const linkElement = screen.getByText(/learn react/i);
7 | expect(linkElement).toBeInTheDocument();
8 | });
9 |
--------------------------------------------------------------------------------
/src/frontend/src/Components/Routes.js:
--------------------------------------------------------------------------------
1 | import {
2 | Route,
3 | Routes as Switch,
4 | useLocation,
5 | matchPath
6 | } from "react-router-dom"
7 | import Home from '../Pages/Home'
8 | import SignIn from '../Pages/SignIn'
9 | import Register from '../Pages/Register'
10 | import RegisterWithIdCard from '../Pages/RegisterWithIdCard'
11 | import Success from '../Pages/Success'
12 | import LoggedIn from '../Pages/LoggedIn'
13 | import { useEffect } from "react"
14 | import {
15 | useAppLayoutDispatch,
16 | useAppLayoutContext
17 | } from "./context/AppLayoutContext"
18 |
19 | export const routes = [
20 | {
21 | path: "/",
22 | element: ,
23 | exact: true,
24 | contentType: "default",
25 | navigationHide: true
26 |
27 | },
28 | {
29 | path: "/login",
30 | element: ,
31 | exact: true,
32 | contentType: "default",
33 | navigationHide: true
34 | },
35 | {
36 | path: "/register",
37 | element: ,
38 | exact: true,
39 | contentType: "default",
40 | navigationHide: true
41 | },
42 | {
43 | path: "/registerwithid",
44 | element: ,
45 | exact: true,
46 | contentType: "default",
47 | navigationHide: true
48 | },
49 | {
50 | path: "/success",
51 | element: ,
52 | exact: true,
53 | contentType: "default",
54 | navigationHide: true
55 | },
56 | {
57 | path: "/loggedin",
58 | element: ,
59 | exact: true,
60 | contentType: "default",
61 | navigationHide: true
62 | }
63 |
64 |
65 | ]
66 |
67 | const Routes = () => {
68 | const { pathname } = useLocation()
69 | const { setContentType } = useAppLayoutDispatch()
70 | const { state } = useAppLayoutContext()
71 |
72 | useEffect(() => {
73 | const currentRoute = routes.find((i) => matchPath(i.path, pathname))
74 | const newContentType = currentRoute?.contentType || "default"
75 | const newNavHide = currentRoute?.navigationHide
76 | if (newContentType !== state.contentType || newNavHide !== state.navigationHide) {
77 | setContentType(currentRoute?.contentType || "default", currentRoute?.navigationHide)
78 | }
79 | }, [pathname, setContentType, state.contentType, state.navigationHide])
80 |
81 | return (
82 |
83 | {routes.map(({ contentType, ...route }, i) => (
84 |
85 | ))}
86 | } />
87 |
88 | )
89 | }
90 |
91 | export default Routes
92 |
93 |
--------------------------------------------------------------------------------
/src/frontend/src/Components/SideNavigation.js:
--------------------------------------------------------------------------------
1 | import { SideNavigation as Navigation } from "@awsui/components-react"
2 | import { useSideNavigation } from "use-awsui-router"
3 | import { API } from "aws-amplify";
4 | import {
5 | useNavigate
6 | } from "react-router-dom";
7 | import { Button } from '@aws-amplify/ui-react';
8 | let MAIN_ITEMS = [{ type: "link", text: "Login", href: "/login" },
9 | { type: "link", text: "Register", href: "/register" },
10 | { type: "link", text: "Register with ID", href: "/registerwithid" }
11 | // { type: "link", text: "Delete all users", href: "#", onclick: "deleteUsers()" }
12 | ]
13 |
14 | const FOOTER_ITEMS = [
15 | { type: "divider" },
16 | {
17 | type: "link",
18 | text: "Documentation",
19 | href: "https://aws.amazon.com/rekognition/identity-verification/",
20 | external: true
21 | }
22 | ]
23 |
24 | const SideNavigation = () => {
25 |
26 | const navigate = useNavigate();
27 | function deleteUsers() {
28 | console.log('User deleted')
29 | const options = { headers: {
30 | 'Content-Type': 'application/json'
31 | }}
32 | API.get("identityverification", "reset-user", options).then(response => {
33 | // localStorage.removeItem("pic");
34 | navigate("/");
35 | });
36 | }
37 | const { activeHref, handleFollow } = useSideNavigation()
38 | let items = [...MAIN_ITEMS]
39 |
40 | items.push(...FOOTER_ITEMS)
41 |
42 | return (
43 | <>
44 |
50 |
51 |
54 |
55 | >
56 | )
57 | }
58 |
59 | export default SideNavigation
60 |
--------------------------------------------------------------------------------
/src/frontend/src/Components/context/AppLayoutContext.js:
--------------------------------------------------------------------------------
1 | import { createContext, useContext, useReducer, useMemo } from "react"
2 |
3 | const initialAppLayout = {
4 | contentType: "default",
5 | navigationHide:false
6 | }
7 |
8 | export const AppLayoutContext = createContext({
9 | state: initialAppLayout
10 | })
11 |
// Reducer for the AppLayout context. The single supported action,
// SET_CONTENT_TYPE, replaces both the AWSUI content type and the
// side-navigation visibility flag, falling back to the initial values
// when the payload omits them. Any other action type is a programming
// error and throws.
const reducer = (state, action) => {
  switch (action.type) {
    case "SET_CONTENT_TYPE":
      return {
        ...state,
        contentType: action.payload.contentType || initialAppLayout.contentType,
        // NOTE(review): `||` coerces an explicitly-false payload value back to
        // the initial value; harmless today only because the default is false.
        navigationHide:action.payload.navigationHide || initialAppLayout.navigationHide
      }
    default:
      throw new Error(`Unknown action type: ${action.type}`)
  }
}
24 |
// Hook exposing the AppLayout mutation API.
// setContentType(contentType, navigationHide) dispatches SET_CONTENT_TYPE
// against the shared AppLayoutContext reducer.
export const useAppLayoutDispatch = () => {
  const { dispatch } = useContext(AppLayoutContext)
  const setContentType = (contentType, navigationHide) => {
    dispatch({
      type: "SET_CONTENT_TYPE",
      payload: { contentType, navigationHide }
    })
  }
  return { setContentType }
}
38 |
39 | export const AppLayoutProvider = (props) => {
40 | const [state, dispatch] = useReducer(reducer, initialAppLayout, (arg) => arg)
41 | const contextValue = useMemo(() => {
42 | return { state, dispatch }
43 | }, [state, dispatch])
44 |
45 | return (
46 |
47 | {props.children}
48 |
49 | )
50 | }
51 |
// Hook returning the raw { state, dispatch } pair of the AppLayout context.
export const useAppLayoutContext = () => useContext(AppLayoutContext)
53 |
--------------------------------------------------------------------------------
/src/frontend/src/Components/context/NotificationContext.js:
--------------------------------------------------------------------------------
1 | import { createContext, useContext, useReducer, useMemo } from "react"
2 |
3 | const initialState = {
4 | notifications: []
5 | }
6 |
7 | export const NotificationContext = createContext({
8 | state: initialState
9 | })
10 |
11 | const ADD_NOTIFICATION = "ADD_NOTIFICATION"
12 | const CLEAR_NOTIFICATION = "CLEAR_NOTIFICATION"
13 |
// Reducer for the notification list. ADD_NOTIFICATION appends a new entry;
// CLEAR_NOTIFICATION removes the entry at payload.index; any other action
// type is a programming error and throws.
const reducer = (state, action) => {
  switch (action.type) {
    case ADD_NOTIFICATION:
      return {
        ...state,
        notifications: [...state.notifications, action.payload.notification]
      }
    case CLEAR_NOTIFICATION:
      return {
        ...state,
        // Keep every notification except the one being dismissed.
        notifications: state.notifications.filter(
          (_, idx) => idx !== action.payload.index
        )
      }
    default:
      throw new Error(`Unknown action type: ${action.type}`)
  }
}
32 |
// Hook exposing the notification mutation API:
//   addNotification(notification) - append a message to the list
//   clearNotification(index)      - dismiss the message at `index`
export const useNotificationDispatch = () => {
  const { dispatch } = useContext(NotificationContext)
  return {
    addNotification: (notification) =>
      dispatch({
        type: ADD_NOTIFICATION,
        payload: {
          notification
        }
      }),
    clearNotification: (index) =>
      dispatch({
        type: CLEAR_NOTIFICATION,
        payload: {
          index
        }
      })
  }
}
52 |
53 | export const NotificationProvider = (props) => {
54 | const [state, dispatch] = useReducer(reducer, initialState, (arg) => arg)
55 | const contextValue = useMemo(() => {
56 | return { state, dispatch }
57 | }, [state, dispatch])
58 |
59 | return (
60 |
61 | {props.children}
62 |
63 | )
64 | }
65 |
// Hook returning the raw { state, dispatch } pair of the notification context.
export const useNotificationContext = () => useContext(NotificationContext)
67 |
--------------------------------------------------------------------------------
/src/frontend/src/Error.js:
--------------------------------------------------------------------------------
// Maps backend error identifiers to the user-facing messages the frontend
// displays. Keys must match the error codes returned by the API; the values
// are shown verbatim, so do not edit them casually.
const ErrorMessage = {
    'UserAlreadyExists': 'The user already exists. Delete the user first to try out this feature.',
    'FaceNotMatchWithIDCard': 'The image from the camera and ID card don’t match. Please use your valid ID documents.',
    'GenericError': 'An error occurred while submitting the form. Please try again later.',
    'ValueError': 'The user does not exist. Please register the user first.',
    'LiveNessResultThreshold': 'The Face Liveness confidence score is less than the user-selected confidence threshold. Please reattempt the face liveness check again.',
    'UserAccessDenied': 'Access Denied. The wrong person is in the photo.'
}

export default ErrorMessage;
11 |
--------------------------------------------------------------------------------
/src/frontend/src/Pages/Home.js:
--------------------------------------------------------------------------------
1 | import {
2 | Card,
3 | Image,
4 | View,
5 | Heading,
6 | Flex,
7 | Text,
8 | Button,
9 | useTheme,
10 | Link,
11 | ButtonGroup,
12 | Collection
13 |
14 | } from '@aws-amplify/ui-react';
15 | import { useNavigate } from "react-router-dom"
16 | import YouTube from 'react-youtube';
17 | function Home() {
18 |
19 | const { tokens } = useTheme();
20 | const navigate = useNavigate()
21 |
22 | return (
23 | <>
24 |
25 |
26 | Identity Verification using Amazon Rekognition
27 |
28 |
29 | Verify user identity online using machine learning.
30 |
31 |
32 |
40 |
45 |
46 |
47 |
48 |
49 |
50 |
54 |
58 | In-person user identity verification is slow to scale, costly, and has high friction. Machine learning-powered facial biometrics can enable online user identity verification. Amazon Rekognition offers pre-trained facial recognition and analysis capabilities to quickly add to user onboarding and authentication workflows to verify opted-in users' identities online. No machine learning expertise is required. With Amazon Rekognition, you can onboard and authenticate users in seconds while detecting fraudulent or duplicate accounts. As a result, you can grow users faster, reduce fraud, and lower user verification costs.
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
73 | How it works
74 |
75 |
76 |
85 | >
86 | );
87 | }
88 |
89 | export default Home;
--------------------------------------------------------------------------------
/src/frontend/src/Pages/LoggedIn.js:
--------------------------------------------------------------------------------
1 | import {
2 | View,
3 | Heading,
4 | Flex,
5 | useTheme,
6 | Alert
7 |
8 | } from '@aws-amplify/ui-react';
9 |
10 | import { JSONTree } from 'react-json-tree';
11 | import { useLocation } from 'react-router-dom'
12 |
13 | function LoggedIn() {
14 |
15 | const { tokens } = useTheme();
16 | // const navigate = useNavigate()
17 | const location = useLocation()
18 | const { label } = location.state
19 | const { responseData } = location.state
20 | return (
21 | <>
22 |
27 |
28 | Login Successful!!
29 |
30 |
31 |
37 |
38 |
41 | Welcome {label}
42 |
43 |
44 | {/*
45 |
48 | */}
49 |
50 |
51 |
52 | {responseData &&
53 | <>
54 |
60 | Response:
61 |
62 | raw.length > 200 ? ({raw}) : raw} />
64 | >
65 | }
66 |
67 | >
68 |
69 |
70 | );
71 | }
72 |
73 | export default LoggedIn;
--------------------------------------------------------------------------------
/src/frontend/src/Pages/Success.js:
--------------------------------------------------------------------------------
1 | import {
2 | View,
3 | Heading,
4 | Flex,
5 | Button,
6 | useTheme,
7 | Alert
8 |
9 | } from '@aws-amplify/ui-react';
10 | import { useNavigate } from "react-router-dom"
11 | import { ImEnter } from "react-icons/im";
12 | import { JSONTree } from 'react-json-tree';
13 | import { useLocation } from 'react-router-dom'
14 |
15 | function Success() {
16 |
17 | const { tokens } = useTheme();
18 | const navigate = useNavigate()
19 | const location = useLocation()
20 | // const { label } = location.state
21 | const { responseData } = location.state
22 | return (
23 |
24 |
25 | <>
26 |
31 |
32 | Success
33 |
34 |
35 |
41 |
42 |
45 | Congratulations, your account has been successfully created.
46 |
47 |
48 |
49 |
52 |
53 |
54 |
55 |
56 |
57 | {responseData &&
58 | <>
59 |
65 | Response:
66 |
67 |
68 | >
69 | }
70 |
71 |
72 |
73 | >
74 |
75 |
76 | );
77 | }
78 |
79 | export default Success;
--------------------------------------------------------------------------------
/src/frontend/src/index.css:
--------------------------------------------------------------------------------
1 | body {
2 | margin: 0;
3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
4 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
5 | sans-serif;
6 | -webkit-font-smoothing: antialiased;
7 | -moz-osx-font-smoothing: grayscale;
8 | }
9 |
10 | code {
11 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
12 | monospace;
13 | }
14 |
--------------------------------------------------------------------------------
/src/frontend/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom/client';
3 | import './index.css';
4 | import App from './App';
5 | import reportWebVitals from './reportWebVitals';
6 | import "react-responsive-modal/styles.css";
7 | import '@aws-amplify/ui-react/styles.css';
8 |
9 |
10 | const root = ReactDOM.createRoot(document.getElementById('root'));
11 | root.render(
12 | //
13 | //
14 | //
15 |
16 | );
17 |
18 | // If you want to start measuring performance in your app, pass a function
19 | // to log results (for example: reportWebVitals(console.log))
20 | // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
21 | reportWebVitals();
22 |
--------------------------------------------------------------------------------
/src/frontend/src/logo.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/frontend/src/reportWebVitals.js:
--------------------------------------------------------------------------------
// Lazily loads the web-vitals library and wires each metric collector
// (CLS, FID, FCP, LCP, TTFB) to the supplied callback. Does nothing
// unless a callable is provided.
const reportWebVitals = onPerfEntry => {
  if (onPerfEntry && onPerfEntry instanceof Function) {
    import('web-vitals').then((vitals) => {
      const { getCLS, getFID, getFCP, getLCP, getTTFB } = vitals;
      for (const collect of [getCLS, getFID, getFCP, getLCP, getTTFB]) {
        collect(onPerfEntry);
      }
    });
  }
};
12 |
13 | export default reportWebVitals;
14 |
--------------------------------------------------------------------------------
/src/frontend/src/setupTests.js:
--------------------------------------------------------------------------------
1 | // jest-dom adds custom jest matchers for asserting on DOM nodes.
2 | // allows you to do things like:
3 | // expect(element).toHaveTextContent(/react/i)
4 | // learn more: https://github.com/testing-library/jest-dom
5 | import '@testing-library/jest-dom';
6 |
--------------------------------------------------------------------------------
/src/rekognition/check-userid/ddb.py:
--------------------------------------------------------------------------------
1 | import boto3
2 |
3 | from typing import Mapping
4 |
5 | from boto3.dynamodb.conditions import BeginsWith, Key
6 | from boto3.dynamodb.conditions import Key, Attr
7 | from base64 import b64encode
8 | #from aws_xray_sdk.core import xray_recorder
9 |
10 |
class FaceTableClient:
    '''
    Represents a storage client for querying facial metadata.
    '''
    def __init__(self, table_name: str, region_name: str) -> None:
        '''
        Initializes a new instance of the FaceTableClient.
        :param table_name: The DynamoDB table name.
        :param region_name: The Amazon region hosting the table.
        '''
        assert table_name is not None, "No table_name available"
        assert region_name is not None, "No region_name available"

        ddb = boto3.resource('dynamodb', region_name=region_name)
        self.table = ddb.Table(table_name)

    #@xray_recorder.capture('get_faces')
    def check_ID(self, user_id: str) -> list:
        '''
        Returns the (lower-cased) user ids of items that have at least one
        Face:: record for the given user, so callers can test membership.

        Fixes vs. original: return annotation said Mapping[str, str] but a
        list was returned; explicit `.__and__` replaced by the `&` operator;
        leftover debug print removed.

        :param user_id: The user's alias.
        :returns: list of user ids (the 'User::' prefix stripped, lower-cased).
        '''
        assert user_id is not None, "user_id is missing"

        # Fetch only this user's Face:: records.
        # NOTE(review): only the first page of results is read; pagination
        # (LastEvaluatedKey) is not handled -- confirm item counts stay small.
        response = self.table.query(
            KeyConditionExpression=Key('PartitionKey').eq('User::' + user_id) & Key('SortKey').begins_with('Face::'),
        )

        users = []
        for item in response['Items']:
            # PartitionKey is 'User::<id>'; strip the prefix and normalize case.
            users.append(str(item['PartitionKey']).replace('User::', '', 1).lower())
        return users
48 |
49 |
--------------------------------------------------------------------------------
/src/rekognition/check-userid/handler.py:
--------------------------------------------------------------------------------
1 | from ast import Str
2 | from ddb import FaceTableClient
3 |
4 | import boto3
5 | from os import environ, path
6 | from typing import Any, Mapping, Tuple
7 | from json import loads
8 | from logging import Logger
9 |
10 |
'''
Initialize the runtime.
'''
# Resolved once at import time (Lambda cold start).
region_name = environ.get('REGION')
logger = Logger(name='LambdaFunction')
# Minimum Rekognition similarity (percent) treated as a face match.
SIMILARITY_THRESHOLD = 95.0

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # NOTE(review): bare except keeps X-Ray best-effort, but it also hides
    # unrelated failures from patch_all(); consider narrowing to ImportError.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
# Clients are created after patch_all() so their AWS calls are instrumented.
rek_client = boto3.client('rekognition', region_name=environ.get('REGION'))
face_table_client = FaceTableClient(environ.get(
    'FACE_TABLE_NAME'), region_name=region_name)
33 |
def choose_random_face(faces: Mapping[str, str]) -> Tuple[str, str]:
    '''
    Chooses a random face from the set.

    :param faces: mapping of face_id -> image payload.
    :returns: face_id (str) and image (bytes)
    :raises ValueError: when `faces` is empty (randint on an empty range).
    '''
    # Bug fix: `randint` was referenced without ever being imported in this
    # module, so any call raised NameError. Import locally to keep the
    # module-level namespace unchanged.
    from random import randint
    faceids = list(faces.keys())
    ix = randint(0, len(faceids) - 1)
    return faceids[ix], faces[faceids[ix]]
42 |
43 |
def function_main(event: Mapping[str, Any], _=None):
    '''
    Lambda entry point: reports whether the given user already exists.

    :param event: payload containing the 'UserId' key.
    :returns: dict with a 'Reason' string describing the existence check.
    '''
    user_id = event['UserId']

    # Every face record for the user; membership of the id in this list is
    # how we decide whether the user is already registered.
    known_users = face_table_client.check_ID(user_id)
    exists = user_id in known_users
    return {'Reason': 'User Exist' if exists else 'User not Exist'}
60 |
def read_example_file(filename: str) -> Mapping[str, Any]:
    '''
    Loads a JSON payload from the examples/ directory beside this module.

    :param filename: name of the file inside examples/.
    :returns: the parsed JSON document.
    '''
    full_path = path.join(path.dirname(__file__), 'examples', filename)
    with open(full_path, 'r') as handle:
        return loads(handle.read())
67 |
if __name__ == '__main__':
    # Local debugging entry point: replays the checked-in example payload
    # inside an explicit X-Ray segment.
    # NOTE(review): xray_recorder exists only if the aws_xray_sdk import
    # above succeeded; running locally without it raises NameError here.
    xray_recorder.begin_segment('LocalDebug')
    payload = read_example_file('payload.json')
    function_main(payload)
    xray_recorder.end_segment()
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "handler.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "REGION":"us-east-1",
12 | "FACE_TABLE_NAME":"HomeNet-Hybrid-FaceTable"
13 | }
14 | }
15 | ]
16 | }
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/errors.py:
--------------------------------------------------------------------------------
class InvalidImageUriException(Exception):
    '''
    Raised when an s3_uri does not have the expected format.
    '''
6 |
class InvalidImageExtensionException(Exception):
    '''
    Raised when a file's suffix is not a supported image type.
    '''
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/handler.py:
--------------------------------------------------------------------------------
1 | from models import InputRequest
2 | import boto3
3 | from os import environ, path
4 | from typing import Any, Mapping, Tuple
5 | from json import loads
6 | from logging import Logger
7 | from random import randint
8 | from base64 import b64decode
9 |
'''
Initialize the runtime.
'''
# Resolved once at import time (Lambda cold start).
region_name= environ.get('REGION')
logger = Logger(name='LambdaFunction')
# Minimum similarity (percent) required to treat two faces as a match.
SIMILARITY_THRESHOLD = 95.0

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # NOTE(review): bare except keeps X-Ray best-effort, but it also hides
    # unrelated failures from patch_all(); consider narrowing to ImportError.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
# Created after patch_all() so Rekognition calls are traced.
rek_client = boto3.client('rekognition', region_name=environ.get('REGION'))
30 |
31 |
def function_main(event: Mapping[str, Any], _=None):
    '''
    Compares the user's camera image against their ID-card photo via
    Rekognition CompareFaces.

    :param event: payload with Bucket, Name (selfie key), IdCardName
        (ID-card key) and UserId.
    :returns: dict with IsMatch (bool) and Reason (str).
    :raises: re-raises any Rekognition client error after logging it.
    '''
    inputRequest = InputRequest(event)

    '''
    Otherwise compare a historical record against the input
    '''
    try:
        # NOTE(review): the CompareFaces SimilarityThreshold parameter is a
        # percentage; 0.9 here means 0.9%, so the service returns nearly
        # every candidate and the real filtering happens below against
        # SIMILARITY_THRESHOLD (95.0). Confirm intent before changing.
        response = rek_client.compare_faces(
            SimilarityThreshold=0.9,
            SourceImage={
                'S3Object': {
                    'Bucket': inputRequest.bucket,
                    'Name': inputRequest.idcard_name
                }
            },
            TargetImage={
                'S3Object': {
                    'Bucket': inputRequest.bucket,
                    'Name': inputRequest.name
                }
            })

        '''
        Confirm these are approximately the same image.
        '''
        if len(response['FaceMatches']) == 0:
            return {
                'IsMatch': False,
                'Reason': 'Property $.FaceMatches is empty.'
            }

        # Accept as soon as any candidate clears the similarity bar.
        for match in response['FaceMatches']:
            similarity: float = match['Similarity']
            if similarity > SIMILARITY_THRESHOLD:
                return {
                    'IsMatch': True,
                    'Reason': 'All checks passed.'
                }

        # Every candidate was below threshold. (The original trailing
        # success-return and `facenotMatch` flag were unreachable dead code
        # once FaceMatches was known non-empty; they have been removed.)
        return {
            'IsMatch': False,
            'Reason': 'Similarity comparison was below threshold (%f < %f).' % (similarity, SIMILARITY_THRESHOLD)
        }
    except Exception as error:
        print('Comparing({}) to ID Card failed - {}'.format(
            inputRequest.user_id, str(error)))
        raise error
86 |
def read_example_file(filename: str) -> Mapping[str, Any]:
    '''
    Reads and parses a JSON document from the examples/ directory that
    lives next to this module.
    '''
    examples = path.join(path.dirname(__file__), 'examples')
    with open(path.join(examples, filename), 'r') as handle:
        return loads(handle.read())
93 |
if __name__ == '__main__':
    # Local debugging entry point: replays the checked-in example payload
    # inside an explicit X-Ray segment.
    # NOTE(review): xray_recorder exists only if the aws_xray_sdk import
    # above succeeded; running locally without it raises NameError here.
    xray_recorder.begin_segment('LocalDebug')
    payload = read_example_file('payload.json')
    function_main(payload)
    xray_recorder.end_segment()
99 |
100 |
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from base64 import b64decode
3 |
class InputRequest:
    '''
    Typed wrapper around the incoming Lambda event payload.

    Exposes the user id (normalized to lower case), the optional image
    payload (base64 strings are decoded to bytes), and the S3 bucket/key
    coordinates of the selfie ('Name') and the ID card ('IdCardName').
    '''
    def __init__(self, event: dict) -> None:
        '''
        :param event: payload with UserId, Bucket, Name, IdCardName and
            optionally Image (base64 string or raw bytes).
        :raises KeyError: when a required key is absent.
        '''
        self.user_id = event['UserId']
        # .get() yields None when Image is absent; the setter maps that to
        # None. (Replaces the original `!= None` ternary; the leftover debug
        # print of IdCardName has been removed.)
        self.image_bytes = event.get('Image')
        self.bucket = event['Bucket']
        self.name = event['Name']
        self.idcard_name = event['IdCardName']

    @property
    def user_id(self) -> str:
        return self.__user_id

    @user_id.setter
    def user_id(self, value: str) -> None:
        # User ids are compared case-insensitively; store canonical form.
        self.__user_id = value.lower()

    @property
    def image_bytes(self) -> bytes:
        return self.__image

    @image_bytes.setter
    def image_bytes(self, value: Any) -> None:
        # Accept raw bytes, a base64-encoded string, or nothing at all.
        if isinstance(value, bytes):
            self.__image = value
        elif isinstance(value, str):
            self.__image = b64decode(value)
        else:
            self.__image = None

    @property
    def bucket(self) -> str:
        return self.__bucket

    @bucket.setter
    def bucket(self, value: str) -> None:
        self.__bucket = value

    @property
    def name(self) -> str:
        return self.__name

    @name.setter
    def name(self, value: str) -> None:
        self.__name = value

    @property
    def idcard_name(self) -> str:
        return self.__idcard_name

    @idcard_name.setter
    def idcard_name(self, value: str) -> None:
        self.__idcard_name = value
58 |
59 |
60 |
61 |
62 |
63 |
--------------------------------------------------------------------------------
/src/rekognition/compare-face-with-idcard/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "handler.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "REGION":"us-east-1",
12 | "FACE_TABLE_NAME":"HomeNet-Hybrid-FaceTable"
13 | }
14 | }
15 | ]
16 | }
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/ddb.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | from errors import InvalidImageExtensionException, InvalidImageUriException
3 | from typing import Mapping
4 | from urllib.parse import urlparse
5 | from boto3.dynamodb.conditions import BeginsWith, Key
6 | from boto3.dynamodb.conditions import Key, Attr
7 | from base64 import b64encode
8 | #from aws_xray_sdk.core import xray_recorder
9 |
10 |
class FaceTableClient:
    '''
    Represents a storage client for querying Facial metadata.
    '''
    def __init__(self, table_name:str, region_name:str) -> None:
        '''
        Initializes a new instance of the FaceTableClient.
        :param table_name: The DynamoDB table name.
        :param region_name: The Amazon region hosting the table.
        '''
        assert table_name is not None, "No table_name available"
        assert region_name is not None, "No region_name available"

        ddb = boto3.resource('dynamodb', region_name=region_name)
        self.table = ddb.Table(table_name)

        # S3 client backs __get_image_from_uri for fetching raw image bytes.
        self.s3 = boto3.client('s3', region_name=region_name)

    #@xray_recorder.capture('get_faces')
    def get_faces(self, user_id:str)->Mapping[str,str]:
        '''
        Gets every face associated with a given user.
        :param user_id: The users alias.
        :returns: Map of face_id (str) to either the inline image payload
            or an S3 locator dict {'bucket':..., 'name':...}.
        '''
        assert user_id is not None, "user_id is missing"

        # Query all Face:: rows under this user's partition.
        # Use the idiomatic `&` operator instead of calling __and__ directly.
        response = self.table.query(
            KeyConditionExpression=Key('PartitionKey').eq('User::'+user_id) & Key('SortKey').begins_with('Face::'),
        )

        faces = {}
        for item in response['Items']:
            face_id:str = str(item['SortKey']).replace('Face::','',1).lower()
            if 'image' in item:
                # Inline image payload stored directly on the item.
                faces[face_id] = item['image']
            elif 'bucket' in item:
                # Image lives in S3; return a locator instead of bytes.
                faces[face_id] = {'bucket':item['bucket'],'name':item['name']}
            else:
                # Row exists without any image payload; skip it but leave a trace.
                print('user {} - face_id {} has no face.'.format(user_id,face_id))

        return faces


    #@xray_recorder.capture('get_image_from_uri')
    def __get_image_from_uri(self, s3_uri:str)->bytes:
        '''
        Downloads the requested image from Amazon S3.
        :param s3_uri: The path in format s3://bucket/key.
        :rtype: The raw image bytes.
        :raises InvalidImageUriException: when the scheme is not s3://.
        :raises InvalidImageExtensionException: when the key is not .png/.jpg.
        '''
        #xray_recorder.current_subsegment().put_annotation('s3_uri', s3_uri)
        url = urlparse(s3_uri)

        if url.scheme != 's3':
            raise InvalidImageUriException(
                'get_image_from_uri only supports s3://bucket/key format.')

        bucket = url.netloc
        key = url.path.lstrip('/')

        if not key.lower().endswith('.png') and not key.lower().endswith('.jpg'):
            raise InvalidImageExtensionException(
                'get_image_from_uri only supports .png and .jpg files.')

        '''
        Retrieve the object from the bucket.
        '''
        response = self.s3.get_object(Bucket=bucket,Key=key)
        return response['Body'].read()
82 |
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/errors.py:
--------------------------------------------------------------------------------
class InvalidImageUriException(Exception):
    '''
    Raised when an s3_uri does not follow the expected s3://bucket/key format.
    '''
6 |
class InvalidImageExtensionException(Exception):
    '''
    Raised when the file suffix is an unsupported image type.
    '''
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/handler.py:
--------------------------------------------------------------------------------
1 | from ddb import FaceTableClient
2 | from models import InputRequest
3 | import boto3
4 | from os import environ, path
5 | from typing import Any, Mapping, Tuple
6 | from json import loads
7 | from logging import Logger
8 | from random import randint
9 | from base64 import b64decode
10 |
'''
Initialize the runtime.
'''
# AWS region hosting the Rekognition and DynamoDB resources.
region_name= environ.get('REGION')
logger = Logger(name='LambdaFunction')
# Minimum Rekognition similarity (percent, 0-100) for two faces to count as a match.
SIMILARITY_THRESHOLD = 95.0

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # NOTE(review): xray is treated as best-effort; the bare except also
    # swallows KeyboardInterrupt -- consider narrowing to `except Exception`.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
rek_client = boto3.client('rekognition', region_name=environ.get('REGION'))
face_table_client = FaceTableClient(environ.get('FACE_TABLE_NAME'), region_name=region_name)
32 |
def choose_random_face(faces: Mapping[str, str]) -> Tuple[str, str]:
    '''
    Chooses a random face from the set.
    :param faces: mapping of face_id to image payload; must be non-empty.
    :returns: face_id (str) and image (bytes)
    '''
    # random.choice is the idiomatic single-draw selection; it replaces the
    # manual randint-index bookkeeping.
    from random import choice
    face_id = choice(list(faces))
    return face_id, faces[face_id]
41 |
def function_main(event:Mapping[str,Any],_=None):
    '''
    Determines whether the incoming face image matches a face already
    enrolled for the given user.
    :param event: payload containing UserId, Bucket, Name and optional FaceId/Image.
    :returns: dict with IsMatch (bool) and Reason (str).
    '''
    inputRequest = InputRequest(event)

    '''
    Retrieve the face information.
    If an exact match exists, we're done.
    '''
    faces = face_table_client.get_faces(inputRequest.user_id)
    if inputRequest.face_id.lower() in faces:
        return {
            'IsMatch':True,
            'Reason': 'Known FaceId detected.'
        }

    '''
    Otherwise compare a historical record against the input
    '''
    historical_face_id, historical_image = choose_random_face(faces)
    try:
        # get_faces returns either an S3 locator dict or an inline image
        # payload; build SourceImage accordingly. The previous code assumed
        # the dict form and crashed (TypeError) on inline images.
        if isinstance(historical_image, dict):
            source_image = {
                'S3Object': {
                    'Bucket': historical_image['bucket'],
                    'Name': historical_image['name']
                }
            }
        elif isinstance(historical_image, bytes):
            source_image = {'Bytes': historical_image}
        else:
            # Assumed to be a base64-encoded string -- TODO confirm against the writer.
            source_image = {'Bytes': b64decode(historical_image)}

        # BUG FIX: SimilarityThreshold is a percentage (0-100). The previous
        # value of 0.9 effectively disabled server-side filtering and was
        # inconsistent with the local SIMILARITY_THRESHOLD check below.
        response = rek_client.compare_faces(
            SimilarityThreshold=SIMILARITY_THRESHOLD,
            SourceImage=source_image,
            TargetImage={
                'S3Object': {
                    'Bucket': inputRequest.bucket,
                    'Name': inputRequest.name
                }
            })

        '''
        Confirm these are approximately the same image.
        '''
        if len(response['FaceMatches']) == 0:
            return {
                'IsMatch':False,
                'Reason': 'Property $.FaceMatches is empty.'
            }

        for match in response['FaceMatches']:
            similarity:float = match['Similarity']
            if similarity < SIMILARITY_THRESHOLD:
                return {
                    'IsMatch':False,
                    'Reason': 'Similarity comparison was below threshold (%f < %f).' % (similarity, SIMILARITY_THRESHOLD)
                }

        return {
            'IsMatch':True,
            'Reason': 'All checks passed.'
        }
    except Exception as error:
        # Log enough context to correlate the failed comparison, then re-raise
        # so the state machine's retry policy can act on it.
        print('Comparing({}) to face_id[{}] failed - {}'.format(
            inputRequest.user_id, historical_face_id, str(error)))
        raise error
101 |
def read_example_file(filename:str)->Mapping[str,Any]:
    '''
    Loads a JSON document from the examples/ directory beside this module.
    :param filename: bare file name within examples/.
    :returns: the parsed JSON payload.
    '''
    example_file = path.join(path.dirname(__file__), 'examples', filename)
    with open(example_file, 'r') as handle:
        return loads(handle.read())
108 |
if __name__ == '__main__':
    # Local debug entry point: wraps the handler in an X-Ray segment and
    # feeds it the checked-in example payload.
    xray_recorder.begin_segment('LocalDebug')
    payload = read_example_file('payload.json')
    function_main(payload)
    xray_recorder.end_segment()
114 |
115 |
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from base64 import b64decode
3 |
class InputRequest:
    '''
    Strongly-typed view over the compare-faces invocation payload.
    Required keys: UserId, Bucket, Name. Optional: Image (base64 str or bytes),
    FaceId, Properties.
    '''
    def __init__(self, event:dict) -> None:
        self.user_id = event['UserId']
        # event.get already yields None when the key is absent; the setter
        # normalizes any non-bytes/non-str value to None.
        self.image_bytes = event.get('Image')
        self.bucket = event['Bucket']
        self.name = event['Name']
        # Sentinel marker used when the caller did not supply a FaceId.
        self.face_id = event.get('FaceId', 'FACEID_NOT_AVAIL')
        self.property_bag = event.get('Properties', {})

    @property
    def user_id(self)->str:
        '''The user's alias (normalized to lowercase).'''
        return self.__user_id

    @user_id.setter
    def user_id(self, value:str)->None:
        self.__user_id = value.lower()

    @property
    def face_id(self)->str:
        '''Rekognition FaceId, or 'FACEID_NOT_AVAIL' when not supplied.'''
        return self.__face_id

    @face_id.setter
    def face_id(self, value:str)->None:
        self.__face_id = value

    @property
    def image_bytes(self)->bytes:
        '''Raw image bytes, or None when no image was provided.'''
        return self.__image

    @image_bytes.setter
    def image_bytes(self, value:Any)->None:
        # Accept raw bytes, a base64 string, or anything else as "no image".
        if isinstance(value, bytes):
            self.__image = value
        elif isinstance(value, str):
            self.__image = b64decode(value)
        else:
            self.__image = None

    @property
    def property_bag(self)->dict:
        '''Free-form caller-supplied properties.'''
        return self.__property_bag

    @property_bag.setter
    def property_bag(self, value:dict)->None:
        self.__property_bag = value

    @property
    def bucket(self)->str:
        '''S3 bucket holding the target image.'''
        return self.__bucket

    @bucket.setter
    def bucket(self, value:str)->None:
        self.__bucket = value

    @property
    def name(self)->str:
        '''S3 object key of the target image.'''
        return self.__name

    @name.setter
    def name(self, value:str)->None:
        self.__name = value

    def to_dyanmodb_item(self)->dict:
        '''
        Encodes this object as an Amazon DynamoDB Item.
        (Method name keeps the original 'dyanmodb' spelling for caller compatibility.)
        '''
        return {
            'PartitionKey': {'S': 'User::{}'.format(self.user_id)},
            'SortKey': {'S': self.face_id },
            'property_bag': {'M': InputRequest.ddb_encode_dict(self.property_bag) }
        }

    @staticmethod
    def ddb_encode_dict(dict:dict)->dict:
        '''Encodes a flat dict into DynamoDB string-attribute ('S') form.'''
        # Parameter name 'dict' shadows the builtin; kept for interface stability.
        encoded = {}
        for key in dict.keys():
            encoded[str(key)] = {'S': str(dict[key]) }
        return encoded
92 |
93 |
--------------------------------------------------------------------------------
/src/rekognition/compare-faces/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/detect-faces/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/detect-faces/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "handler.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "REGION":"us-east-1",
12 | "TABLE_NAME":"HomeNet-Hybrid-FaceTable"
13 | }
14 | }
15 | ]
16 | }
--------------------------------------------------------------------------------
/src/rekognition/detect-faces/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/detect-faces/exceptions.py:
--------------------------------------------------------------------------------
1 |
class NoFacesDetectedException(Exception):
    '''
    Raised when no faces are detected in the supplied image.
    '''
7 |
class TooManyFacesDetectedException(Exception):
    '''
    Raised when more than one face is detected in the supplied image.
    '''
13 |
class InvalidPoseDetectedException(Exception):
    '''
    Represents a failure due to the face pose (Pitch/Roll/Yaw) being
    outside the accepted range.
    '''
    # BUG FIX (docs): the original docstring was copy-pasted from
    # SunglassesDetectedException and wrongly described sunglasses.
    pass
19 |
class SunglassesDetectedException(Exception):
    '''
    Raised when the user appears to be wearing sunglasses.
    '''
--------------------------------------------------------------------------------
/src/rekognition/detect-faces/handler.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | from exceptions import InvalidPoseDetectedException, NoFacesDetectedException, SunglassesDetectedException, TooManyFacesDetectedException
3 | from os import environ, path
4 | from typing import Any, Mapping
5 | from json import dumps, loads
6 | from logging import Logger
7 | from base64 import b64decode
8 |
'''
Initialize the function runtime.
'''
logger = Logger(name='LambdaFunction')

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # NOTE(review): xray is treated as best-effort; the bare except also
    # swallows KeyboardInterrupt -- consider narrowing to `except Exception`.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
# Rekognition client shared by detect_faces / detect_faces_image below.
client = boto3.client('rekognition', region_name=environ.get('REGION'))
27 |
def valid_pose_value(value:float)->bool:
    '''
    Check that a pose angle is within an acceptable range.
    :param value: a pose angle in degrees.
    :returns: True when strictly between -45 and 45 degrees.
    '''
    assert value is not None, "valid_pose_value missing value"
    # Chained comparison replaces the `-45 < value and value < 45` form.
    return -45 < value < 45
34 |
#@xray_recorder.capture('detect_faces')
def detect_faces(image:str)->dict:
    '''
    Invoke the rekognition:detect_faces method.
    :param image: The utf8(base64( image-bytes ))
    :rtype: The response from detect_faces method.
    '''
    assert image is not None, "detect_faces missing image argument."

    raw_bytes = b64decode(image)
    return client.detect_faces(
        Attributes=['ALL'],
        Image={'Bytes': raw_bytes})
50 |
#@xray_recorder.capture('detect_faces_image')
def detect_faces_image(bucket: str, name: str) -> dict:
    '''
    Invoke the rekognition:detect_faces method against an S3-hosted image.
    :param bucket: The S3 bucket name.
    :param name: The S3 object key.
    :rtype: The response from detect_faces method.
    '''
    assert bucket is not None and name is not None, "detect_faces missing image s3 bucket argument."
    s3_object = {'Bucket': bucket, 'Name': name}
    return client.detect_faces(
        Attributes=['ALL'],
        Image={'S3Object': s3_object})
69 |
def function_main(event:Mapping[str,Any], _=None):
    '''
    Validates that the supplied image contains exactly one usable,
    front-facing, sunglasses-free face.
    :param event: payload with either 'Image' (base64) or 'Bucket'/'Name'.
    :returns: {'FaceDetails': <face>} for the single valid face.
    :raises NoFacesDetectedException: no face with confidence > 90.
    :raises TooManyFacesDetectedException: more than one confident face.
    :raises InvalidPoseDetectedException: head rotated beyond +/-45 degrees.
    :raises SunglassesDetectedException: sunglasses detected.
    '''
    '''
    Convert the input into a Amazon Rekognition call...
    '''
    # Idiomatic None test replaces `event.get('Image', None) != None`.
    if event.get('Image') is not None:
        response = detect_faces(event['Image'])
    else:
        response = detect_faces_image(event['Bucket'], event['Name'])

    '''
    Confirm the face is usable...
    '''
    valid_faces = [face for face in response['FaceDetails'] if face['Confidence'] > 90]
    if len(valid_faces) == 0:
        raise NoFacesDetectedException()
    elif len(valid_faces) > 1:
        raise TooManyFacesDetectedException()

    user_face = valid_faces[0]

    '''
    Confirm the face position is within range
    Each pose dimension is between -180 to 180 degress
    '''
    pose = user_face['Pose']
    for dimension in ['Pitch','Roll','Yaw']:
        if not valid_pose_value(pose[dimension]):
            raise InvalidPoseDetectedException(dimension)

    '''
    Do not permit users to wear sunglasses
    '''
    if user_face['Sunglasses']['Value']:
        raise SunglassesDetectedException()

    '''
    Return the valid faces...
    '''
    return {
        'FaceDetails': user_face
    }
111 |
def read_example_file(filename:str)->Mapping[str,Any]:
    '''
    Loads a JSON document from the examples/ directory beside this module.
    :param filename: bare file name within examples/.
    :returns: the parsed JSON payload.
    '''
    example_file = path.join(path.dirname(__file__), 'examples', filename)
    with open(example_file, 'r') as handle:
        return loads(handle.read())
118 |
if __name__ == '__main__':
    # Local debug entry point: wraps the handler in an X-Ray segment and
    # feeds it a checked-in example payload.
    xray_recorder.begin_segment(name='LocalDebug')
    payload = read_example_file('nbachmei.json')
    function_main(payload)
    xray_recorder.end_segment()
--------------------------------------------------------------------------------
/src/rekognition/detect-faces/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/index-faces/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/index-faces/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "handler.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "REGION":"ca-central-1",
12 | "RIV_STACK_NAME":"Riv-Prod",
13 | "FACE_TABLE_NAME":"RIV-Riv-Prod-SharedStorageMetadataFaceTable609BE09F"
14 | }
15 | }
16 | ]
17 | }
--------------------------------------------------------------------------------
/src/rekognition/index-faces/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/index-faces/errors.py:
--------------------------------------------------------------------------------
1 | import boto3
2 |
class TransientError(Exception):
    '''
    Represents a retryable error.
    '''
    pass

class NonRecoverableError(Exception):
    '''
    Represents a hard failure from DynamoDB.
    '''
    pass

class ExceptionUtil:

    @staticmethod
    def normalize_for_step_functions(ddb_client:boto3.client, error:Exception)->Exception:
        '''
        Creates a generic error to return to the StepFunction caller.
        This approach simplifies the state machine's retry policy.
        :param ddb_client: DynamoDB client whose exception classes are inspected.
        :param error: the caught exception instance.
        :returns: NonRecoverableError, TransientError, or the original error.
        '''
        hard_failures = (
            ddb_client.exceptions.ConditionalCheckFailedException,
            ddb_client.exceptions.ResourceNotFoundException,
            ddb_client.exceptions.ItemCollectionSizeLimitExceededException,
        )

        retryable = (
            ddb_client.exceptions.ProvisionedThroughputExceededException,
            ddb_client.exceptions.TransactionConflictException,
            ddb_client.exceptions.RequestLimitExceeded,
            ddb_client.exceptions.InternalServerError,
        )

        # BUG FIX: the original used `error in hard_failures`, testing an
        # exception *instance* for membership in a list of exception *classes*.
        # That comparison can never succeed, so errors were never normalized.
        if isinstance(error, hard_failures):
            return NonRecoverableError(error.__class__.__name__)
        elif isinstance(error, retryable):
            return TransientError(error.__class__.__name__)
        else:
            return error
42 |
--------------------------------------------------------------------------------
/src/rekognition/index-faces/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 | from base64 import b64decode, b64encode
3 |
class FaceMetadata:
    '''
    Strongly-typed view over the index-faces invocation payload.
    Required keys: UserId, Bucket, Name, Properties. Optional: Image.
    '''
    def __init__(self, event:dict) -> None:
        self.user_id = event['UserId']
        # event.get already yields None when the key is absent; the setter
        # normalizes any non-bytes/non-str value to None.
        self.image_bytes = event.get('Image')
        self.bucket = event['Bucket']
        self.name = event['Name']
        self.property_bag = event['Properties']

    @property
    def user_id(self)->str:
        '''The user's alias (normalized to lowercase).'''
        return self.__user_id

    @user_id.setter
    def user_id(self, value:str)->None:
        self.__user_id = value.lower()

    @property
    def image_bytes(self)->bytes:
        '''Raw image bytes, or None when no image was provided.'''
        return self.__image

    @image_bytes.setter
    def image_bytes(self, value:Any)->None:
        # Accept raw bytes, a base64 string, or anything else as "no image".
        if isinstance(value, bytes):
            self.__image = value
        elif isinstance(value, str):
            self.__image = b64decode(value)
        else:
            self.__image = None

    @property
    def property_bag(self)->dict:
        '''Free-form caller-supplied properties.'''
        return self.__property_bag

    @property_bag.setter
    def property_bag(self, value:dict)->None:
        self.__property_bag = value

    @property
    def bucket(self)->str:
        '''S3 bucket name (normalized to lowercase).'''
        return self.__bucket

    @bucket.setter
    def bucket(self, value:str)->None:
        self.__bucket = value.lower()

    @property
    def name(self)->str:
        '''S3 object key (normalized to lowercase).'''
        return self.__name

    @name.setter
    def name(self, value:str)->None:
        self.__name = value.lower()

    @staticmethod
    def ddb_encode_dict(dict:dict)->dict:
        '''Encodes a flat dict into DynamoDB string-attribute ('S') form.'''
        # Parameter name 'dict' shadows the builtin; kept for interface stability.
        encoded = {}
        for key in dict.keys():
            encoded[str(key)] = {'S': str(dict[key]) }
        return encoded
64 |
65 |
--------------------------------------------------------------------------------
/src/rekognition/index-faces/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/liveness-session-result/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/liveness-session-result/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/liveness-session-result/handler.py:
--------------------------------------------------------------------------------
1 | from sys import prefix
2 | import boto3
3 | from base64 import b64decode
4 | from os import environ, path
5 | from typing import Any, Mapping
6 | from logging import Logger
7 | from json import loads
8 | from logging import Logger
9 | import base64
10 | import json
11 | import sys
12 |
'''
Initialize the runtime.
'''
# AWS region hosting the Rekognition and S3 resources.
region_name = environ.get('REGION')
logger = Logger(name='LambdaFunction')

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # NOTE(review): xray is treated as best-effort; the bare except also
    # swallows KeyboardInterrupt -- consider narrowing to `except Exception`.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
rek_client = boto3.client('rekognition', region_name=region_name)

# S3 resource is used to download the liveness reference image.
s3 = boto3.resource('s3', region_name=region_name)
34 |
35 |
class FaceLivenessError(Exception):
    '''
    Raised when fetching the Face Liveness session result fails.
    '''
41 |
42 |
def session_result(sessionid):
    '''
    Get Session result.
    :param sessionid: the Rekognition Face Liveness SessionId to look up.
    :returns: the raw GetFaceLivenessSessionResults response.
    :raises FaceLivenessError: wrapping any Rekognition service fault so
        callers only need to handle a single exception type.
    '''
    try:
        session = rek_client.get_face_liveness_session_results(
            SessionId=sessionid)
        return session

    # Each service fault is logged, then re-raised as FaceLivenessError with a
    # short code string. NOTE(review): two codes differ from the exception
    # class names (e.g. 'AccessDeniedError', 'SessionNotFound') -- kept as-is
    # because downstream callers may match on these exact strings.
    except rek_client.exceptions.AccessDeniedException:
        logger.error('Access Denied Error')
        raise FaceLivenessError('AccessDeniedError')
    except rek_client.exceptions.InternalServerError:
        logger.error('InternalServerError')
        raise FaceLivenessError('InternalServerError')
    except rek_client.exceptions.InvalidParameterException:
        logger.error('InvalidParameterException')
        raise FaceLivenessError('InvalidParameterException')
    except rek_client.exceptions.SessionNotFoundException:
        logger.error('SessionNotFound')
        raise FaceLivenessError('SessionNotFound')
    except rek_client.exceptions.ThrottlingException:
        logger.error('ThrottlingException')
        raise FaceLivenessError('ThrottlingException')
    except rek_client.exceptions.ProvisionedThroughputExceededException:
        logger.error('ProvisionedThroughputExceededException')
        raise FaceLivenessError('ProvisionedThroughputExceededException')
70 |
71 |
def function_main(event, context):
    '''
    Fetches the liveness session result and inlines the reference image as
    base64 so API callers can render it directly.
    :param event: payload containing 'sessionid'.
    :param context: Lambda context (unused).
    :returns: {'statusCode': 200, 'body': <session result>}.
    '''
    assert event['sessionid'] is not None, "SessionID is not available"
    output = session_result(event['sessionid'])
    # Guard with .get so a response without a ReferenceImage is returned
    # as-is instead of raising KeyError.
    if output and output.get('ReferenceImage'):
        bucketName = output['ReferenceImage']['S3Object']['Bucket']
        keyName = output['ReferenceImage']['S3Object']['Name']
        obj = s3.Bucket(bucketName).Object(keyName)
        img = obj.get()['Body'].read()
        # Decode the base64 bytes directly instead of the previous
        # str(b'..') and strip-quotes round trip (same resulting string).
        output['ReferenceImageBase64'] = base64.b64encode(img).decode('utf-8')
    return {
        'statusCode': 200,
        'body': output
    }
91 |
92 |
if __name__ == '__main__':
    # Local debug entry point: pass the liveness SessionId as argv[1].
    xray_recorder.begin_segment('LocalDebug')
    args = sys.argv[1]
    print(args)
    session_result(args)
    xray_recorder.end_segment()
99 |
--------------------------------------------------------------------------------
/src/rekognition/liveness-session-result/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/reset/Dockerfile:
--------------------------------------------------------------------------------
1 | # Define function directory
2 | ARG FUNCTION_DIR="/var/task"
3 |
4 | FROM python:3.8 as build-image
5 |
6 | # Install aws-lambda-cpp build dependencies
7 | # RUN apt-get update && \
8 | # apt-get install -y \
9 | # g++ \
10 | # make \
11 | # cmake \
12 | # unzip \
13 | # libcurl4-openssl-dev
14 |
15 | # Include global arg in this stage of the build
16 | ARG FUNCTION_DIR
17 | # Create function directory
18 | RUN mkdir -p ${FUNCTION_DIR}
19 |
20 | # Copy function code
21 | COPY . ${FUNCTION_DIR}
22 |
23 | # Install the runtime interface client
24 | RUN pip install \
25 | --target ${FUNCTION_DIR} \
26 | awslambdaric
27 |
28 | RUN pip install \
29 | --target ${FUNCTION_DIR} \
30 | -r ${FUNCTION_DIR}/requirements.txt
31 |
32 | # Multi-stage build: grab a fresh copy of the base image
33 | FROM public.ecr.aws/lambda/python:3.8
34 |
35 | # Include global arg in this stage of the build
36 | ARG FUNCTION_DIR
37 | # Set working directory to function root directory
38 | WORKDIR ${FUNCTION_DIR}
39 |
40 | # Copy in the build image dependencies
41 | COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}
42 |
43 | #ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
44 | CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/reset/handler.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | from base64 import b64decode
3 | from os import environ, path
4 | from typing import Any, Mapping
5 | from json import loads
6 | from logging import Logger
7 |
'''
Initialize the function runtime
'''
logger = Logger(name='LambdaFunction')
# Stack name prefixes the Rekognition collection id ('<stack>-0').
riv_stack_name = environ.get('RIV_STACK_NAME')
region_name = environ.get('REGION')
assert riv_stack_name is not None, "riv_stack_name is not available"
assert region_name is not None, "region_name is not available"

'''
Prepare XRAY, if available.
'''
try:
    from aws_xray_sdk.core import xray_recorder, patch_all
    patch_all() # Instrument all AWS methods.
except:
    # NOTE(review): xray is treated as best-effort; the bare except also
    # swallows KeyboardInterrupt -- consider narrowing to `except Exception`.
    print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
rek_client = boto3.client('rekognition', region_name=region_name)

ddb = boto3.resource('dynamodb', region_name=region_name)

# Face metadata table that gets purged by function_main.
table = ddb.Table(environ.get('FACE_TABLE_NAME'))
34 |
35 |
def function_main(event: Mapping[str, Any], _=None):
    '''
    Main function handler: wipes all face metadata rows and recreates the
    Rekognition collection for this RIV stack.
    :param event: the invoking event payload (unused).
    :returns: an HTTP-style response confirming the reset.
    '''
    collectionID = riv_stack_name+'-0'

    # BUG FIX: a single scan() call returns at most 1MB of items; follow
    # LastEvaluatedKey so tables larger than one page are fully purged.
    scan = table.scan()
    with table.batch_writer() as batch:
        while True:
            for each in scan['Items']:
                batch.delete_item(
                    Key={
                        'PartitionKey': each['PartitionKey'],
                        "SortKey": each['SortKey']
                    }
                )
            if 'LastEvaluatedKey' not in scan:
                break
            scan = table.scan(ExclusiveStartKey=scan['LastEvaluatedKey'])

    # Drop and recreate the collection to clear all indexed faces.
    # (Return values were previously bound to unused locals; dropped.)
    rek_client.delete_collection(
        CollectionId=collectionID
    )
    rek_client.create_collection(
        CollectionId=collectionID
    )
    return {
        'statusCode': 200,
        'headers': {
            'Content-Type': 'application/json',
            'Access-Control-Allow-Origin': '*'
        },
        'body': "RIV data reset successfully"
    }
65 |
66 |
if __name__ == '__main__':
  # Local debug entry point. function_main requires an event argument;
  # the original call without one raised TypeError.
  xray_recorder.begin_segment('LocalDebug')
  function_main({})
  xray_recorder.end_segment()
71 |
--------------------------------------------------------------------------------
/src/rekognition/reset/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/search-faces/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/search-faces/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "handler.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "REGION":"ca-central-1",
12 | "RIV_STACK_NAME":"Riv-Prod"
13 | }
14 | }
15 | ]
16 | }
--------------------------------------------------------------------------------
/src/rekognition/search-faces/Dockerfile:
--------------------------------------------------------------------------------
# Define function directory
ARG FUNCTION_DIR="/var/task"

# Build stage: install the function code and its pip dependencies into FUNCTION_DIR.
FROM python:3.8 as build-image

# Install aws-lambda-cpp build dependencies
# RUN apt-get update && \
#   apt-get install -y \
#   g++ \
#   make \
#   cmake \
#   unzip \
#   libcurl4-openssl-dev

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Create function directory
RUN mkdir -p ${FUNCTION_DIR}

# Copy function code
COPY . ${FUNCTION_DIR}

# Install the runtime interface client
RUN pip install \
--target ${FUNCTION_DIR} \
awslambdaric

# Install the function's own dependencies alongside the code.
RUN pip install \
--target ${FUNCTION_DIR} \
-r ${FUNCTION_DIR}/requirements.txt

# Multi-stage build: grab a fresh copy of the base image
FROM public.ecr.aws/lambda/python:3.8

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Set working directory to function root directory
WORKDIR ${FUNCTION_DIR}

# Copy in the build image dependencies
COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}

# The Lambda base image supplies the runtime entrypoint; CMD names the handler.
#ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/search-faces/requirements.txt:
--------------------------------------------------------------------------------
1 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/rekognition/setup/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "app.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "RIV_STACK_NAME": "Riv-Prod",
12 | "REGION": "ca-central-1",
13 | "TOTAL_COLLECTIONS": "10"
14 | }
15 | }
16 | ]
17 | }
--------------------------------------------------------------------------------
/src/rekognition/setup/README.md:
--------------------------------------------------------------------------------
1 | # Setup Rekognition Utility
2 |
3 | This utility configures any Amazon Rekognition objects required for your environment.
4 |
5 | ## How do I run this app
6 |
The script requires three environment variables. A later update should convert this to the click framework.
8 |
9 | ```sh
10 | export RIV_STACK_NAME=Prod
11 | export REGION=ca-central-1
12 | export TOTAL_COLLECTIONS=2
13 | ./app.py
14 | ```
15 |
16 | ## How do I determine TOTAL_COLLECTIONS
17 |
The **TOTAL_COLLECTIONS** value defines the number of _AWS Rekognition Collections_ that the system will leverage. AWS Customers __cannot change this value later, without significant effort__. The RIV team strongly encourages over allocation, since there is **no cost** associated with having too many collections.
19 |
20 | Typically, customers should follow the formula `ceil( count(max_users) / 10M ) +1` total partitions. For instance, 27M maximum users requires 4 partitions.
21 |
22 | ## What challenges exist with multiple partitions
23 |
24 | Users within the RIV system provide the tuple `(userid, password)`; where **password** is a `utf8(base64(image))` bytearray of themselves.
25 |
26 | The registration system will persist the user metadata within the `partition_id = hash(userid) % TOTAL_COLLECTIONS`. This design means that a specific photo (password) is unique within its **associated partition, not globally unique**. For instance, a customer might be able to register themselves as `(fred1234, flintstone.png)` and `(barney4567, flintstone.png)`.
27 |
28 | Customers can avoid this behavior by setting `TOTAL_COLLECTIONS=1`, which limits the **max_users** to about 20M total. For many scenarios this is completely acceptable. However, nation-wide brands will find this prohibitively restrictive.
29 |
30 | Instead, the RIV team recommends adding controls around the `user_id` selection. This situation could include scanning a drivers license and setting the `user_id` equal to their `state id` or `government id`.
31 |
--------------------------------------------------------------------------------
/src/rekognition/setup/app.py:
--------------------------------------------------------------------------------
import boto3
from os import environ

'''
Configure the script from environment variables.
'''
riv_stack_name = environ.get('RIV_STACK_NAME')
region_name = environ.get('REGION')
total_collections = environ.get('TOTAL_COLLECTIONS')

if riv_stack_name is None:
    print('RIV_STACK_NAME variable missing, defaulting to Riv-Prod')
    riv_stack_name='Riv-Prod'

if total_collections is None:
    print('TOTAL_COLLECTIONS variable missing, defaulting to 10')
    total_collections=10

if region_name is None:
    # Fall back to the conventional AWS region variables when REGION is unset.
    for alter in ['AWS_REGION', 'AWS_DEFAULT_REGION', 'AMAZON_REGION']:
        region_name = environ.get(alter)
        if region_name is not None:
            print('Defaulting region_name to env[{%s}] = %s' % (alter, region_name))
            break

assert riv_stack_name is not None, "riv_stack_name is not available"
assert region_name is not None, "region_name is not available"
# Fixed copy-paste bug: this message previously reported "region_name".
assert total_collections is not None, "total_collections is not available"

rek_client = boto3.client('rekognition', region_name=region_name)
ssm_client = boto3.client('ssm', region_name=region_name)
total_collections = int(total_collections)
33 |
def create_collections()->None:
    '''
    Create each of the Rekognition Collection Partitions
    '''
    for partition_ix in range(total_collections):
        collection_id = '{}-{}'.format(riv_stack_name, partition_ix)
        try:
            # Probe for an existing collection first; creation is skipped when present.
            rek_client.describe_collection(CollectionId=collection_id)
        except rek_client.exceptions.ResourceNotFoundException:
            # Not found means the partition must be created now.
            response = rek_client.create_collection(
                CollectionId=collection_id,
                Tags={
                    'riv_stack': riv_stack_name,
                })
            print('Created collection {} with Version {}'.format(collection_id, response['FaceModelVersion']))
        except Exception as error:
            # Hard failures (e.g., AccessDeniedException) abort the whole setup.
            print('Unable to describe_collection({})'.format(collection_id))
            raise error
        else:
            # describe_collection succeeded, so there is nothing to do.
            print('Collection {} already exists, nothing to do.'.format(collection_id))
65 |
def set_parameters()->None:
    '''
    Publish the Rekognition client settings into SSM Parameter Store.
    '''
    parameters = [('partition-count', str(total_collections))]
    for name, value in parameters:
        parameter_name = '/riv/{}/rekognition/{}'.format(riv_stack_name, name)
        try:
            response = ssm_client.put_parameter(
                Name=parameter_name,
                Value=value,
                Description='Generated by {}'.format(__file__),
                Type='String',
                Overwrite=True,
                Tier='Standard',
                DataType='text')
            print('set_parameter({}) with version {}'.format(name, response['Version']))
        except Exception as error:
            # Surface the failing parameter before re-raising.
            print('Unable to set_parameter({}, {})'.format(name,value))
            raise error
84 |
if __name__ == '__main__':
    # Provision the collection partitions first, then advertise the
    # partition count to downstream services via SSM.
    create_collections()
    set_parameters()
    print('Setup is complete.')
89 |
--------------------------------------------------------------------------------
/src/rekognition/setup/rekognition-setup.py:
--------------------------------------------------------------------------------
1 | import boto3
2 |
3 |
--------------------------------------------------------------------------------
/src/rekognition/setup/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
--------------------------------------------------------------------------------
/src/rekognition/start-liveness-session/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
--------------------------------------------------------------------------------
/src/rekognition/start-liveness-session/Dockerfile:
--------------------------------------------------------------------------------
# Define function directory
ARG FUNCTION_DIR="/var/task"

# Build stage: install the function code and its pip dependencies into FUNCTION_DIR.
FROM python:3.8 as build-image

# Install aws-lambda-cpp build dependencies
# RUN apt-get update && \
#   apt-get install -y \
#   g++ \
#   make \
#   cmake \
#   unzip \
#   libcurl4-openssl-dev

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Create function directory
RUN mkdir -p ${FUNCTION_DIR}

# Copy function code
COPY . ${FUNCTION_DIR}

# Install the runtime interface client
RUN pip install \
--target ${FUNCTION_DIR} \
awslambdaric

# Install the function's own dependencies alongside the code.
RUN pip install \
--target ${FUNCTION_DIR} \
-r ${FUNCTION_DIR}/requirements.txt

# Multi-stage build: grab a fresh copy of the base image
FROM public.ecr.aws/lambda/python:3.8

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Set working directory to function root directory
WORKDIR ${FUNCTION_DIR}

# Copy in the build image dependencies
COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}

# The Lambda base image supplies the runtime entrypoint; CMD names the handler.
#ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/rekognition/start-liveness-session/handler.py:
--------------------------------------------------------------------------------
# NOTE(review): several imports below (prefix, b64decode, path, loads, json)
# appear unused in this module — candidates for cleanup once confirmed.
from sys import prefix
import boto3
from base64 import b64decode
from os import environ, path
from typing import Any, Mapping
from json import loads
from logging import Logger
import json

'''
Initialize the runtime.
'''
region_name = environ.get('REGION')
# NOTE(review): instantiating Logger directly bypasses the logging hierarchy;
# logging.getLogger(__name__) is the conventional form — confirm before changing.
logger = Logger(name='LambdaFunction')

'''
Prepare XRAY, if available.
'''
try:
  from aws_xray_sdk.core import xray_recorder, patch_all
  patch_all() # Instrument all AWS methods.
except:
  # X-Ray is optional instrumentation; continue without it when the SDK is absent.
  print('AWS XRAY support not available.')

'''
Initialize any clients (... after xray!)
'''
rek_client = boto3.client('rekognition', region_name=region_name)
29 |
class FaceLivenessError(Exception):
  '''
  Raised when the Rekognition Face Liveness API reports a failure.
  '''
35 |
def getSession():
  '''
  Start a Rekognition Face Liveness session and return the service response.
  '''
  def _fail(log_text, label):
    # Record the failure, then surface it to the caller as a FaceLivenessError.
    logger.error(log_text)
    raise FaceLivenessError(label)

  settings = {
    'AuditImagesLimit': 1,
    'OutputConfig': {"S3Bucket": environ.get('IMAGE_BUCKET_NAME')},
  }
  try:
    return rek_client.create_face_liveness_session(Settings=settings)
  except rek_client.exceptions.AccessDeniedException:
    _fail('Access Denied Error', 'AccessDeniedError')
  except rek_client.exceptions.InternalServerError:
    _fail('InternalServerError', 'InternalServerError')
  except rek_client.exceptions.InvalidParameterException:
    _fail('InvalidParameterException', 'InvalidParameterException')
  except rek_client.exceptions.ThrottlingException:
    _fail('ThrottlingException', 'ThrottlingException')
  except rek_client.exceptions.ProvisionedThroughputExceededException:
    _fail('ProvisionedThroughputExceededException', 'ProvisionedThroughputExceededException')
59 |
60 |
def function_main(event, context):
  '''
  Lambda entry point: create a liveness session and wrap it in a response.
  '''
  session = getSession()
  return {
    'statusCode': 200,
    'body': session
  }
69 |
70 |
if __name__ == '__main__':
  # Local debugging entry point; wraps the call in a manual X-Ray segment.
  # Requires the aws_xray_sdk import above to have succeeded.
  xray_recorder.begin_segment('LocalDebug')
  getSession()
  xray_recorder.end_segment()
75 |
--------------------------------------------------------------------------------
/src/rekognition/start-liveness-session/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/shared/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | aws_xray_sdk
--------------------------------------------------------------------------------
/src/test-client/.gitignore:
--------------------------------------------------------------------------------
1 | .env
--------------------------------------------------------------------------------
/src/test-client/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Register New User",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "app.py",
9 | "console": "integratedTerminal",
10 | "args": ["register", "-z", "Riv-Prod", "-r", "ca-central-1"]
11 | },
12 | {
13 | "name": "Update Existing User",
14 | "type": "python",
15 | "request": "launch",
16 | "program": "app.py",
17 | "console": "integratedTerminal",
18 | "args": ["update", "-z", "Riv-Prod", "-r", "ca-central-1", "-u","nbachmei","-p", "./faces/nbachmei.jpg"]
19 | },
20 | {
21 | "name": "Auth User",
22 | "type": "python",
23 | "request": "launch",
24 | "program": "app.py",
25 | "console": "integratedTerminal",
26 | "args": ["auth", "-z", "Riv-Prod", "-r", "ca-central-1", "-u","nbachmei","-p", "./faces/nbachmei.jpg"]
27 | },
28 | {
29 | "name": "Encode Payload",
30 | "type": "python",
31 | "request": "launch",
32 | "program": "app.py",
33 | "console": "integratedTerminal",
34 | "args": ["encode", "-u","nbachmei","-p", "./faces/nbachmei.jpg"]
35 | }
36 | ]
37 | }
--------------------------------------------------------------------------------
/src/test-client/README.md:
--------------------------------------------------------------------------------
1 | # Test Client
2 |
3 | This command line interface (CLI) supports testing the User Portal functionality.
4 |
5 | Most operations require the caller specifies the **CloudFormation Stack Name** (`riv_stack_name`) and **region** you are testing.
6 |
The `riv_stack_name` specifies which instance (Prod, Dev, ...) you want to target, and is specified during the [one-click.sh](../../one-click.sh) invocation.
8 |
9 | If you do not specify the value, it defaults to **Riv-Prod**.
10 |
11 | ## How do I use the test-client
12 |
13 | ```sh
# Optional/Recommended: create a virtual environment (removes need for sudo)
15 | virtualenv .env
16 |
# Activate the virtual environment
18 | source .env/bin/activate
19 |
20 | # Install dependencies
21 | pip3 install -r src/test-client/requirements.txt
22 |
23 | # Confirm the client starts
24 | python3 src/test-client/app.py help
25 | ```
26 |
27 | ## What permissions does this tool require
28 |
29 | The caller must have `ssm:GetParameter*` rights to the resources `/riv/{stack-name}/userportal/url`.
30 |
31 | This requirement is due to the `get_userportal_address` dynamically fetching the endpoints.
32 |
33 | ## How do I register a new user
34 |
35 | Developers can create new users with any of the following examples.
36 |
37 | It is not supported to register the "same face" multiple times under different **UserId**.
38 |
39 | ```sh
40 | # Register the user nbachmei from a local image
41 | python3 ./app.py register -z Riv-Prod -r ca-central-1 -u nbachmei -p ./faces/nbachmei.jpg
42 |
43 | # Register the user nbachmei with properties from an idcard
44 | python3 ./app.py register-idcard -z Riv-Prod -r ca-central-1 -u nbachmei -p ./faces/nbachmei.jpg -c ./faces/private/license.jpg
45 | ```
46 |
47 | ## How do I update an existing user
48 |
49 | Developers can update existing users with any of the following examples.
50 |
51 | It is not supported to update a user before calling register. The service also checks that the new face is comparable (e.g., 95% confidence) to the registration photo.
52 |
53 | ```sh
54 | # Update the user data
55 | python3 ./app.py update -z Riv-Prod -r ca-central-1 -u nbachmei -p ./faces/different.jpg
56 | ```
57 |
58 | ## How do I authenticate a user
59 |
60 | Developers can perform an authentication check with any of the following examples.
61 |
62 | It is not supported to authenticate the user before calling register.
63 |
64 | ```sh
# Authenticate the user
66 | python3 ./app.py auth -z Riv-Prod -r ca-central-1 -u nbachmei -p ./faces/nbachmei.jpg
67 | ```
68 |
69 | ## How do I generate payloads for external tooling
70 |
71 | Developers can export valid payloads with any of the following examples.
72 |
73 | ```sh
74 | # Generate a random payload to stdout
75 | python3 ./app.py encode -o -
76 |
77 | # Write a random payload to a file
78 | python3 ./app.py encode -o payload.json
79 |
80 | # Write a local data
81 | python3 ./app.py encode -o payload.json -u nbachmei -p ./faces/nbachmei.jpg
82 |
83 | # Register the payload with curl
84 | curl -X POST -H "Content-Type: application/json" https://your-apigateway-address/register --data "@payload.json"
85 | ```
86 |
--------------------------------------------------------------------------------
/src/test-client/faces/amitgt.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/test-client/faces/amitgt.jpg
--------------------------------------------------------------------------------
/src/test-client/faces/lemull.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/test-client/faces/lemull.jpg
--------------------------------------------------------------------------------
/src/test-client/faces/nbachmei.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/test-client/faces/nbachmei.jpg
--------------------------------------------------------------------------------
/src/test-client/faces/pasqanth.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/test-client/faces/pasqanth.jpg
--------------------------------------------------------------------------------
/src/test-client/faces/private/.gitignore:
--------------------------------------------------------------------------------
1 | *.jpg
2 | *.png
--------------------------------------------------------------------------------
/src/test-client/requirements.txt:
--------------------------------------------------------------------------------
1 | wheel
2 | boto3
3 | click
4 | names
5 | pygments
6 | requests
7 |
--------------------------------------------------------------------------------
/src/textract/README.md:
--------------------------------------------------------------------------------
1 | # Textract Operations
2 |
[Amazon Textract](https://aws.amazon.com/textract/) is a machine learning service that automatically extracts text, handwriting and data from scanned documents that goes beyond simple optical character recognition (OCR) to identify, understand, and extract data from forms and tables. Today, many companies manually extract data from scanned documents like PDFs, images, tables and forms, or through simple OCR software that requires manual configuration, which often requires reconfiguration when the form changes. To overcome these manual and expensive processes, Textract uses machine learning to read and process any type of document, accurately extracting text, handwriting, tables and other data without any manual effort. You can quickly automate document processing and take action on the information extracted, whether it be automating loan processing or extracting information from invoices and receipts. Textract can extract the data in minutes vs. hours or days.
4 |
--------------------------------------------------------------------------------
/src/textract/extract-idcard/.dockerignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.bat
3 | examples/**
4 | .env
--------------------------------------------------------------------------------
/src/textract/extract-idcard/.vscode/example-launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Python: Current File",
6 | "type": "python",
7 | "request": "launch",
8 | "program": "handler.py",
9 | "console": "integratedTerminal",
10 | "env": {
11 | "REGION":"ca-central-1"
12 | }
13 | }
14 | ]
15 | }
--------------------------------------------------------------------------------
/src/textract/extract-idcard/Dockerfile:
--------------------------------------------------------------------------------
# Define function directory
ARG FUNCTION_DIR="/var/task"

# Build stage: install the function code and its pip dependencies into FUNCTION_DIR.
FROM python:3.8 as build-image

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Create function directory
RUN mkdir -p ${FUNCTION_DIR}

# Copy function code
COPY . ${FUNCTION_DIR}

# Install the runtime interface client
RUN pip install \
--target ${FUNCTION_DIR} \
awslambdaric

# Install the function's own dependencies alongside the code.
RUN pip install \
--target ${FUNCTION_DIR} \
-r ${FUNCTION_DIR}/requirements.txt

# Multi-stage build: grab a fresh copy of the base image
FROM public.ecr.aws/lambda/python:3.8

# Include global arg in this stage of the build
ARG FUNCTION_DIR
# Set working directory to function root directory
WORKDIR ${FUNCTION_DIR}

# Copy in the build image dependencies
COPY --from=build-image ${FUNCTION_DIR} ${FUNCTION_DIR}

# The Lambda base image supplies the runtime entrypoint; CMD names the handler.
#ENTRYPOINT [ "/usr/local/bin/python", "-m", "awslambdaric" ]
CMD [ "handler.function_main" ]
--------------------------------------------------------------------------------
/src/textract/extract-idcard/examples/passport_card.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/rekognition-identity-verification/3a1733f3a6ff91558815575b9df94285e0165bbe/src/textract/extract-idcard/examples/passport_card.jpeg
--------------------------------------------------------------------------------
/src/textract/extract-idcard/model.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | from typing import List, Mapping
3 | from base64 import b64decode
4 | #from aws_xray_sdk.core import xray_recorder
5 |
class InvalidImageError(Exception):
    '''
    Raised when the supplied image cannot be processed.
    '''
11 |
class TransientError(Exception):
    '''
    Raised for transient failures that are safe to retry.
    '''
17 |
class InputRequest:
    '''
    Strongly-typed view over the Lambda function's input event.
    '''
    def __init__(self, event:Mapping[str,str]) -> None:
        assert event is not None, "No event specified."
        assert 'UserId' in event, "Missing required event.UserId attribute"
        assert 'IdCard' in event, "Missing required event.IdCard attribute"
        assert 'ImageName' in event, "Missing required event.ImageName attribute"

        self.__user_id = event['UserId']
        # The IdCard payload arrives base64 encoded; keep the decoded bytes.
        self.__idcard_image = b64decode(event['IdCard'])
        self.__image_name = event['ImageName']
        # Optional free-form property bag; defaults to empty when absent.
        self.__properties = event.get('Properties', {})

    @property
    def user_id(self)->str:
        # Caller-supplied unique user identifier.
        return self.__user_id

    @property
    def idcard_image_bytes(self)->bytes:
        # Decoded binary image of the identity card.
        return self.__idcard_image

    @property
    def property_bag(self)->dict:
        # Additional attributes forwarded from the event, if any.
        return self.__properties

    @property
    def image_name(self) -> str:
        # Name under which the image should be stored/referenced.
        return self.__image_name
52 |
--------------------------------------------------------------------------------
/src/textract/extract-idcard/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | aws_xray_sdk
--------------------------------------------------------------------------------