├── .github
└── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── NOTICE.txt
├── README.md
├── deployment
├── build-s3-dist.sh
├── manifest-generator
│ ├── app.js
│ └── package.json
├── real-time-iot-device-monitoring-with-kinesis.yaml
└── run-unit-tests.sh
└── source
├── custom-resource
├── index.js
├── lib
│ ├── kinesis-helper.js
│ ├── metrics-helper.js
│ ├── s3-bucket-encryption-helper.js
│ └── website-helper.js
└── package.json
├── demo
└── send-messages.sh
├── update_ddb_from_stream
└── update_ddb_from_stream.py
└── web_site
├── css
├── custom.css
└── jquery-jvectormap-2.0.3.css
├── favicon.ico
├── index.html
└── js
├── app-variables.js.example
├── aws-cognito-sdk.min.js
└── dash.js
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | *Issue #, if available:*
2 |
3 | *Description of changes:*
4 |
5 |
6 | By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice.
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/dist
2 | **/*.zip
3 | **/.DS_Store
4 | /dev/**/*
5 | node_modules/
6 | package-lock.json
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 | All notable changes to this project will be documented in this file.
3 |
4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 |
7 | ## [1.1.1] - 2019-11-19
8 | ### Added
9 | - CHANGELOG templated file
10 | - Added licenses info in NOTICE.txt for aws-sdk, boto3, amazon-cognito-identity, aws-cognito-sdk, node-uuid, underscore and font-awesome
11 |
12 | ### Updated
13 | - The Solution to Node.js 12.x and Python 3.8
14 | - The license information to Apache 2.0 License
15 |
16 | ### Deleted
17 | - Third party sources (e.g., bootstrap, chart.js, jquery, font-awesome). They will be added through build scripts.
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check [existing open](https://github.com/awslabs/real-time-iot-device-monitoring-with-kinesis/issues), or [recently closed](https://github.com/awslabs/real-time-iot-device-monitoring-with-kinesis/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *master* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 |
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/awslabs/real-time-iot-device-monitoring-with-kinesis/labels/help%20wanted) issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](https://github.com/awslabs/real-time-iot-device-monitoring-with-kinesis/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
--------------------------------------------------------------------------------
/NOTICE.txt:
--------------------------------------------------------------------------------
1 | Real Time IoT Device Monitoring with Kinesis Analytics
2 |
3 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 | Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except
5 | in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/
6 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
7 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the
8 | specific language governing permissions and limitations under the License.
9 |
10 | ==THIRD PARTY LICENSES==
11 |
12 | ** bootstrap (v3.3.7); version 3.3.7 -- https://github.com/twbs/bootstrap
13 | Copyright 2011-2015 Twitter, Inc.
14 |
15 | The MIT License (MIT)
16 |
17 | Copyright (c) 2011-2016 Twitter, Inc.
18 |
19 | Permission is hereby granted, free of charge, to any person obtaining a copy
20 | of this software and associated documentation files (the "Software"), to deal
21 | in the Software without restriction, including without limitation the rights
22 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
23 | copies of the Software, and to permit persons to whom the Software is
24 | furnished to do so, subject to the following conditions:
25 |
26 | The above copyright notice and this permission notice shall be included in
27 | all copies or substantial portions of the Software.
28 |
29 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
30 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
31 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
32 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
33 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
34 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
35 | THE SOFTWARE.
36 |
37 | -----
38 |
39 | ** Moment; version 2.20.1 -- https://github.com/moment/moment
40 | Copyright (c) JS Foundation and other contributors
41 |
42 | Copyright (c) JS Foundation and other contributors
43 |
44 | Permission is hereby granted, free of charge, to any person
45 | obtaining a copy of this software and associated documentation
46 | files (the "Software"), to deal in the Software without
47 | restriction, including without limitation the rights to use,
48 | copy, modify, merge, publish, distribute, sublicense, and/or sell
49 | copies of the Software, and to permit persons to whom the
50 | Software is furnished to do so, subject to the following
51 | conditions:
52 |
53 | The above copyright notice and this permission notice shall be
54 | included in all copies or substantial portions of the Software.
55 |
56 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
57 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
58 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
59 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
60 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
61 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
62 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
63 | OTHER DEALINGS IN THE SOFTWARE.
64 |
65 | -----
66 |
67 | ** jquery.min; version 2.2.4 -- https://www.jquery.com
68 | Copyright jQuery Foundation and other contributors, https://jquery.org/ This software consists of voluntary contributions made by many
69 | individuals.
70 |
71 | Copyright jQuery Foundation and other contributors, https://jquery.org/
72 |
73 | This software consists of voluntary contributions made by many individuals. For
74 | exact contribution history, see the revision history available at
75 | https://github.com/jquery/jquery-ui
76 |
77 | The following license applies to all parts of this software except as
78 | documented below:
79 |
80 | ====
81 | Permission is hereby granted, free of charge, to any person obtaining a copy of
82 | this software and associated documentation files (the "Software"), to deal in
83 | the Software without restriction, including without limitation the rights to
84 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
85 | of the Software, and to permit persons to whom the Software is furnished to do
86 | so, subject to the following conditions:
87 |
88 | The above copyright notice and this permission notice shall be included in all
89 | copies or substantial portions of the Software.
90 |
91 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
92 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
93 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
94 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
95 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
96 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
97 | SOFTWARE.
98 | ====
99 | Copyright and related rights for sample code are waived via CC0. Sample code is
100 | defined as all source code contained within the demos directory.
101 |
102 | CC0: http://creativecommons.org/publicdomain/zero/1.0/
103 |
104 | ====
105 |
106 | All files located in the node_modules and external directories are externally
107 | maintained libraries used by this software which have their own licenses; we
108 | recommend you read them, as their terms may differ from the terms above.
109 |
110 | -----
111 |
112 | ** Chart.js; version 2.3.0 -- https://github.com/chartjs/Chart.js
113 | /*!
114 | * Chart.js
115 | * http://chartjs.org/
116 | * Version: 2.3.0
117 | *
118 | * Copyright 2016 Nick Downie
119 | * Released under the MIT license
120 | * https://github.com/chartjs/Chart.js/blob/master/LICENSE.md
121 | */
122 |
123 | The MIT License (MIT)
124 |
125 | Copyright (c) 2018 Chart.js Contributors
126 |
127 | Permission is hereby granted, free of charge, to any person obtaining a copy of
128 | this software and associated documentation files (the "Software"), to deal in
129 | the Software without restriction, including without limitation the rights to
130 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
131 | of the Software, and to permit persons to whom the Software is furnished to do
132 | so, subject to the following conditions:
133 |
134 | The above copyright notice and this permission notice shall be included in all
135 | copies or substantial portions of the Software.
136 |
137 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
138 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
139 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
140 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
141 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
142 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
143 | SOFTWARE.
144 |
145 | -----
146 |
147 | ** AWS SDK under the Apache License Version 2.0
148 |
149 | -----
150 |
151 | ** Boto3 AWS SDK under the Apache License Version 2.0
152 |
153 | -----
154 |
155 | ** amazon-cognito-identity under the Apache License Version 2.0
156 |
157 | -----
158 |
159 | ** aws-cognito-sdk under the Apache License Version 2.0
160 |
161 | -----
162 |
163 | ** node-uuid under the Massachusetts Institute of Technology (MIT) license
164 |
165 | -----
166 |
167 | ** underscore under the Massachusetts Institute of Technology (MIT) license
168 |
169 | -----
170 |
171 | ** font-awesome under the Massachusetts Institute of Technology (MIT) license
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Deprecation Notice
2 |
3 | This AWS Solution has been archived and is no longer maintained by AWS. To discover other solutions, please visit the [AWS Solutions Library](https://aws.amazon.com/solutions/).
4 |
5 | # Real Time IoT Device Monitoring with Kinesis Analytics
6 | AWS Solution for analyzing IoT Device Connectivity using Kinesis Analytics
7 |
8 | ## OS/Python Environment Setup
9 | ```bash
10 | sudo apt-get update
11 | sudo apt-get install zip wget gawk sed -y
12 | ```
13 |
14 | ## Building Lambda Package
15 | ```bash
16 | cd deployment/
17 | ./build-s3-dist.sh source-bucket-base-name solution-name solution-version
18 | ```
19 | source-bucket-base-name should be the base name for the S3 bucket location where the template will source the Lambda code from.
20 | The template will append '-[region_name]' to this value.
21 | For example: ./build-s3-dist.sh solutions
22 | The template will then expect the source code to be located in the solutions-[region_name] bucket
23 |
24 | ## CF template and Lambda function
25 | The CF Template is located in `deployment/global-s3-assets` directory. The Lambda function is located in `deployment/regional-s3-assets` directory.
26 |
27 | ## Collection of operational metrics
28 |
29 | This solution collects anonymous operational metrics to help AWS improve the quality of features of the solution. For more information, including how to disable this capability, please see the [implementation guide](https://docs.aws.amazon.com/solutions/latest/real-time-iot-device-monitoring-with-kinesis/appendix-c.html).
30 |
31 | ***
32 |
33 | Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
34 |
35 | Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
36 |
37 | http://www.apache.org/licenses/
38 |
39 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and limitations under the License.
40 |
--------------------------------------------------------------------------------
/deployment/build-s3-dist.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # This assumes all of the OS-level configuration has been completed and git repo has already been cloned
4 | #
5 | # This script should be run from the repo's deployment directory
6 | # cd deployment
7 | # ./build-s3-dist.sh source-bucket-base-name trademarked-solution-name version-code
8 | #
9 | # Parameters:
10 | #  - source-bucket-base-name: Name for the S3 bucket location where the template will source the Lambda
11 | #    code from. The template will append '-[region_name]' to this bucket name.
12 | #    For example: ./build-s3-dist.sh solutions my-solution v1.0.0
13 | #    The template will then expect the source code to be located in the solutions-[region_name] bucket
14 | #
15 | #  - trademarked-solution-name: name of the solution for consistency
16 | #
17 | #  - version-code: version of the solution
18 | 
19 | set -e
20 | 
21 | # Check to see if input has been provided:
22 | if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]; then
23 |     echo "Please provide the base source bucket name, trademark approved solution name and version where the lambda code will eventually reside."
24 |     echo "For example: ./build-s3-dist.sh solutions trademarked-solution-name v1.0.0"
25 |     exit 1
26 | fi
27 | 
28 | # Get reference for all important folders
29 | template_dir="$PWD"
30 | template_dist_dir="$template_dir/global-s3-assets"
31 | build_dist_dir="$template_dir/regional-s3-assets"
32 | source_dir="$template_dir/../source"
33 | 
34 | echo "------------------------------------------------------------------------------"
35 | echo "[Init] Clean old dist folders"
36 | echo "------------------------------------------------------------------------------"
37 | echo "rm -rf $template_dist_dir"
38 | rm -rf "$template_dist_dir"
39 | echo "mkdir -p $template_dist_dir"
40 | mkdir -p "$template_dist_dir"
41 | echo "rm -rf $build_dist_dir"
42 | rm -rf "$build_dist_dir"
43 | echo "mkdir -p $build_dist_dir"
44 | mkdir -p "$build_dist_dir"
45 | 
46 | echo "------------------------------------------------------------------------------"
47 | echo "[Packaging] Global Assets: Cloudformation Templates"
48 | echo "------------------------------------------------------------------------------"
49 | echo "copy yaml templates and rename"
50 | cp "$template_dir/real-time-iot-device-monitoring-with-kinesis.yaml" "$template_dist_dir/"
51 | cd "$template_dist_dir"
52 | # Rename all *.yaml to *.template
53 | for f in *.yaml; do
54 |     mv -- "$f" "${f%.yaml}.template"
55 | done
56 | 
57 | echo "Updating code source bucket name in template with $1"
58 | bucket_name="s/%%BUCKET_NAME%%/$1/g"
59 | echo "sed -i -e $bucket_name $template_dist_dir/*.template"
60 | sed -i -e "$bucket_name" "$template_dist_dir"/*.template
61 | 
62 | echo "Updating code source solution name in template with $2"
63 | solution_name="s/%%SOLUTION_NAME%%/$2/g"
64 | echo "sed -i -e $solution_name $template_dist_dir/*.template"
65 | sed -i -e "$solution_name" "$template_dist_dir"/*.template
66 | 
67 | echo "Updating code source version in template with $3"
68 | s_version="s/%%VERSION%%/$3/g"
69 | echo "sed -i -e $s_version $template_dist_dir/*.template"
70 | sed -i -e "$s_version" "$template_dist_dir"/*.template
71 | 
72 | echo "------------------------------------------------------------------------------"
73 | echo "[Packaging] Region Assets: Source"
74 | echo "------------------------------------------------------------------------------"
75 | 
76 | # Build Custom Resource
77 | echo "Building CFN custom resource helper Lambda function"
78 | cd "$source_dir/custom-resource"
79 | npm install
80 | npm run build
81 | npm run zip
82 | cp ./dist/custom-resource-helper.zip "$build_dist_dir/custom-resource-helper.zip"
83 | rm -rf dist
84 | rm -rf node_modules
85 | 
86 | # Build UpdateDDBLambda
87 | echo "Building UpdateDDBLambda"
88 | cd "$source_dir/update_ddb_from_stream"
89 | zip -r "$build_dist_dir/update_ddb_from_stream.zip" *
90 | 
91 | # Build Demo script
92 | echo "Building Demo Script"
93 | cd "$source_dir/demo"
94 | zip -r "$build_dist_dir/demo.zip" *
95 | 
96 | echo "Getting third party libraries for web site"
97 | cd "$source_dir/web_site"
98 | npm install bootstrap@3.3.7
99 | cp node_modules/bootstrap/dist/css/bootstrap.min.css css/
100 | cp node_modules/bootstrap/dist/js/bootstrap.min.js js/
101 | 
102 | npm install font-awesome
103 | cp -r node_modules/font-awesome/fonts ./
104 | cp node_modules/font-awesome/css/font-awesome.min.css css/
105 | 
106 | npm install chart.js
107 | cp node_modules/chart.js/dist/Chart.min.js js/
108 | 
109 | npm install amazon-cognito-identity-js
110 | cp node_modules/amazon-cognito-identity-js/dist/amazon-cognito-identity.min.js js/
111 | 
112 | npm install jquery
113 | cp node_modules/jquery/dist/jquery.min.js js/
114 | 
115 | rm -rf node_modules
116 | rm -f package-lock.json  # -f: don't abort (set -e) if npm produced no lockfile
117 | 
118 | echo "Copying web site content to $build_dist_dir"
119 | cd "$source_dir"
120 | cp -r web_site "$build_dist_dir/"
121 | 
122 | echo "Generating web site manifest"
123 | cd "$template_dir/manifest-generator"
124 | npm install
125 | node app.js --target "$build_dist_dir/web_site" --output "$build_dist_dir/web-site-manifest.json"
126 | 
127 | cd "$template_dir"
128 | 
129 | echo "Completed building distribution"
130 | 
--------------------------------------------------------------------------------
/deployment/manifest-generator/app.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 |
14 | /**
15 | * @author Solution Builders
16 | */
17 |
18 | 'use strict';
19 |
20 | const fs = require('fs');
21 | const path = require('path');
22 | const args = require('minimist')(process.argv.slice(2));
23 |
/**
 * Lists the names of the regular files directly inside a directory
 * (non-recursive). Sub-directories are logged to the console and skipped,
 * matching the original behavior; other entry types (e.g. symlinks, per
 * lstat) are ignored.
 *
 * @param {string} dirPath - Directory to scan.
 * @returns {string[]} Names (not full paths) of files found in dirPath.
 */
let getFileList = function(dirPath) {
    // Parameter renamed from `path` (which shadowed the `path` module and
    // forced a manual [a, b].join('/') workaround for joining paths).
    let fileList = [];

    for (const entry of fs.readdirSync(dirPath)) {
        const fileInfo = fs.lstatSync(path.join(dirPath, entry));
        if (fileInfo.isFile()) {
            fileList.push(entry);
        } else if (fileInfo.isDirectory()) {
            console.log(path.join(dirPath, entry));
        }
    }

    return fileList;
};
43 |
// Recursively collect every file path under `dir`, synchronously.
// Returns absolute-or-relative paths joined from `dir`; an existing
// accumulator array may be passed in and is returned (possibly replaced).
let walkSync = function(dir, filelist) {
    filelist = filelist || [];
    for (const entry of fs.readdirSync(dir)) {
        const fullPath = path.join(dir, entry);
        if (fs.statSync(fullPath).isDirectory()) {
            // Descend into sub-directories, threading the accumulator.
            filelist = walkSync(fullPath, filelist);
        } else {
            filelist.push(fullPath);
        }
    }
    return filelist;
};
59 |
// Accumulator for walkSync and the manifest document written to --output.
let _filelist = [];
let _manifest = {
    files: []
};

// Validate required CLI arguments before doing any work.
if (!args.hasOwnProperty('target')) {
    console.log('--target parameter missing. This should be the target directory containing content for the manifest.');
    process.exit(1);
}

if (!args.hasOwnProperty('output')) {
    // Fixed typo ("--ouput") and wording: --output is the manifest file path.
    console.log('--output parameter missing. This should be the output file where the manifest will be generated.');
    process.exit(1);
}

console.log(`Generating a manifest file ${args.output} for directory ${args.target}`);

walkSync(args.target, _filelist);

// Store paths relative to the target directory, normalized to forward
// slashes so the manifest content is identical regardless of host OS
// (the previous string replace assumed '/' separators).
for (let i = 0; i < _filelist.length; i++) {
    _manifest.files.push(path.relative(args.target, _filelist[i]).split(path.sep).join('/'));
}

fs.writeFileSync(args.output, JSON.stringify(_manifest, null, 4));
console.log(`Manifest file ${args.output} generated.`);
--------------------------------------------------------------------------------
/deployment/manifest-generator/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "manifest-generator",
3 | "version": "0.0.0",
4 | "private": true,
5 | "description": "Helper utility to create web site manifest for deployment",
6 | "main": "app.js",
7 | "author": {
8 | "name": "aws-solutions-builder"
9 | },
10 | "dependencies": {
11 |     "minimist": "^1.2.6"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/deployment/real-time-iot-device-monitoring-with-kinesis.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: "2010-09-09"
2 | Description: "(SO0039) - Real-Time IoT Device Monitoring with Kinesis Analytics: Analyze IoT Device Connectivity using Kinesis Analytics, Version %%VERSION%%"
3 | Parameters:
4 | UserName:
5 | Description: The username of the user you want to create in Amazon Cognito.
6 | Type: String
7 | AllowedPattern: "^(?=\\s*\\S).*$"
8 | ConstraintDescription: " cannot be empty"
9 | MinLength: 1
10 | UserEmail:
11 | Type: String
12 | Description: Email address for dashboard user. After successfully launching this
13 | solution, you will receive an email with instructions to log in.
14 | AllowedPattern: ^[_A-Za-z0-9-\+]+(\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\.[A-Za-z0-9]+)*(\.[A-Za-z]{2,})$
15 | MinLength: 1
16 | IoTTopicName:
17 | Type: String
18 | MinLength: 1
19 | Default: "iot_device_analytics"
20 | Description: "IoT Topic Name that your devices will send messages to."
21 |
22 | Metadata:
23 | AWS::CloudFormation::Interface:
24 | ParameterGroups:
25 | - Label:
26 | default: Cognito User for Access to the Dashboard
27 | Parameters:
28 | - UserName
29 | - UserEmail
30 | - Label:
31 | default: IoT Settings
32 | Parameters:
33 | - IoTTopicName
34 | ParameterLabels:
35 | UserName:
36 | default: "User Name"
37 | UserEmail:
38 | default: "User Email Address"
39 | IoTTopicName:
40 | default: "IoT Topic to monitor"
41 |
42 | Mappings:
43 | SourceCode:
44 | General:
45 | S3Bucket: '%%BUCKET_NAME%%'
46 | KeyPrefix: '%%SOLUTION_NAME%%/%%VERSION%%'
47 | LogPrefix: '%%SOLUTION_NAME%%/'
48 | KinesisAnalyticsApp:
49 | Outputs:
50 | FireHoseStreamName: PROCESSED_METRICS_S3_STREAM
51 | LambdaStreamName: UPDATE_DDB_LAMBDA_STREAM
52 | DDB:
53 | Scaling:
54 | ReadTargetUtilization: 70
55 | ReadCapacityMin: 1
56 | ReadCapacityMax: 100
57 | WriteTargetUtilization: 50
58 | WriteCapacityMin: 5
59 | WriteCapacityMax: 1000
60 | Solution:
61 | Data:
62 | ID: SO0039
63 | Version: '%%VERSION%%'
64 | SendAnonymousUsageData: 'True'
65 |
66 | Resources:
67 | IoTMetricsLogGroup:
68 | Type: AWS::Logs::LogGroup
69 | Properties:
70 | RetentionInDays: 7
71 |
72 | IotMetricsLogStream:
73 | Type: AWS::Logs::LogStream
74 | Properties:
75 | LogGroupName: !Ref IoTMetricsLogGroup
76 |
77 | IoTTopicRule:
78 | Type: AWS::IoT::TopicRule
79 | Properties:
80 | TopicRulePayload:
81 | Description: 'Send IoT Device data in raw format to Kinesis Analytics'
82 | AwsIotSqlVersion: '2016-03-23'
83 | RuleDisabled: 'false'
84 | Sql: !Sub 'SELECT *, parse_time("yyyy-MM-dd HH:mm:ss", timestamp()) as ts FROM "${IoTTopicName}"'
85 | Actions:
86 | - Firehose:
87 | DeliveryStreamName: !Ref RawMetricsDeliveryStream
88 | RoleArn: !Sub '${IoTTopicRuleRole.Arn}'
89 | Separator: "\n"
90 |
91 | IoTTopicRuleRole:
92 | Type: AWS::IAM::Role
93 | Properties:
94 | AssumeRolePolicyDocument:
95 | Version: '2012-10-17'
96 | Statement:
97 | - Effect: Allow
98 | Principal:
99 | Service:
100 | - 'iot.amazonaws.com'
101 | Action:
102 | - 'sts:AssumeRole'
103 | Path: /
104 | Policies:
105 |
106 | # Posts to RawMetricsDeliveryStream
107 | - PolicyName: 'IoTTopicRulePolicy'
108 | PolicyDocument:
109 | Version: '2012-10-17'
110 | Statement:
111 | Effect: Allow
112 | Action:
113 | - firehose:DescribeDeliveryStream
114 | - firehose:ListDeliveryStreams
115 | - firehose:PutRecord
116 | - firehose:PutRecordBatch
117 | Resource: !Sub '${RawMetricsDeliveryStream.Arn}'
118 |
119 | RawMetricsDeliveryStream:
120 | Type: AWS::KinesisFirehose::DeliveryStream
121 | Properties:
122 | S3DestinationConfiguration:
123 | BucketARN: !GetAtt RawMetricsBucket.Arn
124 | BufferingHints:
125 | IntervalInSeconds: 60
126 | SizeInMBs: 10
127 | CloudWatchLoggingOptions:
128 | Enabled: true
129 | LogGroupName: !Ref IoTMetricsLogGroup
130 | LogStreamName: 'RawMetricsS3Delivery'
131 | CompressionFormat: 'UNCOMPRESSED'
132 | EncryptionConfiguration:
133 | NoEncryptionConfig: 'NoEncryption'
134 | Prefix: !FindInMap
135 | - SourceCode
136 | - General
137 | - LogPrefix
138 | RoleARN: !GetAtt RawMetricsDeliveryStreamRole.Arn
139 |
140 | RawMetricsBucket:
141 | DeletionPolicy: Retain
142 | Type: AWS::S3::Bucket
143 | Properties:
144 | BucketEncryption:
145 | ServerSideEncryptionConfiguration:
146 | - ServerSideEncryptionByDefault:
147 | SSEAlgorithm: AES256
148 | PublicAccessBlockConfiguration:
149 | BlockPublicAcls: true
150 | BlockPublicPolicy: true
151 | IgnorePublicAcls: true
152 | RestrictPublicBuckets: true
153 | LoggingConfiguration:
154 | DestinationBucketName: !Ref LogsBucket
155 | LogFilePrefix: raw-metrics-bucket/
156 | LifecycleConfiguration:
157 | Rules:
158 | - Id: ExpirationRule
159 | Status: Enabled
160 | ExpirationInDays: '7'
161 | Metadata:
162 | cfn_nag:
163 | rules_to_suppress:
164 | - id: W51
165 | reason: "This is a private bucket. Does not require bucket policy"
166 |
167 | RawMetricsDeliveryStreamRole:
168 | Type: AWS::IAM::Role
169 | Properties:
170 | AssumeRolePolicyDocument:
171 | Version: '2012-10-17'
172 | Statement:
173 | - Effect: Allow
174 | Principal:
175 | Service:
176 | - 'firehose.amazonaws.com'
177 | Action:
178 | - 'sts:AssumeRole'
179 | Path: /
180 | Policies:
181 |
182 | # Puts objects in RawMetricsBucket
183 | - PolicyName: 'RawMetricsS3UploadPolicy'
184 | PolicyDocument:
185 | Version: '2012-10-17'
186 | Statement:
187 | - Effect: Allow
188 | Action:
189 | - s3:AbortMultipartUpload
190 | - s3:GetBucketLocation
191 | - s3:GetObject
192 | - s3:PutObject
193 | - s3:ListBucket
194 | - s3:ListBucketMultipartUploads
195 | Resource:
196 | - !Sub '${RawMetricsBucket.Arn}'
197 | - !Sub '${RawMetricsBucket.Arn}/'
198 | - !Sub '${RawMetricsBucket.Arn}/*'
199 |
200 | # Write to CloudWatch
201 | - PolicyName: RawMetricsDeliveryStreamLogging
202 | PolicyDocument:
203 | Version: '2012-10-17'
204 | Statement:
205 | - Effect: Allow
206 | Action:
207 | - logs:CreateLogGroup
208 | - logs:CreateLogStream
209 | - logs:PutDestination
210 | - logs:PutLogEvents
211 | Resource:
212 | !Join
213 | - ''
214 | - - 'arn:aws:logs:'
215 | - !Ref AWS::Region
216 | - ':'
217 | - !Ref AWS::AccountId
218 | - ':log-group:*'
219 | Metadata:
220 | cfn_nag:
221 | rules_to_suppress:
222 | - id: W11
223 | reason: "The wildcard action in the logs policy is required"
224 |
225 | KinesisAnalyticsApp:
226 | Type: AWS::KinesisAnalytics::Application
227 | Properties:
228 | ApplicationDescription: 'IOT Device Monitoring Analysis'
229 | ApplicationCode: !Sub
230 | - |
231 | -- Create a common format to be used for all the different metrics for IoT device monitoring
232 | CREATE OR REPLACE STREAM FAN_OUT_STREAM
233 | ( eventTimeStamp TIMESTAMP, computationType VARCHAR(128), category VARCHAR(128), subcategory INTEGER, unit VARCHAR(128), unitValue DOUBLE);
234 |
235 | -- 1. Create an output stream, which is used to send unique number of connected IoT devices to the destination
236 | CREATE OR REPLACE PUMP connected_device_pump AS INSERT INTO FAN_OUT_STREAM
237 | SELECT current_timestamp as eventTimeStamp, 'ConnectedDevicesCount', 'None', 0, 'Count', * FROM (
238 | SELECT STREAM * FROM TABLE(COUNT_DISTINCT_ITEMS_TUMBLING(
239 | CURSOR(SELECT STREAM * FROM source_sql_stream_001),
240 | 'device',
241 | 60
242 | )
243 | )
244 | );
245 |
246 | -- 2. Max of the data point (temp) per connected device
247 | CREATE OR REPLACE PUMP per_device_max_pump AS INSERT INTO FAN_OUT_STREAM
248 | SELECT STREAM
249 | STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE) AS eventTimeStamp,
250 | 'PerDeviceMaxTemp',
251 | "device",
252 | 0,
253 | 'Maximum',
254 | MAX("temp") AS max_value
255 | FROM source_sql_stream_001
256 | GROUP BY "device", STEP(source_sql_stream_001.rowtime BY INTERVAL '1' MINUTE), STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE);
257 |
258 | -- 3. Min of the data point (temp) per connected device
259 | CREATE OR REPLACE PUMP per_device_min_pump AS INSERT INTO FAN_OUT_STREAM
260 | SELECT STREAM
261 | STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE) AS eventTimeStamp,
262 | 'PerDeviceMinTemp',
263 | "device",
264 | 0,
265 | 'Minimum',
266 | MIN("temp") AS min_value
267 | FROM source_sql_stream_001
268 | GROUP BY "device", STEP(source_sql_stream_001.rowtime BY INTERVAL '1' MINUTE), STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE);
269 |
270 | -- 4. Avg of the data point (temp) per connected device
271 | CREATE OR REPLACE PUMP per_device_avg_pump AS INSERT INTO FAN_OUT_STREAM
272 | SELECT STREAM
273 | STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE) AS eventTimeStamp,
274 | 'PerDeviceAvgTemp',
275 | "device",
276 | 0,
277 | 'Average',
278 | AVG("temp") AS avg_value
279 | FROM source_sql_stream_001
280 | GROUP BY "device", STEP(source_sql_stream_001.rowtime BY INTERVAL '1' MINUTE), STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE);
281 |
282 | -- Setup for Anomaly detection
283 | CREATE OR REPLACE STREAM temp_stream (temp INTEGER, device varchar(4), anomaly_score DOUBLE);
284 |
285 | CREATE OR REPLACE PUMP temp_pump AS INSERT INTO temp_stream
286 | SELECT STREAM "temp", "device", anomaly_score
287 | FROM TABLE(RANDOM_CUT_FOREST(
288 | CURSOR(SELECT STREAM * FROM source_sql_stream_001)
289 | ));
290 |
291 | -- 5. Anomaly detection on the value sent (temp)
292 | CREATE OR REPLACE PUMP anomaly_pump AS INSERT INTO FAN_OUT_STREAM
293 | SELECT STREAM
294 | STEP(temp_stream.rowtime BY INTERVAL '1' MINUTE) as eventTimeStamp,
295 | 'DeviceTempAnomalyScore',
296 | device,
297 | temp,
298 | 'AnomalyScore',
299 | anomaly_score
300 | FROM temp_stream
301 | ORDER BY STEP(temp_stream.rowtime BY INTERVAL '1' MINUTE), anomaly_score DESC;
302 |
303 | -- 6. Average of the data point (temp) across all devices
304 | CREATE OR REPLACE PUMP avg_aggregate_pump AS INSERT INTO FAN_OUT_STREAM
305 | SELECT STREAM
306 | STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE) AS event_timestamp,
307 | 'AvgTempValue',
308 | 'All',
309 | 0,
310 | 'Average',
311 | AVG("temp") AS avg_value
312 | FROM source_sql_stream_001
313 | GROUP BY STEP(source_sql_stream_001.rowtime BY INTERVAL '1' MINUTE), STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE);
314 |
315 | -- 7. Min of the data point (temp) across all devices
316 | CREATE OR REPLACE PUMP min_aggregate_pump AS INSERT INTO FAN_OUT_STREAM
317 | SELECT STREAM
318 | STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE) AS event_timestamp,
319 | 'MinTempValue',
320 | 'All',
321 | 0,
322 | 'Minimum',
323 | MIN("temp") AS min_value
324 | FROM source_sql_stream_001
325 | GROUP BY STEP(source_sql_stream_001.rowtime BY INTERVAL '1' MINUTE), STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE);
326 |
327 | -- 8. Max of the data point (temp) across all devices
328 | CREATE OR REPLACE PUMP max_aggregate_pump AS INSERT INTO FAN_OUT_STREAM
329 | SELECT STREAM
330 | STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE) AS event_timestamp,
331 | 'MaxTempValue',
332 | 'All',
333 | 0,
334 | 'Maximum',
335 | MAX("temp") AS max_value
336 | FROM source_sql_stream_001
337 | GROUP BY STEP(source_sql_stream_001.rowtime BY INTERVAL '1' MINUTE), STEP(source_sql_stream_001."COL_time" BY INTERVAL '1' MINUTE);
338 |
339 | --Setup for 9-14
340 | -- Sort stream and apply sessions
341 | CREATE OR REPLACE STREAM sorted_stream (event_timestamp TIMESTAMP, device VARCHAR(4), flow INTEGER, temp INTEGER, humidity INTEGER);
342 |
343 | CREATE OR REPLACE PUMP sort_pump AS INSERT INTO sorted_stream
344 | SELECT STREAM "COL_time" AS event_timestamp, "device", "flow", "temp", "humidity"
345 | FROM source_sql_stream_001
346 | ORDER BY STEP(source_sql_stream_001.rowtime BY INTERVAL '10' SECOND), "COL_time";
347 |
348 |
349 | CREATE OR REPLACE STREAM time_between_events_stream (event_timestamp TIMESTAMP, seconds_between_events INTEGER, device VARCHAR(4));
350 |
351 | CREATE OR REPLACE PUMP time_between_events_pump AS INSERT INTO time_between_events_stream
352 | SELECT STREAM event_timestamp,
353 | -- Calculates the time between session events.
354 |             -- tsdiff takes the difference between two timestamps in ms
355 | -- compares the current timestamp in the row to the last timestamp
356 | TSDIFF(event_timestamp,
357 | -- Lag pulls a column from a previous event relative to the current event
358 | -- In this case, we use 1 because we want the time between the two events
359 | LAG(event_timestamp, 1) OVER W1) / 1000
360 | AS seconds_between_events,
361 | device
362 | FROM sorted_stream
363 | WINDOW W1 as (
364 | -- If no unique session_id exists or no client event for ending a session, you must define the start and end of a session.
365 | -- If users are expected to have multiple sessions online at a given time, another unique identifier must be added to the partition.
366 | PARTITION BY device
367 | RANGE INTERVAL '1' HOUR PRECEDING
368 | );
369 |
370 |
371 | CREATE OR REPLACE STREAM connected_flag_stream (new_session_indicator BIGINT, event_timestamp TIMESTAMP, seconds_between_events INTEGER, device VARCHAR(4));
372 |
373 | CREATE OR REPLACE PUMP connected_flag_pump AS INSERT INTO connected_flag_stream
374 | SELECT STREAM
375 | -- Flag new connected sessions which makes other analytics easier
376 | -- Assumes no device has more than one active session
377 | (CASE
378 | -- time interval >= 0, part of the same session
379 | WHEN seconds_between_events >= 0 AND seconds_between_events <= 60 THEN 0
380 | -- time interval null, new session
381 | WHEN seconds_between_events IS NULL OR seconds_between_events > 60 THEN UNIX_TIMESTAMP(event_timestamp)
382 | ELSE NULL
383 | END) AS new_session_indicator,
384 | event_timestamp, seconds_between_events, device
385 | FROM time_between_events_stream;
386 |
387 | --Group sessions together
388 | CREATE OR REPLACE STREAM device_session_stream (sesson_id VARCHAR(128), seconds_between_events INTEGER, device VARCHAR(4));
389 |
390 | CREATE OR REPLACE PUMP device_session_pump AS INSERT INTO device_session_stream
391 | SELECT STREAM (device || '_' ||
392 |             -- If users are expected to have multiple sessions online at a given time, another unique identifier must be added here.
393 | CAST(MAX(new_session_indicator) OVER W1 AS VARCHAR(128))
394 | ) as session_id, seconds_between_events, device
395 | FROM connected_flag_stream
396 | WINDOW W1 AS (
397 | PARTITION BY device
398 | RANGE INTERVAL '1' HOUR PRECEDING
399 | );
400 |
401 | -- Calculate connected time events for devices
402 | CREATE OR REPLACE STREAM session_connected_time_stream (sesson_id VARCHAR(128), connected_time_seconds INTEGER);
403 |
404 | CREATE OR REPLACE PUMP session_connected_time_pump AS INSERT INTO session_connected_time_stream
405 | SELECT STREAM sesson_id, SUM(seconds_between_events) OVER W1 AS connected_time_seconds
406 | FROM device_session_stream
407 | WINDOW W1 AS (
408 | PARTITION BY sesson_id
409 | RANGE INTERVAL '1' HOUR PRECEDING
410 | );
411 |
412 | --Per session time stream for disconnected devices that came back online within an hour
413 | CREATE OR REPLACE STREAM per_session_disconnected_time_stream (sesson_id VARCHAR(128), max_disconnected_time_seconds INTEGER, avg_disconnected_time_seconds INTEGER, min_disconnected_time_seconds INTEGER);
414 |
415 | CREATE OR REPLACE PUMP per_session_disconnected_time_pump AS INSERT INTO per_session_disconnected_time_stream
416 | SELECT STREAM sesson_id,
417 | MAX(connected_time_seconds) AS max_disconnected_time_seconds,
418 | AVG(connected_time_seconds) AS avg_disconnected_time_seconds,
419 | MIN(connected_time_seconds) AS min_disconnected_time_seconds
420 | FROM session_connected_time_stream
421 | WHERE connected_time_seconds > 60
422 | GROUP BY STEP(session_connected_time_stream.rowtime BY INTERVAL '10' SECOND), sesson_id;
423 |
424 | --9. Max for disconnected devices that came back online within an hour
425 | CREATE OR REPLACE PUMP maximum_disconnected_time_pump AS INSERT INTO FAN_OUT_STREAM
426 | SELECT STREAM
427 | STEP(per_session_disconnected_time_stream.rowtime BY INTERVAL '10' SECOND) AS event_timestamp,
428 | 'MaxDisconnTime',
429 | 'None',
430 | 0,
431 | 'Maximum',
432 | MAX(max_disconnected_time_seconds) AS max_value
433 | FROM per_session_disconnected_time_stream
434 | GROUP BY STEP(per_session_disconnected_time_stream.rowtime BY INTERVAL '10' SECOND);
435 |
436 | --10. Min for disconnected devices that came back online within an hour
437 | CREATE OR REPLACE PUMP minimum_disconnected_time_pump AS INSERT INTO FAN_OUT_STREAM
438 | SELECT STREAM
439 | STEP(per_session_disconnected_time_stream.rowtime BY INTERVAL '10' SECOND) AS event_timestamp,
440 | 'MinDisconnTime',
441 | 'None',
442 | 0,
443 | 'Minimum',
444 | MIN(min_disconnected_time_seconds) AS min_value
445 | FROM per_session_disconnected_time_stream
446 | GROUP BY STEP(per_session_disconnected_time_stream.rowtime BY INTERVAL '10' SECOND);
447 |
448 | --11. Avg for disconnected devices that came back online within an hour
449 | CREATE OR REPLACE PUMP average_disconnected_time_pump AS INSERT INTO FAN_OUT_STREAM
450 | SELECT STREAM
451 | STEP(per_session_disconnected_time_stream.rowtime BY INTERVAL '10' SECOND) AS event_timestamp,
452 | 'AvgDisconnTime',
453 | 'None',
454 | 0,
455 | 'Average',
456 | AVG(avg_disconnected_time_seconds) AS avg_value
457 | FROM per_session_disconnected_time_stream
458 | GROUP BY STEP(per_session_disconnected_time_stream.rowtime BY INTERVAL '10' SECOND);
459 |
460 | --Per session time stream for connected devices
461 | CREATE OR REPLACE STREAM per_session_connected_time_stream (sesson_id VARCHAR(128), max_connected_time_seconds INTEGER, avg_connected_time_seconds INTEGER, min_connected_time_seconds INTEGER);
462 |
463 | CREATE OR REPLACE PUMP per_session_connected_time_pump AS INSERT INTO per_session_connected_time_stream
464 | SELECT STREAM sesson_id,
465 | MAX(connected_time_seconds) AS max_connected_time_seconds,
466 | AVG(connected_time_seconds) AS avg_connected_time_seconds,
467 | MIN(connected_time_seconds) AS min_connected_time_seconds
468 | FROM session_connected_time_stream
469 | WHERE connected_time_seconds <= 60
470 | GROUP BY STEP(session_connected_time_stream.rowtime BY INTERVAL '10' SECOND), sesson_id;
471 |
472 | --12. Max for connected devices
473 | CREATE OR REPLACE PUMP maximum_connected_time_pump AS INSERT INTO FAN_OUT_STREAM
474 | SELECT STREAM
475 | STEP(per_session_connected_time_stream.rowtime BY INTERVAL '10' SECOND) AS event_timestamp,
476 | 'MaxConnTime',
477 | 'None',
478 | 0,
479 | 'Maximum',
480 | MAX(max_connected_time_seconds) AS max_value
481 | FROM per_session_connected_time_stream
482 | GROUP BY STEP(per_session_connected_time_stream.rowtime BY INTERVAL '10' SECOND);
483 |
484 | --13. Min for connected devices
485 | CREATE OR REPLACE PUMP minimum_connected_time_pump AS INSERT INTO FAN_OUT_STREAM
486 | SELECT STREAM
487 | STEP(per_session_connected_time_stream.rowtime BY INTERVAL '10' SECOND) AS event_timestamp,
488 | 'MinConnTime',
489 | 'None',
490 | 0,
491 | 'Minimum',
492 | MIN(min_connected_time_seconds) AS min_value
493 | FROM per_session_connected_time_stream
494 | GROUP BY STEP(per_session_connected_time_stream.rowtime BY INTERVAL '10' SECOND);
495 |
496 | --14. Avg for connected devices
497 | CREATE OR REPLACE PUMP average_connected_time_pump AS INSERT INTO FAN_OUT_STREAM
498 | SELECT STREAM
499 | STEP(per_session_connected_time_stream.rowtime BY INTERVAL '10' SECOND) AS event_timestamp,
500 | 'AvgConnTime',
501 | 'None',
502 | 0,
503 | 'Average',
504 | AVG(avg_connected_time_seconds) AS avg_value
505 | FROM per_session_connected_time_stream
506 | GROUP BY STEP(per_session_connected_time_stream.rowtime BY INTERVAL '10' SECOND);
507 |
508 | --15. Fan out to multiple Kinesis Analytics Outputs
509 | CREATE STREAM ${LambdaStreamName}
510 | ( eventTimeStamp TIMESTAMP, computationType VARCHAR(128), category VARCHAR(128), subcategory INTEGER, unit VARCHAR(128), unitValue DOUBLE);
511 |
512 | CREATE OR REPLACE PUMP fan_out_lambda_pump AS
513 | INSERT INTO ${LambdaStreamName}
514 | SELECT *
515 | FROM FAN_OUT_STREAM;
516 |
517 | CREATE STREAM ${FireHoseStreamName}
518 | ( eventTimeStamp TIMESTAMP, computationType VARCHAR(128), category VARCHAR(128), subcategory INTEGER, unit VARCHAR(128), unitValue DOUBLE);
519 |
520 | CREATE OR REPLACE PUMP fan_out_firehose_pump AS
521 | INSERT INTO ${FireHoseStreamName}
522 | SELECT *
523 | FROM FAN_OUT_STREAM;
524 | - LambdaStreamName: !FindInMap
525 | - KinesisAnalyticsApp
526 | - Outputs
527 | - LambdaStreamName
528 | FireHoseStreamName: !FindInMap
529 | - KinesisAnalyticsApp
530 | - Outputs
531 | - FireHoseStreamName
532 |
533 | Inputs:
534 | - NamePrefix: 'SOURCE_SQL_STREAM'
535 | InputSchema:
536 | RecordColumns:
537 | - Name: 'COL_time'
538 | SqlType: 'TIMESTAMP'
539 | Mapping: '$.ts'
540 | - Name: 'device'
541 | SqlType: 'VARCHAR(4)'
542 | Mapping: '$.device'
543 | - Name: 'flow'
544 | SqlType: 'INTEGER'
545 | Mapping: '$.flow'
546 | - Name: 'temp'
547 | SqlType: 'INTEGER'
548 | Mapping: '$.temp'
549 | - Name: 'humidity'
550 | SqlType: 'INTEGER'
551 | Mapping: '$.humidity'
552 | RecordFormat:
553 | RecordFormatType: 'JSON'
554 | MappingParameters:
555 | JSONMappingParameters:
556 | RecordRowPath: '$'
557 | RecordEncoding: 'UTF-8'
558 | KinesisFirehoseInput:
559 | ResourceARN: !GetAtt RawMetricsDeliveryStream.Arn
560 | RoleARN: !GetAtt KinesisAnalyticsAppRole.Arn
561 |
562 | KinesisAnalyticsAppRole:
563 | Type: AWS::IAM::Role
564 | Properties:
565 | AssumeRolePolicyDocument:
566 | Version: '2012-10-17'
567 | Statement:
568 | - Effect: Allow
569 | Principal:
570 | Service: kinesisanalytics.amazonaws.com
571 | Action: 'sts:AssumeRole'
572 | Path: '/'
573 | Policies:
574 | # Read from RawMetricsDeliveryStream
575 | - PolicyName: 'KinesisAnalyticsReadRawMetrics'
576 | PolicyDocument:
577 | Version: '2012-10-17'
578 | Statement:
579 | - Effect: Allow
580 | Action:
581 | - firehose:DescribeDeliveryStream
582 | - firehose:Get*
583 | Resource: !Sub '${RawMetricsDeliveryStream.Arn}'
584 | # Post to ProcessedMetricsDeliveryStream
585 | - PolicyName: 'KinesisAnalyticsPutProcessedMetrics'
586 | PolicyDocument:
587 | Version: '2012-10-17'
588 | Statement:
589 | - Effect: Allow
590 | Action:
591 | - firehose:DescribeDeliveryStream
592 | - firehose:ListDeliveryStreams
593 | - firehose:PutRecord
594 | - firehose:PutRecordBatch
595 | Resource: !Sub '${ProcessedMetricsDeliveryStream.Arn}'
596 | # Invoke UpdateDDBLambda
597 | - PolicyName: UpdateDDBLambdaInvocation
598 | PolicyDocument:
599 | Version: '2012-10-17'
600 | Statement:
601 | - Effect: Allow
602 | Action:
603 | - lambda:InvokeFunction
604 | Resource: !Sub '${UpdateDDBLambda.Arn}'
605 | # Write to CloudWatch
606 | - PolicyName: KinesisAnalyticsAppLogging
607 | PolicyDocument:
608 | Version: '2012-10-17'
609 | Statement:
610 | - Effect: Allow
611 | Action:
612 | - logs:CreateLogGroup
613 | - logs:CreateLogStream
614 | - logs:PutDestination
615 | - logs:PutLogEvents
616 | Resource:
617 | !Join
618 | - ''
619 | - - 'arn:aws:logs:'
620 | - !Ref AWS::Region
621 | - ':'
622 | - !Ref AWS::AccountId
623 | - ':log-group:*'
624 | Metadata:
625 | cfn_nag:
626 | rules_to_suppress:
627 | - id: W11
628 | reason: "The wildcard action in the logs policy is required"
629 | - id: F3
630 | reason: "The wildcard action in the KinesisAnalyticsReadRawMetrics policy permits the KinesisAnalyticsApp to read from the RawMetricsDeliveryStream. The wildcard resource in the KinesisAnalyticsAppLogging policy permits the KinesisAnalyticsApp to log events to CloudWatch."
631 |
632 | KinesisAnalyticsAppFirehoseOutput:
633 | Type: AWS::KinesisAnalytics::ApplicationOutput
634 | Properties:
635 | ApplicationName: !Ref KinesisAnalyticsApp
636 | Output:
637 | DestinationSchema:
638 | RecordFormatType: 'CSV'
639 | KinesisFirehoseOutput:
640 | ResourceARN: !Sub '${ProcessedMetricsDeliveryStream.Arn}'
641 | RoleARN: !Sub '${KinesisAnalyticsAppRole.Arn}'
642 | Name: !FindInMap
643 | - KinesisAnalyticsApp
644 | - Outputs
645 | - FireHoseStreamName
646 |
647 | KinesisAnalyticsAppLambdaOutput:
648 | Type: AWS::KinesisAnalytics::ApplicationOutput
649 |
650 | # Use DependsOn to serialize adding Application Outputs to reduce likelihood of errors.
651 | DependsOn: KinesisAnalyticsAppFirehoseOutput
652 | Properties:
653 | ApplicationName: !Ref KinesisAnalyticsApp
654 | Output:
655 | DestinationSchema:
656 | RecordFormatType: 'CSV'
657 | LambdaOutput:
658 | ResourceARN: !Sub '${UpdateDDBLambda.Arn}'
659 | RoleARN: !Sub '${KinesisAnalyticsAppRole.Arn}'
660 | Name: !FindInMap
661 | - KinesisAnalyticsApp
662 | - Outputs
663 | - LambdaStreamName
664 |
665 | ProcessedMetricsDeliveryStream:
666 | Type: AWS::KinesisFirehose::DeliveryStream
667 | Properties:
668 | DeliveryStreamType: 'DirectPut'
669 | S3DestinationConfiguration:
670 | BucketARN: !Sub '${ProcessedMetricsBucket.Arn}'
671 | BufferingHints:
672 | IntervalInSeconds: 60
673 | SizeInMBs: 10
674 | CloudWatchLoggingOptions:
675 | Enabled: true
676 | LogGroupName: !Ref IoTMetricsLogGroup
677 | LogStreamName: 'ProcessedMetricsS3Delivery'
678 | CompressionFormat: 'UNCOMPRESSED'
679 | EncryptionConfiguration:
680 | NoEncryptionConfig: 'NoEncryption'
681 | Prefix: !FindInMap
682 | - SourceCode
683 | - General
684 | - LogPrefix
685 | RoleARN: !Sub '${ProcessedMetricsDeliveryStreamRole.Arn}'
686 |
687 | ProcessedMetricsBucket:
688 | DeletionPolicy: Retain
689 | Type: AWS::S3::Bucket
690 | Properties:
691 | BucketEncryption:
692 | ServerSideEncryptionConfiguration:
693 | - ServerSideEncryptionByDefault:
694 | SSEAlgorithm: AES256
695 | PublicAccessBlockConfiguration:
696 | BlockPublicAcls: true
697 | BlockPublicPolicy: true
698 | IgnorePublicAcls: true
699 | RestrictPublicBuckets: true
700 | LoggingConfiguration:
701 | DestinationBucketName: !Ref LogsBucket
702 | LogFilePrefix: processed-metrics-bucket/
703 | LifecycleConfiguration:
704 | Rules:
705 | - Id: ExpirationRule
706 | Status: Enabled
707 | ExpirationInDays: '7'
708 | Metadata:
709 | cfn_nag:
710 | rules_to_suppress:
711 | - id: W51
712 | reason: "This is a private bucket. Does not require bucket policy"
713 |
714 | ProcessedMetricsDeliveryStreamRole:
715 | Type: AWS::IAM::Role
716 | Properties:
717 | AssumeRolePolicyDocument:
718 | Version: '2012-10-17'
719 | Statement:
720 | - Effect: Allow
721 | Principal:
722 | Service:
723 | - firehose.amazonaws.com
724 | Action:
725 | - sts:AssumeRole
726 | Path: /
727 | Policies:
728 |
729 | # Put objects in ProcessedMetricsBucket
730 | - PolicyName: 'ProcessedMetricsS3Delivery'
731 | PolicyDocument:
732 | Version: '2012-10-17'
733 | Statement:
734 | Action:
735 | - s3:AbortMultipartUpload
736 | - s3:GetBucketLocation
737 | - s3:PutObject
738 | - s3:GetObject
739 | - s3:ListBucket
740 | - s3:ListBucketMultipartUploads
741 | Effect: Allow
742 | Resource:
743 | - !Sub '${ProcessedMetricsBucket.Arn}'
744 | - !Sub '${ProcessedMetricsBucket.Arn}/'
745 | - !Sub '${ProcessedMetricsBucket.Arn}/*'
746 |
747 | # Write to CloudWatch
748 | - PolicyName: ProcessedMetricsLogging
749 | PolicyDocument:
750 | Version: '2012-10-17'
751 | Statement:
752 | - Effect: Allow
753 | Action:
754 | - logs:CreateLogGroup
755 | - logs:CreateLogStream
756 | - logs:PutDestination
757 | - logs:PutLogEvents
758 | Resource:
759 | !Join
760 | - ''
761 | - - 'arn:aws:logs:'
762 | - !Ref AWS::Region
763 | - ':'
764 | - !Ref AWS::AccountId
765 | - ':log-group:*'
766 | Metadata:
767 | cfn_nag:
768 | rules_to_suppress:
769 | - id: W11
770 | reason: "The wildcard action in the logs policy is required"
771 | # UpdateDDBLambda
772 | UpdateDDBLambda:
773 | Type: AWS::Lambda::Function
774 | Properties:
775 | Code:
776 | S3Bucket: !Sub
777 | - ${Param1}-${AWS::Region}
778 | - Param1: !FindInMap
779 | - SourceCode
780 | - General
781 | - S3Bucket
782 | S3Key: !Sub
783 | - ${Param1}/update_ddb_from_stream.zip
784 | - Param1: !FindInMap
785 | - SourceCode
786 | - General
787 | - KeyPrefix
788 | Environment:
789 | Variables:
790 | ANALYTICS_TABLE: !Ref AnalyticsTable
791 | SOLUTION_UUID: !GetAtt GenerateUUID.UUID
792 | SOLUTION_ID: !FindInMap
793 | - Solution
794 | - Data
795 | - ID
796 | SOLUTION_VERSION: !FindInMap
797 | - Solution
798 | - Data
799 | - Version
800 | SEND_ANONYMOUS_DATA: !FindInMap
801 | - Solution
802 | - Data
803 | - SendAnonymousUsageData
804 | Description: Puts ProcessedMetrics data into AnalyticsTable.
805 | Handler: update_ddb_from_stream.lambda_handler
806 | MemorySize: 256
807 | Role: !GetAtt UpdateDDBLambdaRole.Arn
808 | Runtime: python3.8
809 | Timeout: 300
810 |
811 | UpdateDDBLambdaRole:
812 | Type: AWS::IAM::Role
813 | Properties:
814 | AssumeRolePolicyDocument:
815 | Version: '2012-10-17'
816 | Statement:
817 | - Effect: Allow
818 | Principal:
819 | Service:
820 | - lambda.amazonaws.com
821 | Action:
822 | - sts:AssumeRole
823 | Path: "/"
824 | Policies:
825 | - PolicyName: root
826 | PolicyDocument:
827 | Version: '2012-10-17'
828 | Statement:
829 |
830 | # Read from ProcessedMetricsDeliveryStream
831 | - Effect: Allow
832 | Action:
833 | - firehose:DescribeDeliveryStream
834 | - firehose:Get*
835 | Resource:
836 | - !Sub '${ProcessedMetricsDeliveryStream.Arn}'
837 |
838 | # Update AnalyticsTable
839 | - Effect: Allow
840 | Action:
841 | - dynamodb:GetItem
842 | - dynamodb:PutItem
843 | Resource:
844 | - !Sub '${AnalyticsTable.Arn}'
845 |
846 | # Write to CloudWatch
847 | - PolicyName: UpdateDDBLambdaLogging
848 | PolicyDocument:
849 | Version: '2012-10-17'
850 | Statement:
851 | - Effect: Allow
852 | Action:
853 | - logs:CreateLogGroup
854 | - logs:CreateLogStream
855 | - logs:PutDestination
856 | - logs:PutLogEvents
857 | Resource:
858 | !Join
859 | - ''
860 | - - 'arn:aws:logs:'
861 | - !Ref AWS::Region
862 | - ':'
863 | - !Ref AWS::AccountId
864 | - ':log-group:*'
865 | Metadata:
866 | cfn_nag:
867 | rules_to_suppress:
868 | - id: F3
869 | reason: "The wildcard action in the root policy permits the UpdateDDBLambda function to read from the ProcessedMetricsDeliveryStream. The wildcard resource in the UpdateDDBLambdaLogging policy permits the UpdateDDBLambda function to log events to CloudWatch."
870 | - id: W11
871 | reason: "The wildcard action required to log events to CloudWatch."
872 |
873 | # Database
874 | AnalyticsTable:
875 | Type: AWS::DynamoDB::Table
876 | Properties:
877 | AttributeDefinitions:
878 | - AttributeName: MetricType
879 | AttributeType: S
880 | - AttributeName: EventTime
881 | AttributeType: S
882 | KeySchema:
883 | - KeyType: HASH
884 | AttributeName: MetricType
885 | - KeyType: RANGE
886 | AttributeName: EventTime
887 | ProvisionedThroughput:
888 | ReadCapacityUnits: 20
889 | WriteCapacityUnits: 20
890 |
891 | AnalyticsTableScalingRole:
892 | Type: AWS::IAM::Role
893 | Properties:
894 | AssumeRolePolicyDocument:
895 | Version: '2012-10-17'
896 | Statement:
897 | - Effect: Allow
898 | Principal:
899 | Service:
900 | - application-autoscaling.amazonaws.com
901 | Action:
902 | - sts:AssumeRole
903 | Path: '/'
904 | Policies:
905 | - PolicyName: AnalyticsTableScalingPolicy
906 | PolicyDocument:
907 | Version: '2012-10-17'
908 | Statement:
909 |
910 | # Allows updating AnalyticsTable capacity.
911 | - Effect: Allow
912 | Action:
913 | - dynamodb:DescribeTable
914 | - dynamodb:UpdateTable
915 | Resource:
916 | - !Sub '${AnalyticsTable.Arn}'
917 |
918 | # Allows access to AnalyticsTable cloudwatch logs.
919 | - Effect: Allow
920 | Action:
921 | - cloudwatch:PutMetricAlarm
922 | - cloudwatch:DescribeAlarms
923 | - cloudwatch:GetMetricStatistics
924 | - cloudwatch:SetAlarmState
925 | - cloudwatch:DeleteAlarms
926 | Resource:
927 | - '*'
928 | Metadata:
929 | cfn_nag:
930 | rules_to_suppress:
931 | - id: W11
932 | reason: "The wildcard resource in the AnalyticsTableScalingPolicy permits the scaling role to create, inspect, and delete the CloudWatch alarms used for DynamoDB auto scaling."
933 |
934 | AnalyticsTableWriteCapacityTarget:
935 | Type: AWS::ApplicationAutoScaling::ScalableTarget
936 | Properties:
937 | MaxCapacity: !FindInMap [DDB, Scaling, WriteCapacityMax]
938 | MinCapacity: !FindInMap [DDB, Scaling, WriteCapacityMin]
939 | ResourceId: !Sub 'table/${AnalyticsTable}'
940 | RoleARN: !Sub '${AnalyticsTableScalingRole.Arn}'
941 | ScalableDimension: dynamodb:table:WriteCapacityUnits
942 | ServiceNamespace: dynamodb
943 |
944 | AnalyticsTableWriteScalingPolicy:
945 | Type: AWS::ApplicationAutoScaling::ScalingPolicy
946 | Properties:
947 | PolicyName: WriteAutoScalingPolicy
948 | PolicyType: TargetTrackingScaling
949 | ScalingTargetId: !Ref AnalyticsTableWriteCapacityTarget
950 | TargetTrackingScalingPolicyConfiguration:
951 | TargetValue: !FindInMap [DDB, Scaling, WriteTargetUtilization]
952 | ScaleInCooldown: 300
953 | ScaleOutCooldown: 60
954 | PredefinedMetricSpecification:
955 | PredefinedMetricType: DynamoDBWriteCapacityUtilization
956 |
957 | AnalyticsTableReadCapacityTarget:
958 | Type: AWS::ApplicationAutoScaling::ScalableTarget
959 | Properties:
960 | MaxCapacity: !FindInMap [DDB, Scaling, ReadCapacityMax]
961 | MinCapacity: !FindInMap [DDB, Scaling, ReadCapacityMin]
962 | ResourceId: !Sub 'table/${AnalyticsTable}'
963 | RoleARN: !Sub '${AnalyticsTableScalingRole.Arn}'
964 | ScalableDimension: dynamodb:table:ReadCapacityUnits
965 | ServiceNamespace: dynamodb
966 |
967 | AnalyticsTableReadScalingPolicy:
968 | Type: AWS::ApplicationAutoScaling::ScalingPolicy
969 | Properties:
970 | PolicyName: ReadAutoScalingPolicy
971 | PolicyType: TargetTrackingScaling
972 | ScalingTargetId: !Ref AnalyticsTableReadCapacityTarget
973 | TargetTrackingScalingPolicyConfiguration:
974 | TargetValue: !FindInMap [DDB, Scaling, ReadTargetUtilization]
975 | ScaleInCooldown: 300
976 | ScaleOutCooldown: 60
977 | PredefinedMetricSpecification:
978 | PredefinedMetricType: DynamoDBReadCapacityUtilization
979 |
980 | # Dashboard Website
981 | WebsiteBucket:
982 | Type: AWS::S3::Bucket
983 | DeletionPolicy: Retain
984 | Properties:
985 | BucketEncryption:
986 | ServerSideEncryptionConfiguration:
987 | - ServerSideEncryptionByDefault:
988 | SSEAlgorithm: AES256
989 | PublicAccessBlockConfiguration:
990 | BlockPublicAcls: true
991 | BlockPublicPolicy: true
992 | IgnorePublicAcls: true
993 | RestrictPublicBuckets: true
994 | LoggingConfiguration:
995 | DestinationBucketName: !Ref LogsBucket
996 | LogFilePrefix: website-bucket/
997 | WebsiteConfiguration:
998 | IndexDocument: "index.html"
999 | ErrorDocument: "index.html"
1000 | WebsiteBucketPolicy:
1001 | Type: "AWS::S3::BucketPolicy"
1002 | Properties:
1003 | Bucket:
1004 | Ref: "WebsiteBucket"
1005 | PolicyDocument:
1006 | Statement:
1007 | -
1008 | Action:
1009 | - "s3:GetObject"
1010 | Effect: "Allow"
1011 | Resource:
1012 | Fn::Join:
1013 | - ""
1014 | -
1015 | - "arn:aws:s3:::"
1016 | -
1017 | Ref: "WebsiteBucket"
1018 | - "/*"
1019 | Principal:
1020 | CanonicalUser: !GetAtt WebsiteOriginAccessIdentity.S3CanonicalUserId
1021 | WebsiteOriginAccessIdentity:
1022 | Type: AWS::CloudFront::CloudFrontOriginAccessIdentity
1023 | Properties:
1024 | CloudFrontOriginAccessIdentityConfig:
1025 | Comment: !Sub "access-identity-${WebsiteBucket}"
1026 | WebsiteDistribution:
1027 | Type: AWS::CloudFront::Distribution
1028 | Properties:
1029 | DistributionConfig:
1030 | Comment: "Website distribution for solution"
1031 | Origins:
1032 | -
1033 | Id: S3-solution-website
1034 | DomainName: !Sub "${WebsiteBucket}.s3.${AWS::Region}.amazonaws.com"
1035 | S3OriginConfig:
1036 | OriginAccessIdentity: !Sub "origin-access-identity/cloudfront/${WebsiteOriginAccessIdentity}"
1037 | DefaultCacheBehavior:
1038 | TargetOriginId: S3-solution-website
1039 | AllowedMethods:
1040 | - GET
1041 | - HEAD
1042 | - OPTIONS
1043 | - PUT
1044 | - POST
1045 | - PATCH
1046 | - DELETE
1047 | CachedMethods:
1048 | - GET
1049 | - HEAD
1050 | - OPTIONS
1051 | ForwardedValues:
1052 | QueryString: 'false'
1053 | ViewerProtocolPolicy: redirect-to-https
1054 | IPV6Enabled: 'true'
1055 | ViewerCertificate:
1056 | CloudFrontDefaultCertificate: 'true'
1057 | Enabled: 'true'
1058 | HttpVersion: 'http2'
1059 | Logging:
1060 | IncludeCookies: 'false'
1061 | Bucket: !GetAtt LogsBucket.DomainName
1062 | Prefix: cloudfront-logs/
1063 |
1064 | ##Logging bucket for cloudFront and other solution buckets
1065 | LogsBucket:
1066 | DeletionPolicy: Retain
1067 | Type: AWS::S3::Bucket
1068 | Properties:
1069 | BucketEncryption:
1070 | ServerSideEncryptionConfiguration:
1071 | - ServerSideEncryptionByDefault:
1072 | SSEAlgorithm: AES256
1073 | PublicAccessBlockConfiguration:
1074 | BlockPublicAcls: true
1075 | BlockPublicPolicy: true
1076 | IgnorePublicAcls: true
1077 | RestrictPublicBuckets: true
1078 | AccessControl: LogDeliveryWrite
1079 | Metadata:
1080 | cfn_nag:
1081 | rules_to_suppress:
1082 | - id: W35
1083 | reason: "This is the logs bucket for all the other S3 Buckets and CloudFront"
1084 | - id: W51
1085 | reason: "This is a private bucket. Does not require bucket policy"
1086 |
1087 |
1088 | # Cognito for Dashboard Users
1089 | CognitoUserPool:
1090 | Type: AWS::Cognito::UserPool
1091 | Properties:
1092 | AliasAttributes:
1093 | - email
1094 | AutoVerifiedAttributes:
1095 | - email
1096 | AdminCreateUserConfig:
1097 | AllowAdminCreateUserOnly: True
1098 | InviteMessageTemplate:
1099 | EmailMessage:
1100 | !Sub |
1101 |
You are invited to join the Real-Time IoT Device Monitoring dashboard. Your dashboard credentials are as follows:
1107 | Please sign in to the dashboard with the user name and your temporary password provided above at:
1108 | https://${WebsiteDistribution.DomainName}/index.html
1109 |
1120 | Please sign in to the dashboard with the user name and temporary password provided above at:
1121 | https://${WebsiteDistribution.DomainName}/index.html
1122 |
97 | The Dashboard is not configured correctly. Please ensure these values are set and are accurate.
98 |
99 |
100 |
101 | These values are used by the Dashboard to validate users in your Cognito User Pool. Don't change these values unless you
102 | know what you're doing!
103 |
104 |
105 |
127 |
128 |
129 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
143 |
Create New Password
144 |
145 |
146 |
147 | The passwords you entered do not match!
148 |
149 |
150 | The password you entered does not meet the following complexity requirements:
151 |
152 |
8 or more characters
153 |
Upper case character
154 |
Lower case character
155 |
Number
156 |
157 |
158 |
159 |
160 |
161 | Your temporary password must be changed! Please create a new password (8 or more characters,
162 | including at least one uppercase letter, one lowercase letter, and one number).
163 |
164 |
165 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
Total number of connected devices
189 |
190 |
191 |
192 |
Count: 0
193 |
194 |
195 | Last Updated:
196 |
0
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
Anomaly Scores
207 | Updated every 10 seconds
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
Average Temperature Value
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
Minimum Temperature Value
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
Maximum Temperature Value
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
Avg Temperature per Device
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
Min Temperature per Device
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
Max Temperature per Device
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
Average Connection Time
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
Average Disconnect Time
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
315 |
316 |
317 |
--------------------------------------------------------------------------------
/source/web_site/js/app-variables.js.example:
--------------------------------------------------------------------------------
// Dashboard configuration template. Copy to app-variables.js and populate each
// value before serving the site; dash.js reads these back out of localStorage
// (keys 'UserPoolClientId', 'UserPoolId', 'AnalyticsTable', 'Region',
// 'IdentityPoolId') to configure Cognito auth and the DynamoDB queries.
localStorage.setItem('UserPoolClientId', ''); // Cognito user pool app client id
localStorage.setItem('UserPoolId', '');       // Cognito user pool id
localStorage.setItem('AnalyticsTable', '');   // DynamoDB table queried by the dashboard
localStorage.setItem('Region', '');           // AWS region the stack is deployed in
localStorage.setItem('IdentityPoolId', '');   // Cognito identity pool id
// NOTE(review): presumably controls anonymous usage reporting (cf. the
// template's SEND_ANONYMOUS_DATA setting) — confirm against the metrics code.
var _dashboard_usage = 'True';
var _hit_data = {
  Solution: '', // solution id, e.g. from the template's Solution/Data/ID mapping — TODO confirm
  UUID: '',     // per-deployment UUID — TODO confirm source
  Data: {
    dashboard: 1,
    region: ''
  }
}
15 |
--------------------------------------------------------------------------------
/source/web_site/js/dash.js:
--------------------------------------------------------------------------------
1 | /*********************************************************************************************************************
2 | * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
3 | * *
4 | * Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance *
5 | * with the License. A copy of the License is located at *
6 | * *
7 | * http://www.apache.org/licenses/ *
8 | * *
9 | * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES *
10 | * OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
11 | * and limitations under the License. *
12 | *********************************************************************************************************************/
13 |
14 | function init() {
15 |
16 | console.log('dash.js initialized');
17 |
18 | const clientIdParamName = "UserPoolClientId";
19 | const userPoolIdParamName = "UserPoolId";
20 | const identityPoolIdParamName = "IdentityPoolId";
21 | const cognitoRegionParamName = "Region";
22 |
23 | var streamName,
24 | streamType,
25 | rate,
26 | sendDataHandle,
27 | totalRecordsSent = 0,
28 | cognitoAppClientId = getConfigParameterByName(clientIdParamName),
29 | cognitoUserPoolId = getConfigParameterByName(userPoolIdParamName),
30 | cognitoIdentityPoolId = getConfigParameterByName(identityPoolIdParamName),
31 | cognitoRegion = getConfigParameterByName(cognitoRegionParamName),
32 | cognitoUser;
33 |
34 | let tableName = getConfigParameterByName('AnalyticsTable');
35 |
36 | // Populate the dashboard settings UI
37 | $("#userPoolId").val(cognitoUserPoolId);
38 | $("#identityPoolId").val(cognitoIdentityPoolId);
39 | $("#clientId").val(cognitoAppClientId);
40 | $("#userPoolRegion").val(cognitoRegion);
41 | $("#tableName").val(tableName);
42 |
// Resolve a dashboard setting by name: a query-string value takes precedence
// and is persisted to localStorage; otherwise the previously stored value
// (possibly null) is returned.
function getConfigParameterByName(name) {
    var fromQuery = getQSParameterByName(name);

    if (fromQuery == null || fromQuery == '') {
        return localStorage.getItem(name);
    }

    localStorage.setItem(name, fromQuery);
    return fromQuery;
}
53 |
// Extract the value of query-string parameter `name` from `url` (defaults to
// the current page URL). Returns null when the parameter is absent, '' when it
// is present without a value, and the decoded value otherwise ('+' decoded as
// a space).
// Fix: the capture group must stop at '&' or '#' — this copy had "[^]*",
// which greedily swallowed every following parameter into the value.
function getQSParameterByName(name, url) {
    if (!url) {
        url = window.location.href;
    }

    // Escape '[' and ']' so array-style names match literally.
    name = name.replace(/[\[\]]/g, "\\$&");
    var regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)");
    var results = regex.exec(url);
    if (!results) return null;
    if (!results[2]) return '';
    return decodeURIComponent(results[2].replace(/\+/g, " "));
}
66 |
67 | var dateTime = [];
68 | var usersCounter = [];
69 | var androidUsers = [];
70 | var iOSUsers = [];
71 | var windowsUsers = [];
72 | var otherUsers = [];
73 | var quadA = [];
74 | var quadB = [];
75 | var quadC = [];
76 | var quadD = [];
77 |
78 | var osUsageData = [];
79 | var quadrantData = [];
80 |
81 | var colors = ["red", "green", "blue", "orange", "purple", "cyan", "magenta", "lime", "pink", "teal", "lavender", "brown", "beige", "maroon", "mint", "olive", "coral"];
// Return a colour for series index `i`: a fixed palette entry while `i` is in
// range, otherwise a random "rgb(r,g,b)" string.
var dynamicColors = function(i) {
    if (i >= 0 && i < colors.length) {
        return colors[i];
    }
    var channel = function() {
        return Math.floor(Math.random() * 255);
    };
    return "rgb(" + channel() + "," + channel() + "," + channel() + ")";
}
89 |
// No-op transform; used as the default label function for the chart helpers.
var identity = function(value) {
    return value;
};
93 |
// Replace the chart's labels and point every existing dataset at `data`,
// recolouring each from the shared palette, then redraw.
// Fix: `dataset` was assigned without a declaration, creating an implicit
// global that leaks across calls (and throws in strict mode).
function addData(chart, label, data) {
    chart.data.labels = label;
    for (var i = 0; i < chart.data.datasets.length; i++) {
        var dataset = chart.data.datasets[i];
        dataset.data = data;
        dataset.fill = false;
        // Walk the palette from the end so each dataset gets a distinct colour.
        var color = dynamicColors(colors.length - 1 - i);
        dataset.fillColor = color;
        // NOTE(review): likely a typo for "highlightFill" (Chart.js v1 name) —
        // confirm against the Chart.js version in use before renaming.
        dataset.hightlightFill = color;
        dataset.backgroundColor = color;
        dataset.borderColor = color;
    }
    chart.update();
}
108 |
// Rebuild the chart's datasets from scratch: one dataset per series in `data`,
// labelled from `datasetLabel` and coloured from the palette. When
// `separateAxes` is set, each series is bound to its own y axis (id = label).
function updateData(chart, labels, data, datasetLabel, separateAxes = false) {
    chart.data.labels = labels;
    chart.data.datasets = [];

    for (var idx = 0; idx < data.length; idx++) {
        var seriesColor = dynamicColors(idx);
        var series = {
            data: data[idx],
            label: datasetLabel[idx],
            fill: false,
            backgroundColor: seriesColor,
            borderColor: seriesColor
        };
        if (separateAxes) series.yAxisID = datasetLabel[idx];
        chart.data.datasets.push(series);
    }
    chart.update();
}
126 |
// Build a Chart.js config for a line chart with a single, initially empty
// dataset named `label`. The x axis thins ticks to at most 4; the y axis
// steps by 50 over a suggested 0-100 range.
var generateLineChartConfig = function(label) {
    var xAxis = {
        ticks: {
            autoSkip: true,
            maxTicksLimit: 4
        },
        display: true
    };
    var yAxis = {
        ticks: {
            stepSize: 50,
            suggestedMin: 0,
            suggestedMax: 100
        },
        display: true
    };
    return {
        type: "line",
        data: {
            labels: [],
            datasets: [{ label: label, data: [] }]
        },
        options: {
            responsive: true,
            scales: {
                xAxes: [xAxis],
                yAxes: [yAxis]
            }
        }
    };
}
166 |
// Build a Chart.js config for a stacked horizontal bar chart with a single,
// initially empty dataset named `label`. Bars stack on the y axis; the value
// (x) axis ticks by 10 over a suggested 0-10 range.
var generateHorizontalBarChartConfig = function(label) {
    var valueAxis = {
        display: true,
        scaleLabel: {
            display: false
        },
        ticks: {
            stepSize: 10,
            suggestedMin: 0,
            suggestedMax: 10
        }
    };
    return {
        type: "horizontalBar",
        data: {
            labels: [],
            datasets: [{ label: label, data: [] }]
        },
        options: {
            legend: {
                display: true
            },
            responsive: true,
            scales: {
                yAxes: [{ stacked: true }],
                xAxes: [valueAxis]
            }
        }
    };
}
205 |
// Attach a new line chart (titled `label`) to the canvas with id `divId`.
var generateLineChart = function(divId, label) {
    var canvasContext = document.getElementById(divId).getContext("2d");
    return new Chart(canvasContext, generateLineChartConfig(label));
};
211 |
// Attach a new horizontal bar chart (titled `label`) to the canvas with id `divId`.
var generateHorizontalBarChart = function(divId, label) {
    var canvasContext = document.getElementById(divId).getContext("2d");
    return new Chart(canvasContext, generateHorizontalBarChartConfig(label));
};
217 |
// UTC timestamp "YYYY-MM-DD HH:MM:SS.mmm" for the instant `secsAgo` seconds
// before now (ISO form with 'T' replaced by a space and 'Z' dropped), matching
// the EventTime format stored in the analytics table.
var getTimeSecsAgo = function(secsAgo = 0) {
    var millisAgo = secsAgo * 1000;
    var stamp = new Date(Date.now() - millisAgo).toISOString();
    return stamp.replace('T', ' ').replace('Z', '');
};
221 |
222 | var currentTime = new Date();
223 |
224 | var totalCallCurrentTime = new Date(currentTime.getTime() - 600000).toISOString().replace('T',' ').replace('Z','');
225 |
226 | var AvgConnTimeQueryTime = new Date(currentTime.getTime() - 6000000).toISOString().replace('T',' ').replace('Z','');
227 | var AvgConnTimeMap = {};
228 | var AvgConnTimeCallLabels = new Array();
229 | var AvgConnTimeCallChart = generateLineChart("connTime", "Average Connection Time");
230 |
231 | var AvgDisConnTimeQueryTime = new Date(currentTime.getTime() - 6000000).toISOString().replace('T',' ').replace('Z','');
232 | var AvgDisConnTimeMap = {};
233 | var AvgDisConnTimeCallLabels = new Array();
234 | var AvgDisConnTimeCallChart = generateLineChart("disconnTime", "Average DisConnection Time");
235 |
236 | var AvgTempValueQueryTime = new Date(currentTime.getTime() - 6000000).toISOString().replace('T',' ').replace('Z','');
237 | var AvgTempCallMap = {};
238 | var AvgTempCallLabels = new Array();
239 | var AvgTempCallChart = generateLineChart("avgTempValueCanvas", "Average Temp");
240 |
241 | var MinTempValueQueryTime = new Date(currentTime.getTime() - 6000000).toISOString().replace('T',' ').replace('Z','');
242 | var MinTempCallMap = {};
243 | var MinTempCallLabels = new Array();
244 | var MinTempCallChart = generateLineChart("minTempValueCanvas", "Minimum Temp");
245 |
246 | var MaxTempValueQueryTime = new Date(currentTime.getTime() - 6000000).toISOString().replace('T',' ').replace('Z','');
247 | var MaxTempCallMap = {};
248 | var MaxTempCallLabels = new Array();
249 | var MaxTempCallChart = generateLineChart("maxTempValueCanvas", "Maximum Temp");
250 |
251 | var anomalyScoreCurrentTime = new Date(currentTime.getTime() - 600000).toISOString().replace('T',' ').replace('Z','');
252 | var anomalyCallMap = {"Average Anomaly Score": []};
253 | var anomalyCallLabels= new Array();
254 | var anomalyChartConfig = generateLineChartConfig("Average Anomaly Score");
255 | var anomalyCtx = document.getElementById("anomalyCanvas").getContext("2d");
256 |
257 | anomalyChartConfig.options.scales.yAxes = [
258 | {
259 | id: 'Average Anomaly Score',
260 | type: 'linear',
261 | position: 'left',
262 | ticks: {
263 | stepSize: 1,
264 | max: 3,
265 | min: 0
266 | }
267 | }
268 | ];
269 | anomalyChart = new Chart(anomalyCtx, anomalyChartConfig)
270 |
271 | var avgTempPerDeviceQueryTime = new Date(currentTime.getTime() - 600000).toISOString().replace('T',' ').replace('Z','');
272 | var avgTempPerDeviceChart = generateHorizontalBarChart("avgTempCanvas", "Avg Temp per device");
273 |
274 | var minTempPerDeviceQueryTime = new Date(currentTime.getTime() - 600000).toISOString().replace('T',' ').replace('Z','');
275 | var minTempPerDeviceChart = generateHorizontalBarChart("minTempCanvas", "Min Temp per device");
276 |
277 | var maxTempPerDeviceQueryTime = new Date(currentTime.getTime() - 600000).toISOString().replace('T',' ').replace('Z','');
278 | var maxTempPerDeviceChart = generateHorizontalBarChart("maxTempCanvas", "Max Temp per device");
279 |
280 | var totalCallCtx = document.getElementById("A_count");
281 | var totalCallTimeCtx = document.getElementById("A_percent");
282 | var totalConnectedDevices = 0;
283 |
// Keep only the text before the first '|' separator (e.g. "device|extra" -> "device").
var splitFunc = function(entry) {
    var parts = entry.split('|');
    return parts[0];
};
285 |
// Build a DynamoDB DocumentClient query for every `metricType` row whose
// EventTime is strictly newer than `eventTime` (the caller's query window).
var retrieveParams = function(metricType, eventTime) {
    var query = {
        TableName: tableName,
        ConsistentRead: true,
        ScanIndexForward: true,
        KeyConditionExpression: "MetricType = :TrailLog AND EventTime > :currentTime",
        ExpressionAttributeValues: {}
    };
    query.ExpressionAttributeValues[":currentTime"] = eventTime;
    query.ExpressionAttributeValues[":TrailLog"] = metricType;
    return query;
};
295 |
// Build a query against the per-hour table: partition key is "YYYY-MM-DD HH"
// and the range key is the minute, both carved out of `eventTime`
// ("YYYY-MM-DD HH:MM:SS..."). `metricType` is accepted for signature symmetry
// with retrieveParams but is not used here.
var retrieveParamsFromMaxTable = function(metricType, eventTime) {
    var dateAndClock = eventTime.split(' ');
    var clockParts = dateAndClock[1].split(':');
    var hourKey = dateAndClock[0] + " " + clockParts[0];
    var minuteKey = clockParts[1];
    return {
        TableName: tableName,
        ConsistentRead: true,
        ScanIndexForward: true,
        KeyConditionExpression: "#hour = :hour AND #min > :minute",
        ExpressionAttributeNames: { "#hour": "Hour", "#min": "Minute" },
        ExpressionAttributeValues: { ":hour": hourKey, ":minute": minuteKey }
    };
}
310 |
// Merge newly-queried DDB items into a per-key count map and redraw the
// horizontal bar chart with the top `noOfTopItems` entries (sorted by count,
// labels run through `labelFunc`). Returns the EventTime of the newest item so
// the caller can advance its query window; `queryTime` is returned unchanged
// when no items arrived.
// NOTE(review): the merge loop in this copy is corrupted — the code between
// "for (var i=0; i" and " 0) {" has been truncated (consistent with
// '<'-containing source lost to HTML-entity stripping elsewhere in this file).
// Recover the original body from version control before editing.
var updateHorizontalBarChart = function(data, noOfTopItems, chartName, queryTime, labelFunc=identity) {
var items = data.Items;
var ipCountMap = {};

// Merge the counts of each DDB item into a single map.
for (var i=0; i 0) {
queryTime = items[items.length-1].EventTime;

var topIps = Object.keys(ipCountMap).sort(function(a,b) { return ipCountMap[b] - ipCountMap[a]}).slice(0,noOfTopItems);

var topIpCounts = topIps.map(function(ip) {return ipCountMap[ip]; })
topIps = topIps.map(labelFunc);
addData(chartName,topIps,topIpCounts);
}
return queryTime;
};
333 |
// Turn "YYYY-MM-DD HH:MM:SS" into ['', 'YYYY-MM-DD', 'HH:MM:SS'] — an array
// label renders as multiple lines on a Chart.js axis tick.
var splitLabel = function(label) {
    var parts = label.split(' ');
    return [''].concat(parts);
}
// Append newly-queried time-series points to the label array and per-series
// value map backing a line chart, then redraw via updateData(). Returns the
// updated query-window timestamp.
// NOTE(review): the main loop in this copy is corrupted — the code between
// "for (var i=0; i" and " 5) {" has been truncated (consistent with
// '<'-containing source lost to HTML-entity stripping elsewhere in this file).
// Recover the original body from version control before editing.
var updateLineChart = function(data, AvgTempCallLabels, AvgTempCallMap, chart, queryTime, labelFunc=identity) {
var items = data.Items;
var l = items.length
let past_time;
var now = new Date();
// Wall-clock "now" rebuilt from UTC components for comparisons against the
// UTC-formatted EventTime strings.
var now_utc = new Date(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate(), now.getUTCHours(), now.getUTCMinutes(), now.getUTCSeconds()).getTime();
for (var i=0; i 5) {
AvgTempCallLabels.push(splitLabel(queryTime.split('.')[0]));
// Pad every series with a zero so all series stay aligned with the labels.
for (var key in AvgTempCallMap) {
AvgTempCallMap[key].push(0);
}
}
}

updateData(chart, AvgTempCallLabels, Object.values(AvgTempCallMap), Object.keys(AvgTempCallMap).map(labelFunc));

return queryTime;
}
383 |
384 | var getLatestRecord = function(){
385 | console.log('Getting latest records from DynamoDB');
386 | var params = retrieveParams("ConnectedDevicesCount", totalCallCurrentTime);
387 | var PerDeviceMaxTempParams = retrieveParams("PerDeviceMaxTemp", maxTempPerDeviceQueryTime);
388 | var PerDeviceAvgTempParams = retrieveParams("PerDeviceAvgTemp", avgTempPerDeviceQueryTime);
389 | var PerDeviceMinTempParams = retrieveParams("PerDeviceMinTemp", minTempPerDeviceQueryTime);
390 | var AvgTempParams = retrieveParams("AvgTempValue", AvgTempValueQueryTime);
391 | var MinTempParams = retrieveParams("MinTempValue", MinTempValueQueryTime);
392 | var MaxTempParams = retrieveParams("MaxTempValue", MaxTempValueQueryTime);
393 | var AvgConnTimeParams = retrieveParams("AvgConnTime", AvgConnTimeQueryTime);
394 | var AvgDisConnTimeParams = retrieveParams("AvgDisconnTime", AvgDisConnTimeQueryTime);
395 | var anomalyParams = retrieveParams("DeviceTempAnomalyScore", anomalyScoreCurrentTime);
396 |
397 | var docClient = new AWS.DynamoDB.DocumentClient();
398 |
399 | docClient.query(PerDeviceMaxTempParams, function(err, data) {
400 | if (err) console.log(err);
401 | else {
402 | maxTempPerDeviceQueryTime = updateHorizontalBarChart(data, 20, maxTempPerDeviceChart, maxTempPerDeviceQueryTime, splitFunc);
403 | }
404 | });
405 |
406 | docClient.query(PerDeviceMinTempParams, function(err, data) {
407 | if (err) console.log(err);
408 | else {
409 | minTempPerDeviceQueryTime = updateHorizontalBarChart(data, 20, minTempPerDeviceChart, minTempPerDeviceQueryTime, splitFunc);
410 | }
411 | });
412 |
413 | docClient.query(PerDeviceAvgTempParams, function(err, data) {
414 | if (err) console.log(err);
415 | else {
416 | avgTempPerDeviceQueryTime = updateHorizontalBarChart(data, 20, avgTempPerDeviceChart, avgTempPerDeviceQueryTime, splitFunc);
417 | }
418 | });
419 |
420 | docClient.query(AvgConnTimeParams, function(err, data) {
421 | if (err) console.log(err);
422 | else {
423 | AvgConnTimeQueryTime = updateLineChart(data, AvgConnTimeCallLabels, AvgConnTimeMap, AvgConnTimeCallChart, AvgConnTimeQueryTime, splitFunc) ;
424 | }
425 | });
426 |
427 | docClient.query(AvgDisConnTimeParams, function(err, data) {
428 | if (err) console.log(err);
429 | else {
430 | AvgDisConnTimeQueryTime = updateLineChart(data, AvgDisConnTimeCallLabels, AvgDisConnTimeMap, AvgDisConnTimeCallChart, AvgDisConnTimeQueryTime, splitFunc) ;
431 | }
432 | });
433 |
434 | docClient.query(AvgTempParams, function(err, data) {
435 | if (err) console.log(err);
436 | else {
437 | AvgTempValueQueryTime = updateLineChart(data, AvgTempCallLabels, AvgTempCallMap, AvgTempCallChart, AvgTempValueQueryTime, splitFunc) ;
438 | }
439 | });
440 |
441 | docClient.query(MinTempParams, function(err, data) {
442 | if (err) console.log(err);
443 | else {
444 | MinTempValueQueryTime = updateLineChart(data, MinTempCallLabels, MinTempCallMap, MinTempCallChart, MinTempValueQueryTime, splitFunc) ;
445 | }
446 | });
447 |
448 | docClient.query(MaxTempParams, function(err, data) {
449 | if (err) console.log(err);
450 | else {
451 | MaxTempValueQueryTime = updateLineChart(data, MaxTempCallLabels, MaxTempCallMap, MaxTempCallChart, MaxTempValueQueryTime, splitFunc) ;
452 | }
453 | });
454 |
455 | docClient.query(anomalyParams, function(err, data) {
456 | if (err) console.log(err);
457 | else {
458 | var items = data.Items;
459 | console.log(`anomalyScore data: ${data}`)
460 | for (let i = 0; i < items.length; i++) {
461 | console.log(`anomalyscore item: ${items[i]}`);
462 | anomalyCallLabels.push(splitLabel(items[i].EventTime));
463 | ddbItem = JSON.parse(items[i].Data);
464 | anomaly_score_value = Object.values(ddbItem);
465 | var sum = anomaly_score_value.reduce((previous, current) => current += previous);
466 | var avg = sum / anomaly_score_value.length;
467 | anomalyCallMap["Average Anomaly Score"].push(parseFloat(avg));
468 | }
469 | if (items.length>0) {
470 | anomalyScoreCurrentTime = items[items.length-1].EventTime;
471 | updateData(anomalyChart, anomalyCallLabels, Object.values(anomalyCallMap), Object.keys(anomalyCallMap), true);
472 | }
473 | }
474 | });
475 |
476 | docClient.query(params, function(err, data) {
477 | if (err) console.log(err);
478 | else {
479 |
480 | var items = data.Items;
481 | for (var i = 0; i < items.length; i++) {
482 | totalConnectedDevices = parseInt((items[i].Data).split(':')[1]);
483 | }
484 | var callTime;
485 | if (items.length > 0) callTime = items[items.length-1].EventTime;
486 | else callTime = new Date(new Date().getTime() - 200).toISOString().replace('T',' ').replace('Z','');
487 | totalCallCtx.innerHTML = "