├── .gitignore
├── LAB14
│   ├── .env
│   └── serverless.yml
├── LICENSE
├── Lab 19
│   ├── .env
│   ├── app.py
│   └── publish.py
├── Lab 21
│   ├── handler.py
│   ├── package-lock.json
│   ├── requirements.txt
│   └── serverless.yml
├── Lab 23
│   ├── .env
│   ├── glue_job.py
│   ├── package-lock.json
│   ├── publish_data.py
│   └── serverless.yml
├── Lab1
│   ├── handler.py
│   ├── requirements.txt
│   └── serverless.yml
├── Lab10
│   └── learn
│       ├── handler.py
│       └── serverless.yml
├── Lab11
│   ├── dispatcher.py
│   └── serverless.yml
├── Lab13
│   └── lambda
│       ├── .env
│       ├── handler.py
│       ├── requirements.txt
│       └── serverless.yml
├── Lab15
│   ├── .env
│   ├── glue_job.py
│   ├── package-lock.json
│   └── serverless.yml
├── Lab16
│   ├── .gitlab-ci.yml
│   ├── README.md
│   ├── handler.py
│   ├── package.json
│   └── serverless.yml
├── Lab17
│   ├── .env
│   ├── lambda_function.py
│   ├── publish-fake-data.py
│   └── serverless.yml
├── Lab18
│   ├── lambda_handler.py
│   └── serverless.yml
├── Lab2
│   └── lambda-layers
│       ├── .gitignore
│       ├── package-lock.json
│       ├── requirements.txt
│       └── serverless.yml
├── Lab20
│   └── sqlserver
│       └── docker-compose.yml
├── Lab22
│   ├── .env
│   ├── consumer.py
│   └── serverless.yml
├── Lab24
│   ├── .env
│   ├── lambda_function.py
│   └── serverless.yml
├── Lab25
│   ├── .env
│   ├── lambda_function.py
│   └── serverless.yml
├── Lab26
│   ├── .env
│   ├── lambda_functions.py
│   ├── requirements.txt
│   └── serverless.yml
├── Lab27
│   ├── .env
│   ├── handler.py
│   ├── runner.env
│   ├── runnerScript.py
│   └── serverless.yml
├── Lab28
│   ├── EverythingOneFile
│   │   ├── dispatcher.py
│   │   └── serverless.yml
│   └── organized
│       ├── serverless.yml
│       └── src
│           ├── Lambda
│           │   ├── dispatcher.py
│           │   └── serverless.yml
│           └── SNS
│               └── serverless.yml
├── Lab29
│   ├── .env
│   ├── handler.py
│   ├── requirements.txt
│   ├── sample-publish-sqs.py
│   └── serverless.yml
├── Lab3
│   └── cron-jobs
│       ├── .gitignore
│       ├── README.md
│       ├── handler.py
│       ├── package-lock.json
│       ├── package.json
│       └── serverless.yml
├── Lab4
│   └── aws-event-bus
│       ├── .gitignore
│       ├── README.md
│       ├── handler.py
│       ├── publish.py
│       └── serverless.yml
├── Lab5
│   └── learn-sqs
│       ├── .gitignore
│       ├── README.md
│       ├── handler.py
│       ├── package-lock.json
│       └── serverless.yml
├── Lab6
│   ├── event-lambda
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── fail.py
│   │   ├── handler.py
│   │   ├── serverless.yml
│   │   └── sucess.py
│   └── firelambda.py
├── Lab8
│   └── aws-python-http-api-project
│       ├── .gitignore
│       ├── README.md
│       ├── handler.py
│       └── serverless.yml
├── Lab9
│   ├── aws-python-http-api-project
│   │   ├── .gitignore
│   │   ├── README.md
│   │   ├── handler.py
│   │   └── serverless.yml
│   └── package-lock.json
├── README.md
└── lab7
    └── learn
        ├── .gitignore
        ├── README.md
        ├── handler.py
        └── serverless.yml
/.gitignore:
--------------------------------------------------------------------------------
1 | # Build and Release Folders
2 | bin-debug/
3 | bin-release/
4 | [Oo]bj/
5 | [Bb]in/
6 |
7 | # Other files and folders
8 | .settings/
9 |
10 | # Executables
11 | *.swf
12 | *.air
13 | *.ipa
14 | *.apk
15 |
16 | # Project files, i.e. `.project`, `.actionScriptProperties` and `.flexProperties`
17 | # should NOT be excluded as they contain compiler settings and other important
18 | # information for Eclipse / Flash Builder.
19 |
--------------------------------------------------------------------------------
/LAB14/.env:
--------------------------------------------------------------------------------
1 | DB_NAME='sampledb'
2 | DB_TABLE_NAME='table_name=data'
3 | GLUE_ROLE_ARN=XXXXXXXXXXXX
4 | CRAWLER_TARGET_PATH=XXXXXXXXXXXXXXXXX
5 | CRAWLER_NAME=crawler_data
--------------------------------------------------------------------------------
/LAB14/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-create-glue-crawler
2 | useDotenv: true
3 |
4 |
5 | provider:
6 | name: aws
7 | region: us-east-1
8 |
9 | stackTags:
10 | product: datateam
11 | env: qa
12 | created-date: 2022-04-05
13 | team: data
14 | customer-impact: false
15 | terraform: false
16 |
17 | resources:
18 | Resources:
19 | GlueDatabase:
20 | Type: AWS::Glue::Database
21 | Properties:
22 | CatalogId: '867098943567'
23 | DatabaseInput:
24 | Name: ${env:DB_NAME}
25 |
26 | TableGlueCrawler:
27 | Type: AWS::Glue::Crawler
28 | Properties:
29 | DatabaseName: ${env:DB_NAME}
30 | Name: ${env:CRAWLER_NAME}
31 | RecrawlPolicy:
32 | RecrawlBehavior: CRAWL_EVERYTHING
33 | Role: ${env:GLUE_ROLE_ARN}
34 | SchemaChangePolicy:
35 | DeleteBehavior: DEPRECATE_IN_DATABASE
36 | Targets:
37 | S3Targets:
38 | - Path: ${env:CRAWLER_TARGET_PATH}
39 |
40 |
--------------------------------------------------------------------------------
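The stack above only creates the Glue database and crawler; it does not run the crawler. A minimal boto3 sketch for starting it after `sls deploy`, assuming the crawler name is loaded from the same .env file shown above:

import os
import boto3
from dotenv import load_dotenv

load_dotenv(".env")

# start the crawler created by the LAB14 stack; Glue then crawls CRAWLER_TARGET_PATH
glue = boto3.client("glue", region_name="us-east-1")
glue.start_crawler(Name=os.environ["CRAWLER_NAME"])
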
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/Lab 19/.env:
--------------------------------------------------------------------------------
1 | StreamName=my-input-streams
2 | ShardCount=1
3 | RetentionPeriodHours=24
4 | DeliveryStreamName=s3-delivery
5 |
--------------------------------------------------------------------------------
/Lab 19/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | import json
3 | import base64
4 |
5 | app = Flask(__name__)
6 |
7 |
8 | @app.route("/", methods=["GET", "POST"])
9 | def hello_world():
10 |     try:
11 |         data = json.loads(request.data)
12 |         # decode each base64-encoded record in the delivery payload
13 |         for item in data.get("records"):
14 |             record = json.loads(base64.b64decode(item.get("data")))
15 |             print(record)
16 |     except Exception as e:
17 |         print(e)
18 |     return "Hello, World!"
19 | 
20 | if __name__ == "__main__":
21 |     app.run(debug=True)
--------------------------------------------------------------------------------
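For a quick local test of the Flask endpoint above, a hedged sketch that mimics the payload shape it parses ({"records": [{"data": <base64>}]}); the localhost URL and record contents are placeholders:

import base64
import json
import requests

# base64-encode a fake record the way the app expects it
record = base64.b64encode(json.dumps({"name": "test"}).encode()).decode()
payload = {"records": [{"data": record}]}

print(requests.post("http://127.0.0.1:5000/", data=json.dumps(payload)).text)
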
/Lab 19/publish.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import json
3 | from datetime import datetime
4 | import calendar
5 | import random
6 | import time
7 | 
8 | from faker import Faker
9 | import uuid
10 | from time import sleep
11 | import os
12 | from dotenv import load_dotenv
13 |
14 | load_dotenv(".env")
15 |
16 |
17 | my_stream_name = "XXXXXXXXXXXXXXXXXXXXXXX"
18 | print("Stream Name :{} ".format(my_stream_name))
19 |
20 |
21 | kinesis_client = boto3.client('firehose',
22 | region_name='us-east-1',
23 | aws_access_key_id="XXXXXXXXXX",
24 | aws_secret_access_key="XXXXXXXXXXXXXXXXXXXXXXXXE"
25 | )
26 | faker = Faker()
27 |
28 |
29 | for i in range(1, 10):
30 |     json_data = {
31 |         "name": faker.name(),
32 |         "city": faker.city(),
33 |         "phone": faker.phone_number(),
34 |         "id": uuid.uuid4().__str__(),
35 |         "customer_id": random.randint(1, 5)
36 |     }
37 |     print(json_data)
38 |     sleep(0.5)
39 | 
40 |     put_response = kinesis_client.put_record(
41 |         DeliveryStreamName=my_stream_name,
42 |         Record={
43 |             'Data': json.dumps(json_data)
44 |         }
45 |     )
46 |     print(put_response)
47 | 
48 |
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/Lab 21/handler.py:
--------------------------------------------------------------------------------
1 | # must be called as we're using zipped requirements
2 | try:
3 |     import unzip_requirements
4 | except ImportError:
5 |     pass
6 | 
7 | try:
8 |     import json
9 |     import boto3
10 |     import io
11 |     import pandas as pd
12 | except Exception as e:
13 |     print("Error ***", e)
14 | 
15 | 
16 | def hello(event, context):
17 |     client = boto3.client(
18 |         "s3",
19 |         aws_access_key_id="XXX",
20 |         aws_secret_access_key="XXXX",
21 |         region_name="us-east-1",
22 |     )
23 | 
24 |     # each record carries the bucket and key of the object that fired the event
25 |     for item in event.get("Records"):
26 |         s3 = item.get("s3")
27 |         bucket = s3.get("bucket").get("name")
28 |         key = s3.get("object").get("key")
29 | 
30 |         print("bucket", bucket)
31 |         print("key", key)
32 | 
33 |         # read the uploaded CSV straight from S3 into a DataFrame
34 |         response_new = client.get_object(Bucket=bucket, Key=str(key))
35 |         df = pd.read_csv(io.BytesIO(response_new["Body"].read()))
36 | 
37 |         print(df)
38 |         print("\n")
39 |         print(df.shape)
40 | 
41 |     response = {
42 |         "statusCode": 200,
43 |         "body": json.dumps("Process complete ")
44 |     }
45 |     return response
--------------------------------------------------------------------------------
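A minimal local smoke test for the handler above, assuming a CSV object already exists at the bucket and key used here (both are placeholders matching the trigger rules in serverless.yml):

from handler import hello

# fake S3 event with the same structure the Lambda receives from s3:ObjectCreated
fake_event = {
    "Records": [
        {
            "s3": {
                "bucket": {"name": "test-data-buckets-raw"},
                "object": {"key": "uploads/sample.csv"},
            }
        }
    ]
}

print(hello(fake_event, None))
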
/Lab 21/requirements.txt:
--------------------------------------------------------------------------------
1 | pandas
--------------------------------------------------------------------------------
/Lab 21/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-pandas-py
2 |
3 | provider:
4 | name: aws
5 | runtime: python3.7
6 | memorySize: 1000
7 | timeout: 350
8 |
9 | plugins:
10 | - serverless-python-requirements
11 |
12 |
13 | custom:
14 | pythonRequirements:
15 | dockerizePip: true
16 | layer:
17 | name: python-pandas
18 | description: "Layer which contains pandas library"
19 | compatibleRuntimes:
20 | - python3.7
21 |
22 | functions:
23 | hello:
24 | handler: handler.hello
25 | events:
26 | - s3:
27 | bucket: test-data-buckets-raw
28 | event: s3:ObjectCreated:*
29 | rules:
30 | - prefix: uploads/
31 | - suffix: .csv
32 | existing: true
33 | forceDeploy: true
34 |
--------------------------------------------------------------------------------
/Lab 23/.env:
--------------------------------------------------------------------------------
1 | DB_NAME='sampledb'
2 | DB_TABLE_NAME='table_name=data'
3 | GLUE_ROLE_ARN=XXXXXXXXXXXXXXXXXXXXXXXXX
4 | CRAWLER_TARGET_PATH_DYNAMODB=myTable
5 | CRAWLER_NAME=dynamodb-schema-crawlers
6 |
7 | job_name=my-glue-script-dynamodb
8 | glue_script_python_file_name=glue_job.py
9 | glue_iam=arn:aws:iam::XXXXXXXXXXX
10 | local_path="glue_job.py"
11 | s3_bucket=test-data-buckets-raw
12 | s3_prefix_glue_script="Scripts/"
13 | tempDirBucket=XXXXXXXXXXXXXX
14 |
15 | CRAWLER_NAME_S3=aws-s3-crawlers
16 | CRAWLER_TARGET_PATH=s3://XXXXXXXXXXXXX/
--------------------------------------------------------------------------------
/Lab 23/glue_job.py:
--------------------------------------------------------------------------------
1 | try:
2 |     import sys
3 |     from awsglue.transforms import *
4 |     from awsglue.utils import getResolvedOptions
5 |     from pyspark.context import SparkContext
6 |     from awsglue.context import GlueContext
7 |     from awsglue.job import Job
8 | except Exception as e:
9 |     pass
10 | 
11 | 
12 | args = getResolvedOptions(sys.argv, ["JOB_NAME"])
13 | sc = SparkContext()
14 | glueContext = GlueContext(sc)
15 | spark = glueContext.spark_session
16 | job = Job(glueContext)
17 | job.init(args["JOB_NAME"], args)
18 | 
19 | # Script generated for node AWS Glue Data Catalog
20 | AWSGlueDataCatalog_node1660773467364 = glueContext.create_dynamic_frame.from_catalog(
21 |     database="sampledb",
22 |     table_name="mytable",
23 |     transformation_ctx="AWSGlueDataCatalog_node1660773467364",
24 | )
25 | 
26 | # Script generated for node Apply Mapping
27 | ApplyMapping_node1660773505020 = ApplyMapping.apply(
28 |     frame=AWSGlueDataCatalog_node1660773467364,
29 |     mappings=[
30 |         ("last_name", "string", "last_name", "string"),
31 |         ("first_name", "string", "first_name", "string"),
32 |         ("email", "string", "email", "string"),
33 |     ],
34 |     transformation_ctx="ApplyMapping_node1660773505020",
35 | )
36 | 
37 | # Script generated for node Amazon S3
38 | AmazonS3_node1660773507696 = glueContext.write_dynamic_frame.from_options(
39 |     frame=ApplyMapping_node1660773505020,
40 |     connection_type="s3",
41 |     format="json",
42 |     connection_options={"path": "s3://soumilshah-copy-1995", "partitionKeys": []},
43 |     transformation_ctx="AmazonS3_node1660773507696",
44 | )
45 | 
46 | job.commit()
47 | 
--------------------------------------------------------------------------------
/Lab 23/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "requires": true,
3 | "lockfileVersion": 1,
4 | "dependencies": {
5 | "available-typed-arrays": {
6 | "version": "1.0.5",
7 | "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz",
8 | "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==",
9 | "dev": true
10 | },
11 | "aws-sdk": {
12 | "version": "2.1197.0",
13 | "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1197.0.tgz",
14 | "integrity": "sha512-BUxYU+gzxCylEM37NeGcS5kWotXVmKrOBG9+/+U+tnOTW7/3yNBrBfhPrs5IgMhm7H38CLWgOqwJaGDlYzwH/Q==",
15 | "dev": true,
16 | "requires": {
17 | "buffer": "4.9.2",
18 | "events": "1.1.1",
19 | "ieee754": "1.1.13",
20 | "jmespath": "0.16.0",
21 | "querystring": "0.2.0",
22 | "sax": "1.2.1",
23 | "url": "0.10.3",
24 | "util": "^0.12.4",
25 | "uuid": "8.0.0",
26 | "xml2js": "0.4.19"
27 | }
28 | },
29 | "base64-js": {
30 | "version": "1.5.1",
31 | "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
32 | "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
33 | "dev": true
34 | },
35 | "buffer": {
36 | "version": "4.9.2",
37 | "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz",
38 | "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==",
39 | "dev": true,
40 | "requires": {
41 | "base64-js": "^1.0.2",
42 | "ieee754": "^1.1.4",
43 | "isarray": "^1.0.0"
44 | }
45 | },
46 | "call-bind": {
47 | "version": "1.0.2",
48 | "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
49 | "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
50 | "dev": true,
51 | "requires": {
52 | "function-bind": "^1.1.1",
53 | "get-intrinsic": "^1.0.2"
54 | }
55 | },
56 | "define-properties": {
57 | "version": "1.1.4",
58 | "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz",
59 | "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==",
60 | "dev": true,
61 | "requires": {
62 | "has-property-descriptors": "^1.0.0",
63 | "object-keys": "^1.1.1"
64 | }
65 | },
66 | "es-abstract": {
67 | "version": "1.20.1",
68 | "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.1.tgz",
69 | "integrity": "sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA==",
70 | "dev": true,
71 | "requires": {
72 | "call-bind": "^1.0.2",
73 | "es-to-primitive": "^1.2.1",
74 | "function-bind": "^1.1.1",
75 | "function.prototype.name": "^1.1.5",
76 | "get-intrinsic": "^1.1.1",
77 | "get-symbol-description": "^1.0.0",
78 | "has": "^1.0.3",
79 | "has-property-descriptors": "^1.0.0",
80 | "has-symbols": "^1.0.3",
81 | "internal-slot": "^1.0.3",
82 | "is-callable": "^1.2.4",
83 | "is-negative-zero": "^2.0.2",
84 | "is-regex": "^1.1.4",
85 | "is-shared-array-buffer": "^1.0.2",
86 | "is-string": "^1.0.7",
87 | "is-weakref": "^1.0.2",
88 | "object-inspect": "^1.12.0",
89 | "object-keys": "^1.1.1",
90 | "object.assign": "^4.1.2",
91 | "regexp.prototype.flags": "^1.4.3",
92 | "string.prototype.trimend": "^1.0.5",
93 | "string.prototype.trimstart": "^1.0.5",
94 | "unbox-primitive": "^1.0.2"
95 | }
96 | },
97 | "es-to-primitive": {
98 | "version": "1.2.1",
99 | "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
100 | "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
101 | "dev": true,
102 | "requires": {
103 | "is-callable": "^1.1.4",
104 | "is-date-object": "^1.0.1",
105 | "is-symbol": "^1.0.2"
106 | }
107 | },
108 | "events": {
109 | "version": "1.1.1",
110 | "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz",
111 | "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==",
112 | "dev": true
113 | },
114 | "for-each": {
115 | "version": "0.3.3",
116 | "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
117 | "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==",
118 | "dev": true,
119 | "requires": {
120 | "is-callable": "^1.1.3"
121 | }
122 | },
123 | "function-bind": {
124 | "version": "1.1.1",
125 | "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
126 | "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
127 | "dev": true
128 | },
129 | "function.prototype.name": {
130 | "version": "1.1.5",
131 | "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz",
132 | "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==",
133 | "dev": true,
134 | "requires": {
135 | "call-bind": "^1.0.2",
136 | "define-properties": "^1.1.3",
137 | "es-abstract": "^1.19.0",
138 | "functions-have-names": "^1.2.2"
139 | }
140 | },
141 | "functions-have-names": {
142 | "version": "1.2.3",
143 | "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz",
144 | "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==",
145 | "dev": true
146 | },
147 | "get-intrinsic": {
148 | "version": "1.1.2",
149 | "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz",
150 | "integrity": "sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==",
151 | "dev": true,
152 | "requires": {
153 | "function-bind": "^1.1.1",
154 | "has": "^1.0.3",
155 | "has-symbols": "^1.0.3"
156 | }
157 | },
158 | "get-symbol-description": {
159 | "version": "1.0.0",
160 | "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
161 | "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==",
162 | "dev": true,
163 | "requires": {
164 | "call-bind": "^1.0.2",
165 | "get-intrinsic": "^1.1.1"
166 | }
167 | },
168 | "has": {
169 | "version": "1.0.3",
170 | "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
171 | "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
172 | "dev": true,
173 | "requires": {
174 | "function-bind": "^1.1.1"
175 | }
176 | },
177 | "has-bigints": {
178 | "version": "1.0.2",
179 | "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz",
180 | "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==",
181 | "dev": true
182 | },
183 | "has-property-descriptors": {
184 | "version": "1.0.0",
185 | "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz",
186 | "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==",
187 | "dev": true,
188 | "requires": {
189 | "get-intrinsic": "^1.1.1"
190 | }
191 | },
192 | "has-symbols": {
193 | "version": "1.0.3",
194 | "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
195 | "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
196 | "dev": true
197 | },
198 | "has-tostringtag": {
199 | "version": "1.0.0",
200 | "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz",
201 | "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==",
202 | "dev": true,
203 | "requires": {
204 | "has-symbols": "^1.0.2"
205 | }
206 | },
207 | "ieee754": {
208 | "version": "1.1.13",
209 | "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz",
210 | "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==",
211 | "dev": true
212 | },
213 | "inherits": {
214 | "version": "2.0.4",
215 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
216 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
217 | "dev": true
218 | },
219 | "internal-slot": {
220 | "version": "1.0.3",
221 | "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz",
222 | "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==",
223 | "dev": true,
224 | "requires": {
225 | "get-intrinsic": "^1.1.0",
226 | "has": "^1.0.3",
227 | "side-channel": "^1.0.4"
228 | }
229 | },
230 | "is-arguments": {
231 | "version": "1.1.1",
232 | "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz",
233 | "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==",
234 | "dev": true,
235 | "requires": {
236 | "call-bind": "^1.0.2",
237 | "has-tostringtag": "^1.0.0"
238 | }
239 | },
240 | "is-bigint": {
241 | "version": "1.0.4",
242 | "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz",
243 | "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==",
244 | "dev": true,
245 | "requires": {
246 | "has-bigints": "^1.0.1"
247 | }
248 | },
249 | "is-boolean-object": {
250 | "version": "1.1.2",
251 | "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz",
252 | "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==",
253 | "dev": true,
254 | "requires": {
255 | "call-bind": "^1.0.2",
256 | "has-tostringtag": "^1.0.0"
257 | }
258 | },
259 | "is-callable": {
260 | "version": "1.2.4",
261 | "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz",
262 | "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==",
263 | "dev": true
264 | },
265 | "is-date-object": {
266 | "version": "1.0.5",
267 | "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz",
268 | "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==",
269 | "dev": true,
270 | "requires": {
271 | "has-tostringtag": "^1.0.0"
272 | }
273 | },
274 | "is-generator-function": {
275 | "version": "1.0.10",
276 | "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz",
277 | "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==",
278 | "dev": true,
279 | "requires": {
280 | "has-tostringtag": "^1.0.0"
281 | }
282 | },
283 | "is-negative-zero": {
284 | "version": "2.0.2",
285 | "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz",
286 | "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==",
287 | "dev": true
288 | },
289 | "is-number-object": {
290 | "version": "1.0.7",
291 | "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz",
292 | "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==",
293 | "dev": true,
294 | "requires": {
295 | "has-tostringtag": "^1.0.0"
296 | }
297 | },
298 | "is-regex": {
299 | "version": "1.1.4",
300 | "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz",
301 | "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==",
302 | "dev": true,
303 | "requires": {
304 | "call-bind": "^1.0.2",
305 | "has-tostringtag": "^1.0.0"
306 | }
307 | },
308 | "is-shared-array-buffer": {
309 | "version": "1.0.2",
310 | "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz",
311 | "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==",
312 | "dev": true,
313 | "requires": {
314 | "call-bind": "^1.0.2"
315 | }
316 | },
317 | "is-string": {
318 | "version": "1.0.7",
319 | "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz",
320 | "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==",
321 | "dev": true,
322 | "requires": {
323 | "has-tostringtag": "^1.0.0"
324 | }
325 | },
326 | "is-symbol": {
327 | "version": "1.0.4",
328 | "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz",
329 | "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==",
330 | "dev": true,
331 | "requires": {
332 | "has-symbols": "^1.0.2"
333 | }
334 | },
335 | "is-typed-array": {
336 | "version": "1.1.9",
337 | "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.9.tgz",
338 | "integrity": "sha512-kfrlnTTn8pZkfpJMUgYD7YZ3qzeJgWUn8XfVYBARc4wnmNOmLbmuuaAs3q5fvB0UJOn6yHAKaGTPM7d6ezoD/A==",
339 | "dev": true,
340 | "requires": {
341 | "available-typed-arrays": "^1.0.5",
342 | "call-bind": "^1.0.2",
343 | "es-abstract": "^1.20.0",
344 | "for-each": "^0.3.3",
345 | "has-tostringtag": "^1.0.0"
346 | }
347 | },
348 | "is-weakref": {
349 | "version": "1.0.2",
350 | "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz",
351 | "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==",
352 | "dev": true,
353 | "requires": {
354 | "call-bind": "^1.0.2"
355 | }
356 | },
357 | "isarray": {
358 | "version": "1.0.0",
359 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
360 | "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==",
361 | "dev": true
362 | },
363 | "jmespath": {
364 | "version": "0.16.0",
365 | "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",
366 | "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==",
367 | "dev": true
368 | },
369 | "object-inspect": {
370 | "version": "1.12.2",
371 | "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
372 | "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==",
373 | "dev": true
374 | },
375 | "object-keys": {
376 | "version": "1.1.1",
377 | "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
378 | "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
379 | "dev": true
380 | },
381 | "object.assign": {
382 | "version": "4.1.4",
383 | "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz",
384 | "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==",
385 | "dev": true,
386 | "requires": {
387 | "call-bind": "^1.0.2",
388 | "define-properties": "^1.1.4",
389 | "has-symbols": "^1.0.3",
390 | "object-keys": "^1.1.1"
391 | }
392 | },
393 | "punycode": {
394 | "version": "1.3.2",
395 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
396 | "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==",
397 | "dev": true
398 | },
399 | "querystring": {
400 | "version": "0.2.0",
401 | "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
402 | "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==",
403 | "dev": true
404 | },
405 | "regexp.prototype.flags": {
406 | "version": "1.4.3",
407 | "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz",
408 | "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==",
409 | "dev": true,
410 | "requires": {
411 | "call-bind": "^1.0.2",
412 | "define-properties": "^1.1.3",
413 | "functions-have-names": "^1.2.2"
414 | }
415 | },
416 | "safe-buffer": {
417 | "version": "5.2.1",
418 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
419 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
420 | "dev": true
421 | },
422 | "sax": {
423 | "version": "1.2.1",
424 | "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz",
425 | "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==",
426 | "dev": true
427 | },
428 | "serverless-glue": {
429 | "version": "2.9.0",
430 | "resolved": "https://registry.npmjs.org/serverless-glue/-/serverless-glue-2.9.0.tgz",
431 | "integrity": "sha512-gSlM7shnM0tdWF2YFdFV2vf6gyOE+aR55J22g+crWLn482alAPU3qCSUFQeWiiv/QwmcTDPFtaKwVI6AaZYPmQ==",
432 | "dev": true,
433 | "requires": {
434 | "aws-sdk": "^2.701.0"
435 | }
436 | },
437 | "side-channel": {
438 | "version": "1.0.4",
439 | "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
440 | "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
441 | "dev": true,
442 | "requires": {
443 | "call-bind": "^1.0.0",
444 | "get-intrinsic": "^1.0.2",
445 | "object-inspect": "^1.9.0"
446 | }
447 | },
448 | "string.prototype.trimend": {
449 | "version": "1.0.5",
450 | "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz",
451 | "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==",
452 | "dev": true,
453 | "requires": {
454 | "call-bind": "^1.0.2",
455 | "define-properties": "^1.1.4",
456 | "es-abstract": "^1.19.5"
457 | }
458 | },
459 | "string.prototype.trimstart": {
460 | "version": "1.0.5",
461 | "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz",
462 | "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==",
463 | "dev": true,
464 | "requires": {
465 | "call-bind": "^1.0.2",
466 | "define-properties": "^1.1.4",
467 | "es-abstract": "^1.19.5"
468 | }
469 | },
470 | "unbox-primitive": {
471 | "version": "1.0.2",
472 | "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz",
473 | "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==",
474 | "dev": true,
475 | "requires": {
476 | "call-bind": "^1.0.2",
477 | "has-bigints": "^1.0.2",
478 | "has-symbols": "^1.0.3",
479 | "which-boxed-primitive": "^1.0.2"
480 | }
481 | },
482 | "url": {
483 | "version": "0.10.3",
484 | "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
485 | "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==",
486 | "dev": true,
487 | "requires": {
488 | "punycode": "1.3.2",
489 | "querystring": "0.2.0"
490 | }
491 | },
492 | "util": {
493 | "version": "0.12.4",
494 | "resolved": "https://registry.npmjs.org/util/-/util-0.12.4.tgz",
495 | "integrity": "sha512-bxZ9qtSlGUWSOy9Qa9Xgk11kSslpuZwaxCg4sNIDj6FLucDab2JxnHwyNTCpHMtK1MjoQiWQ6DiUMZYbSrO+Sw==",
496 | "dev": true,
497 | "requires": {
498 | "inherits": "^2.0.3",
499 | "is-arguments": "^1.0.4",
500 | "is-generator-function": "^1.0.7",
501 | "is-typed-array": "^1.1.3",
502 | "safe-buffer": "^5.1.2",
503 | "which-typed-array": "^1.1.2"
504 | }
505 | },
506 | "uuid": {
507 | "version": "8.0.0",
508 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz",
509 | "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==",
510 | "dev": true
511 | },
512 | "which-boxed-primitive": {
513 | "version": "1.0.2",
514 | "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
515 | "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
516 | "dev": true,
517 | "requires": {
518 | "is-bigint": "^1.0.1",
519 | "is-boolean-object": "^1.1.0",
520 | "is-number-object": "^1.0.4",
521 | "is-string": "^1.0.5",
522 | "is-symbol": "^1.0.3"
523 | }
524 | },
525 | "which-typed-array": {
526 | "version": "1.1.8",
527 | "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.8.tgz",
528 | "integrity": "sha512-Jn4e5PItbcAHyLoRDwvPj1ypu27DJbtdYXUa5zsinrUx77Uvfb0cXwwnGMTn7cjUfhhqgVQnVJCwF+7cgU7tpw==",
529 | "dev": true,
530 | "requires": {
531 | "available-typed-arrays": "^1.0.5",
532 | "call-bind": "^1.0.2",
533 | "es-abstract": "^1.20.0",
534 | "for-each": "^0.3.3",
535 | "has-tostringtag": "^1.0.0",
536 | "is-typed-array": "^1.1.9"
537 | }
538 | },
539 | "xml2js": {
540 | "version": "0.4.19",
541 | "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
542 | "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==",
543 | "dev": true,
544 | "requires": {
545 | "sax": ">=0.6.0",
546 | "xmlbuilder": "~9.0.1"
547 | }
548 | },
549 | "xmlbuilder": {
550 | "version": "9.0.7",
551 | "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
552 | "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==",
553 | "dev": true
554 | }
555 | }
556 | }
557 |
--------------------------------------------------------------------------------
/Lab 23/publish_data.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import json
3 | import calendar
4 | import random
5 | import time
6 | import uuid
7 | import os
8 | from time import sleep
9 | from datetime import datetime
10 | from dotenv import load_dotenv
11 | from faker import Faker
12 | import pynamodb.attributes as at
13 | from pynamodb.models import Model
14 | from pynamodb.attributes import *
15 | 
16 | load_dotenv(".env")
17 | 
18 | faker = Faker()
19 | 
20 | 
21 | class UserModel(Model):
22 |     class Meta:
23 |         table_name = 'myTable'
24 |         aws_access_key_id = "XXX"
25 |         aws_secret_access_key = "XXXX"
26 | 
27 |     email = UnicodeAttribute(null=True)
28 |     first_name = UnicodeAttribute(range_key=True)
29 |     last_name = UnicodeAttribute(hash_key=True)
30 | 
31 | 
32 | def main():
33 |     # create the table on first run; ignore the error if it already exists
34 |     try:
35 |         UserModel.create_table(billing_mode='PAY_PER_REQUEST')
36 |     except Exception:
37 |         pass
38 |     sleep(2)
39 | 
40 |     average = []
41 |     for i in range(1, 50):
42 |         starttime = datetime.now()
43 |         UserModel(email=faker.email(), first_name=faker.first_name(), last_name=faker.last_name()).save()
44 |         endtime = datetime.now()
45 | 
46 |         delta = endtime - starttime
47 |         elapsed_time = int((delta.seconds * 1000) + (delta.microseconds / 1000))
48 | 
49 |         average.append(elapsed_time)
50 |         print("Execution Time: {} MS ".format(elapsed_time))
51 | 
52 |     averagetime = sum(average) / len(average)
53 |     print("\nAverage Time in MS: {} ".format(averagetime))
54 | 
55 | 
56 | main()
--------------------------------------------------------------------------------
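Once publish_data.py has seeded myTable, a hedged PynamoDB sketch for reading items back through the same model (the last name used as the hash key is a placeholder):

from publish_data import UserModel

# query by the hash key (last_name); first_name is the range key
for user in UserModel.query("Smith"):
    print(user.first_name, user.email)
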
/Lab 23/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-dynamo-s3
2 |
3 |
4 | useDotenv: true
5 |
6 |
7 | provider:
8 | name: aws
9 | region: us-east-1
10 | stackTags:
11 | product: datateam
12 | env: qa
13 | created-date: 2022-04-05
14 | team: data
15 | customer-impact: false
16 | terraform: false
17 |
18 | package:
19 | exclude:
20 | - package-lock.json
21 | - package.json
22 | - node_modules/**
23 |
24 | plugins:
25 | - serverless-glue
26 |
27 |
28 | Glue:
29 | bucketDeploy: ${env:s3_bucket}
30 | tempDirBucket: ${env:tempDirBucket}
31 | jobs:
32 | - name: ${env:job_name}
33 | scriptPath: ${env:glue_script_python_file_name}
34 | type: spark
35 | glueVersion: python3-3.0
36 | role: ${env:glue_iam}
37 | MaxConcurrentRuns: 3
38 | WorkerType: Standard
39 | NumberOfWorkers: 3
40 | Timeout: 2880
41 | MaxRetries: 1
42 | tempDir: true
43 | SupportFiles:
44 | - local_path: ${env:local_path}
45 | s3_bucket: ${env:s3_bucket}
46 | s3_prefix: ${env:s3_prefix_glue_script}
47 | execute_upload: True
48 |
49 |
50 | resources:
51 | Resources:
52 |
53 | GlueDatabase:
54 | Type: AWS::Glue::Database
55 | Properties:
56 | CatalogId: '867098943567'
57 | DatabaseInput:
58 | Name: ${env:DB_NAME}
59 |
60 | TableGlueCrawler:
61 | Type: AWS::Glue::Crawler
62 | Properties:
63 | DatabaseName: ${env:DB_NAME}
64 | Name: ${env:CRAWLER_NAME}
65 | RecrawlPolicy:
66 | RecrawlBehavior: CRAWL_EVERYTHING
67 | Role: ${env:GLUE_ROLE_ARN}
68 | SchemaChangePolicy:
69 | DeleteBehavior: DEPRECATE_IN_DATABASE
70 | Targets:
71 | DynamoDBTargets:
72 | - Path: ${env:CRAWLER_TARGET_PATH_DYNAMODB}
73 |
74 | TableGlueCrawlerAthena:
75 | Type: AWS::Glue::Crawler
76 | Properties:
77 | DatabaseName: ${env:DB_NAME}
78 | Name: ${env:CRAWLER_NAME_S3}
79 | RecrawlPolicy:
80 | RecrawlBehavior: CRAWL_EVERYTHING
81 | Role: ${env:GLUE_ROLE_ARN}
82 | SchemaChangePolicy:
83 | DeleteBehavior: DEPRECATE_IN_DATABASE
84 | Targets:
85 | S3Targets:
86 | - Path: ${env:CRAWLER_TARGET_PATH}
--------------------------------------------------------------------------------
/Lab1/handler.py:
--------------------------------------------------------------------------------
1 | # must be called as we're using zipped requirements
2 | try:
3 |     import unzip_requirements
4 | except ImportError:
5 |     pass
6 | 
7 | import json
8 | import pandas as pd
9 | 
10 | 
11 | def hello(event, context):
12 |     print("In......")
13 |     df = pd.DataFrame(data={"name": ["Soumil", "Nitin"]})
14 |     print(df)
15 |     print("\n")
16 |     print(df.shape)
17 | 
18 |     response = {
19 |         "statusCode": 200,
20 |         "body": json.dumps("Hello")
21 |     }
22 |     return response
--------------------------------------------------------------------------------
/Lab1/requirements.txt:
--------------------------------------------------------------------------------
1 | pandas
--------------------------------------------------------------------------------
/Lab1/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-pandas-py
2 |
3 | provider:
4 | name: aws
5 | runtime: python3.7
6 |
7 | plugins:
8 | - serverless-python-requirements
9 | custom:
10 | pythonRequirements:
11 | dockerizePip: true
12 | zip: true
13 |
14 | package:
15 | exclude:
16 | - '**/*'
17 | include:
18 | - '*.py'
19 | functions:
20 | hello:
21 | handler: handler.hello
--------------------------------------------------------------------------------
/Lab10/learn/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 |     body = {
6 |         "message": "Go Serverless v3.0! Your function executed successfully!",
7 |         "input": event,
8 |     }
9 | 
10 |     response = {"statusCode": 200, "body": json.dumps(body)}
11 | 
12 |     return response
13 |
--------------------------------------------------------------------------------
/Lab10/learn/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | service: aws-dynamo-project
4 | frameworkVersion: '3'
5 |
6 | provider:
7 | name: aws
8 |
9 |
10 | resources:
11 | Resources:
12 | myTable:
13 | Type: AWS::DynamoDB::Table
14 | Properties:
15 | TableName: myTable
16 | AttributeDefinitions:
17 | - AttributeName: email
18 | AttributeType: S
19 | - AttributeName: userId
20 | AttributeType: S
21 | KeySchema:
22 | - AttributeName: email
23 | KeyType: HASH
24 | - AttributeName: userId
25 | KeyType: RANGE
26 | ProvisionedThroughput:
27 | ReadCapacityUnits: 1
28 | WriteCapacityUnits: 1
29 | Tags:
30 | - Key: 'product'
31 | Value: 'soumil'
32 | - Key: 'customerImpact'
33 | Value: 'false'
--------------------------------------------------------------------------------
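The stack above only provisions the table. A minimal boto3 sketch for writing a test item against the key schema it defines, with email as the hash key and userId as the range key (region and item values are assumptions):

import boto3

# put one item into the myTable table created by the Lab10 stack
table = boto3.resource("dynamodb", region_name="us-east-1").Table("myTable")
table.put_item(Item={"email": "user@example.com", "userId": "u-001"})
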
/Lab11/dispatcher.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def dispatch(event, context):
5 |     body = {
6 |         "message": "Go Serverless v3.0! Your function executed successfully!",
7 |         "input": event,
8 |     }
9 | 
10 |     response = {"statusCode": 200, "body": json.dumps(body)}
11 | 
12 |     return response
13 |
--------------------------------------------------------------------------------
/Lab11/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | service: aws-sns-project
4 | frameworkVersion: '3'
5 |
6 | provider:
7 | name: aws
8 |
9 |
10 | functions:
11 | dispatcher:
12 | handler: dispatcher.dispatch
13 | events:
14 | - sns:
15 | arn:
16 | Fn::Join:
17 | - ':'
18 | - - 'arn:aws:sns'
19 | - Ref: 'AWS::Region'
20 | - Ref: 'AWS::AccountId'
21 | - 'MyCustomTopic'
22 | topicName: MyCustomTopic
23 |
24 | resources:
25 | Resources:
26 | SuperTopic:
27 | Type: AWS::SNS::Topic
28 | Properties:
29 | TopicName: MyCustomTopic
30 |
31 |
--------------------------------------------------------------------------------
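To exercise the dispatcher after deployment, a hedged sketch that publishes a test message to the MyCustomTopic topic defined above (the account id in the ARN and the region are placeholders):

import json
import boto3

sns = boto3.client("sns", region_name="us-east-1")
sns.publish(
    TopicArn="arn:aws:sns:us-east-1:123456789012:MyCustomTopic",
    Message=json.dumps({"hello": "world"}),
)
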
/Lab13/lambda/.env:
--------------------------------------------------------------------------------
1 | DD_API_KEY=XXXXXXXXXXX
2 | DD_SITE=datadoghq.com
3 | ENV=DEV
4 |
--------------------------------------------------------------------------------
/Lab13/lambda/handler.py:
--------------------------------------------------------------------------------
1 | # must be called as we're using zipped requirements
2 | try:
3 |     import unzip_requirements
4 | except ImportError:
5 |     pass
6 | 
7 | try:
8 |     import os
9 |     import json
10 |     import logging
11 |     from logging import StreamHandler
12 |     from datadog_api_client.v2 import ApiClient, ApiException, Configuration
13 |     from datadog_api_client.v2.api import logs_api
14 |     from datadog_api_client.v2.models import *
15 | except Exception as e:
16 |     print("Error : {} ".format(e))
17 | 
18 | 
19 | class DDHandler(StreamHandler):
20 |     """Logging handler that forwards each record to the Datadog logs API."""
21 | 
22 |     def __init__(self, configuration, service_name, ddsource):
23 |         StreamHandler.__init__(self)
24 |         self.configuration = configuration
25 |         self.service_name = service_name
26 |         self.ddsource = ddsource
27 | 
28 |     def emit(self, record):
29 |         msg = self.format(record)
30 | 
31 |         with ApiClient(self.configuration) as api_client:
32 |             api_instance = logs_api.LogsApi(api_client)
33 |             body = HTTPLog(
34 |                 [
35 |                     HTTPLogItem(
36 |                         ddsource=self.ddsource,
37 |                         ddtags="env:{}".format(os.getenv("ENV")),
38 |                         message=msg,
39 |                         service=self.service_name,
40 |                     ),
41 |                 ]
42 |             )
43 | 
44 |             try:
45 |                 # Send logs
46 |                 api_response = api_instance.submit_log(body)
47 |             except ApiException as e:
48 |                 print("Exception when calling LogsApi->submit_log: %s\n" % e)
49 | 
50 | 
51 | class Logging(object):
52 |     def __init__(self, service_name, ddsource, logger_name='demoapp'):
53 |         self.service_name = service_name
54 |         self.ddsource = ddsource
55 |         self.logger_name = logger_name
56 | 
57 |         self.configuration = Configuration()
58 |         log_format = "[%(asctime)s] %(name)s %(levelname)s %(message)s"
59 |         self.logger = logging.getLogger(self.logger_name)
60 |         formatter = logging.Formatter(log_format)
61 | 
62 |         # Logs to Datadog
63 |         dd = DDHandler(self.configuration, service_name=self.service_name, ddsource=self.ddsource)
64 |         dd.setLevel(logging.INFO)
65 |         dd.setFormatter(formatter)
66 |         self.logger.addHandler(dd)
67 | 
68 |         if logging.getLogger().hasHandlers():
69 |             logging.getLogger().setLevel(logging.INFO)
70 |         else:
71 |             logging.basicConfig(level=logging.INFO)
72 | 
73 | 
74 | def handler(event, context):
75 |     logger = Logging(service_name='lambda', ddsource='source1', logger_name='DemoApp')
76 |     response = {
77 |         "statusCode": 200,
78 |         "body": json.dumps("Hello From Lambda")
79 |     }
80 | 
81 |     logger.logger.info(response)
82 |     return response
--------------------------------------------------------------------------------
/Lab13/lambda/requirements.txt:
--------------------------------------------------------------------------------
1 | datadog-api-client
--------------------------------------------------------------------------------
/Lab13/lambda/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-ddog-demo
2 | useDotenv: true # If set to 'true', environment variables will be automatically loaded from .env files
3 |
4 | provider:
5 | name: aws
6 | runtime: python3.6
7 | memorySize: 300
8 | timeout: 10
9 | architecture: x86_64
10 | stackTags:
11 | product: datateam
12 | env: qa
13 | created-date: 2022-04-05
14 | team: data
15 | customer-impact: false
16 | terraform: false
17 |
18 | plugins:
19 | - serverless-dotenv-plugin
20 | - serverless-python-requirements
21 | custom:
22 | pythonRequirements:
23 | layer:
24 | name: python-ddog
25 | description: "Layer which contains ddog library"
26 | compatibleRuntimes:
27 | - python3.6
28 |
29 | functions:
30 | ddog_handler:
31 | handler: handler.handler
32 | layers:
33 | - Ref: PythonRequirementsLambdaLayer
34 |
--------------------------------------------------------------------------------
/Lab15/.env:
--------------------------------------------------------------------------------
1 | job_name=my-glue-script
2 | glue_script_python_file_name=glue_job.py
3 | glue_iam=XXXXXXXX
4 | local_path=glue_job.py
5 | s3_bucket=XXXXXX
6 | s3_prefix_glue_script="Scripts/"
--------------------------------------------------------------------------------
/Lab15/glue_job.py:
--------------------------------------------------------------------------------
1 | print("Hello ")
--------------------------------------------------------------------------------
/Lab15/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "requires": true,
3 | "lockfileVersion": 1,
4 | "dependencies": {
5 | "aws-sdk": {
6 | "version": "2.1151.0",
7 | "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1151.0.tgz",
8 | "integrity": "sha512-VvyzXAmWrX+klvwzA+9gSTY7blDnZOTl0UTKrqmFL4K7tOLieGLYTUkpUegcPxCjYgEg7JwvYolYUnUKiHa4oA==",
9 | "dev": true,
10 | "requires": {
11 | "buffer": "4.9.2",
12 | "events": "1.1.1",
13 | "ieee754": "1.1.13",
14 | "jmespath": "0.16.0",
15 | "querystring": "0.2.0",
16 | "sax": "1.2.1",
17 | "url": "0.10.3",
18 | "uuid": "8.0.0",
19 | "xml2js": "0.4.19"
20 | }
21 | },
22 | "base64-js": {
23 | "version": "1.5.1",
24 | "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
25 | "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
26 | "dev": true
27 | },
28 | "buffer": {
29 | "version": "4.9.2",
30 | "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz",
31 | "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==",
32 | "dev": true,
33 | "requires": {
34 | "base64-js": "^1.0.2",
35 | "ieee754": "^1.1.4",
36 | "isarray": "^1.0.0"
37 | }
38 | },
39 | "events": {
40 | "version": "1.1.1",
41 | "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz",
42 | "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==",
43 | "dev": true
44 | },
45 | "ieee754": {
46 | "version": "1.1.13",
47 | "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz",
48 | "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==",
49 | "dev": true
50 | },
51 | "isarray": {
52 | "version": "1.0.0",
53 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
54 | "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==",
55 | "dev": true
56 | },
57 | "jmespath": {
58 | "version": "0.16.0",
59 | "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",
60 | "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==",
61 | "dev": true
62 | },
63 | "punycode": {
64 | "version": "1.3.2",
65 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
66 | "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==",
67 | "dev": true
68 | },
69 | "querystring": {
70 | "version": "0.2.0",
71 | "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
72 | "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==",
73 | "dev": true
74 | },
75 | "sax": {
76 | "version": "1.2.1",
77 | "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz",
78 | "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==",
79 | "dev": true
80 | },
81 | "serverless-glue": {
82 | "version": "2.9.0",
83 | "resolved": "https://registry.npmjs.org/serverless-glue/-/serverless-glue-2.9.0.tgz",
84 | "integrity": "sha512-gSlM7shnM0tdWF2YFdFV2vf6gyOE+aR55J22g+crWLn482alAPU3qCSUFQeWiiv/QwmcTDPFtaKwVI6AaZYPmQ==",
85 | "dev": true,
86 | "requires": {
87 | "aws-sdk": "^2.701.0"
88 | }
89 | },
90 | "url": {
91 | "version": "0.10.3",
92 | "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
93 | "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=",
94 | "dev": true,
95 | "requires": {
96 | "punycode": "1.3.2",
97 | "querystring": "0.2.0"
98 | }
99 | },
100 | "uuid": {
101 | "version": "8.0.0",
102 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz",
103 | "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==",
104 | "dev": true
105 | },
106 | "xml2js": {
107 | "version": "0.4.19",
108 | "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
109 | "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==",
110 | "dev": true,
111 | "requires": {
112 | "sax": ">=0.6.0",
113 | "xmlbuilder": "~9.0.1"
114 | }
115 | },
116 | "xmlbuilder": {
117 | "version": "9.0.7",
118 | "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
119 | "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=",
120 | "dev": true
121 | }
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/Lab15/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-create-glue-job
2 | useDotenv: true
3 |
4 |
5 | provider:
6 | name: aws
7 | region: us-east-1
8 | stackTags:
9 | product: datateam
10 | env: qa
11 | created-date: 2022-04-05
12 | team: data
13 | customer-impact: false
14 | terraform: false
15 |
16 | package:
17 | exclude:
18 | - package-lock.json
19 | - package.json
20 | - node_modules/**
21 |
22 | plugins:
23 | - serverless-glue
24 | Glue:
25 | bucketDeploy: ${env:s3_bucket}
26 | jobs:
27 | - name: ${env:job_name}
28 | scriptPath: ${env:glue_script_python_file_name}
29 | type: spark
30 | glueVersion: python3-3.7
31 | role: ${env:glue_iam}
32 | MaxConcurrentRuns: 3
33 | WorkerType: Standard
34 | NumberOfWorkers: 1
35 | Timeout: 2880
36 | MaxRetries: 2
37 | SupportFiles:
38 | - local_path: ${env:local_path}
39 | s3_bucket: ${env:s3_bucket}
40 | s3_prefix: ${env:s3_prefix_glue_script}
41 | execute_upload: True
42 |
43 |
--------------------------------------------------------------------------------
/Lab16/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - test
3 | - deploy
4 |
5 |
6 | #############################################################
7 | # 🚀 Code check
8 | #############################################################
9 | code-quality:
10 | image: python:3.7
11 | stage: test
12 | allow_failure: false
13 | before_script:
14 |     - echo "*********Running code quality checks***********"
15 | - pip install flake8
16 | - pip install bandit
17 | script:
18 |     - bandit -r .
19 | - flake8 --select B,C,E,F,W,T4,B9 --ignore E501,W503,F401,F541 --max-line-length 79 --max-complexity 18
20 |
21 |
22 | #############################################################
23 | # 🚀 Deploy
24 | #############################################################
25 |
26 | deployQa:
27 | stage: deploy
28 | image: node:latest
29 | when: manual
30 | only:
31 | - main
32 | before_script:
33 | - echo "My First CICD pipeline "
34 | - echo "$NAME"
35 | - npm config set prefix /usr/local
36 | - npm install -g serverless
37 | - npm install
38 | - apt-get update
39 | - apt-get -y install python3-pip
40 | - pip3 install awscli --upgrade --user
41 | - serverless config credentials --provider aws --key "$AWS_ACCESS_KEY_ID" --secret "$AWS_SECRET_ACCESS_KEY" -o
42 | script:
43 | - sls deploy
44 | environment: deployQa
--------------------------------------------------------------------------------
/Lab16/README.md:
--------------------------------------------------------------------------------
1 | # serverless-gitlab-ci
2 |
3 | ## Description
4 |
5 | Basic Node.js CI setup for GitLab's CI service. This configuration can be used with GitLab's free tier or with a self-hosted instance. It lets developers collaborate on simple serverless projects with automatic deployment on merge, and it tags each merge to an environment for automatic tracking in the `CI / CD > Environments` tab.
6 |
7 | This setup also includes a dummy test command that will run on all branches. Ideally this would be replaced with real tests to enable a true CI experience (a minimal test sketch is included at the end of this README).
8 |
9 | ## Usage
10 |
11 | Navigate to [gitlab.com](https://gitlab.com/) and create a new repo.
12 |
13 | Then go to `settings > CI / CD` and expand the `Secret Variables` section. Click `Add New Variable` and add
14 |
15 | ```yml
16 | Key: AWS_ACCESS_KEY_ID
17 | Value: YOUR_ACCESS_KEY
18 | Protected: ✓
19 |
20 | Key: AWS_SECRET_ACCESS_KEY
21 | Value: YOUR_SECRET_VALUE
22 | Protected: ✓
23 | ```
24 |
25 | Enabling `Protected` limits these variables to the protected branches we define (e.g. staging and master, since they'll be the only ones that deploy anything). See `settings > Repository` and expand `Protected Branches` to set the access (user- or group-level access can be enabled).
26 |
27 | Next up, template this repo with
28 | ```bash
29 | serverless create --template-url https://github.com/bvincent1/serverless-gitlab-ci/master --path myService
30 | ```
31 |
32 | And set the remote to the GitLab URL
33 |
34 | ```bash
35 | cd myService
36 | git init # init repo if needed
37 | git remote add origin git@gitlab.com:username/myrepo.git
38 | git add -A
39 | git commit -a -m 'Init repo from template'
40 | git push -u origin master
41 | ```
42 |
43 | This should create the repo and automatically trigger the CI pipeline.
44 |
45 | ## Special Notes
46 |
47 | - Ideally you would use AWS IAM to create a new user just for deployments and use those keys rather than your own. This lets you track usage and keeps your personal keys separate. Best practice even suggests rotating keys regularly, but that's a whole different story.
48 |
49 | - The upstream template uses `yarn` to install node modules; this can easily be changed by modifying the `.gitlab-ci.yml` file and swapping the `yarn` commands for `npm` (the pipeline in this lab already uses `npm`).
50 |
51 | - Additional GitLab CI example: [RestfulSheets](https://gitlab.com/dotslashsolve/RestfulSheets/)
52 |
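53 | As a minimal illustration of replacing the dummy test command with a real test, the sketch below is hypothetical: it assumes `pytest` and a `test_handler.py` file that are not part of this lab, and it assumes the `hello` function in `handler.py` returns its response dict.
54 |
55 | ```python
56 | # test_handler.py -- hypothetical pytest for the hello function in handler.py
57 | import json
58 |
59 | from handler import hello
60 |
61 |
62 | def test_hello_returns_200():
63 |     # The handler should return a Lambda proxy-style response dict
64 |     response = hello(event={}, context=None)
65 |     assert response["statusCode"] == 200
66 |     assert json.loads(response["body"]) == "Hello"
67 | ```
68 |
69 | Run it locally with `pytest`, or wire it into the `test` stage of the `.gitlab-ci.yml` in place of the dummy command.
70 |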
--------------------------------------------------------------------------------
/Lab16/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | def hello(event, context):
4 |
5 | response = {
6 | "statusCode": 200,
7 | "body": json.dumps("Hello")
8 | }
9 |
10 |     return response
--------------------------------------------------------------------------------
/Lab16/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "myService",
3 | "version": "1.0.0",
4 | "description": "Simple serverless gitlab ci template",
5 | "main": "handler.js",
6 | "author": "Ben Vincent ",
7 | "license": "MIT",
8 | "private": false,
9 | "scripts": {
10 | "test": "echo \"serverless invoke -f hello\n\" "
11 | }
12 | }
--------------------------------------------------------------------------------
/Lab16/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-hello-cicd
2 |
3 | provider:
4 | name: aws
5 | runtime: python3.7
6 |
7 | functions:
8 | hello:
9 | handler: handler.hello
10 |
--------------------------------------------------------------------------------
/Lab17/.env:
--------------------------------------------------------------------------------
1 | StreamName=my-input-streams
2 | ShardCount=1
3 | RetentionPeriodHours=24
4 | my_aws_access_key_id=XXXXXX
5 | my_aws_secret_access_key=XXXXXXX
6 |
--------------------------------------------------------------------------------
/Lab17/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import json
3 | import boto3
4 | import base64
5 |
6 | def lambda_handler(event, context):
7 |
8 | print("Length: {} ".format(len(event['Records'])))
9 |
10 | for record in event['Records']:
11 | payload = base64.b64decode(record['kinesis']['data'])
12 | de_serialize_payload = json.loads(payload)
13 | print("de_serialize_payload", de_serialize_payload, type(de_serialize_payload))
14 | print("**************** ALL SET *********************")
15 | print("Length: {} ".format(len(event['Records'])))
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/Lab17/publish-fake-data.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import json
3 | from datetime import datetime
4 | import calendar
5 | import random
6 | import time
7 | import json
8 | from faker import Faker
9 | import uuid
10 | from time import sleep
11 | import os
12 | from dotenv import load_dotenv
13 |
14 | load_dotenv(".env")
15 |
16 |
17 | my_stream_name = os.getenv("StreamName")
18 | print("Stream Name :{} ".format(my_stream_name))
19 |
20 |
21 | kinesis_client = boto3.client('kinesis',
22 | region_name='us-east-1',
23 | aws_access_key_id=os.getenv("my_aws_access_key_id"),
24 | aws_secret_access_key=os.getenv("my_aws_secret_access_key")
25 | )
26 | faker = Faker()
27 |
28 |
29 | for i in range(1, 10):
30 | json_data = {
31 | "name":faker.name(),
32 | "city":faker.city(),
33 | "phone":faker.phone_number(),
34 | "id":uuid.uuid4().__str__(),
35 | "customer_id":random.randint(1,5)
36 | }
37 | print(json_data)
38 | sleep(0.5)
39 |
40 | put_response = kinesis_client.put_record(
41 | StreamName=my_stream_name,
42 | Data=json.dumps(json_data),
43 | PartitionKey='name')
44 | print(put_response)
45 |
46 |
47 |
48 |
49 |
--------------------------------------------------------------------------------
/Lab17/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-kinesis-streams
2 |
3 | useDotenv: true
4 |
5 |
6 | provider:
7 | name: aws
8 | region: us-east-1
9 | runtime: python3.7
10 | memorySize: 300
11 | timeout: 10
12 | architecture: x86_64
13 | stackTags:
14 | product: datateam
15 | env: qa
16 | created-date: 2022-04-05
17 | team: data
18 | customer-impact: false
19 | terraform: false
20 |
21 |
22 | functions:
23 | lambda:
24 | handler: lambda_function.lambda_handler
25 | events:
26 | - stream:
27 | type: kinesis
28 | arn:
29 | Fn::GetAtt:
30 | - KinesisDataStreams
31 | - Arn
32 |
33 |
34 | resources:
35 | Resources:
36 | KinesisDataStreams:
37 | Type: AWS::Kinesis::Stream
38 | Properties:
39 | Name: ${env:StreamName}
40 | RetentionPeriodHours: ${env:RetentionPeriodHours}
41 | StreamModeDetails:
42 | StreamMode: ON_DEMAND
43 |
--------------------------------------------------------------------------------
/Lab18/lambda_handler.py:
--------------------------------------------------------------------------------
1 | try:
2 | import json
3 |
4 | import os
5 | import shutil
6 | import uuid
7 |
8 |
9 | from selenium import webdriver
10 | from selenium.webdriver.common.keys import Keys
11 | from selenium.webdriver.common.by import By
12 | from selenium.webdriver.chrome.options import Options
13 | from selenium.webdriver.support.ui import WebDriverWait
14 | from selenium.webdriver import Chrome
15 |
16 | import base64
17 | import datetime
18 | from dateutil.parser import parse
19 | from datetime import datetime, timedelta
20 | from dateutil.tz import tzutc
21 | from time import sleep
22 | from enum import Enum
23 | import hashlib
24 |
25 | print("All Modules are ok ...")
26 |
27 | except Exception as e:
28 |
29 |     print("Error in Imports : {} ".format(e))
30 |
31 |
32 |
33 | class WebDriver(object):
34 |
35 | def __init__(self):
36 | self.options = Options()
37 |
38 | self.options.binary_location = '/opt/headless-chromium'
39 | self.options.add_argument('--headless')
40 | self.options.add_argument('--no-sandbox')
41 | self.options.add_argument('--start-maximized')
42 | self.options.add_argument('--start-fullscreen')
43 | self.options.add_argument('--single-process')
44 | self.options.add_argument('--disable-dev-shm-usage')
45 |
46 | def get(self):
47 | driver = Chrome('/opt/chromedriver', options=self.options)
48 | return driver
49 |
50 |
51 |
52 | class Hasher(object):
53 | def __init__(self) -> None:
54 | pass
55 |
56 | def get_hash(self, data):
57 | """
58 | Returns the Hash for any data
59 | :return string
60 | """
61 | return hashlib.md5(repr(data).encode("UTF-8")).hexdigest().__str__()
62 |
63 |
64 |
65 | class PaginationScrollBottom(object):
66 | """
67 | {
68 | "sleep_interval_time_between_pagination":2
69 | }
70 | """
71 |
72 | def __init__(
73 | self, driver, sleep_interval_time_between_pagination, max_iteration=500
74 | ):
75 | self.driver = driver
76 | self.sleep_interval_time_between_pagination = (
77 | sleep_interval_time_between_pagination
78 | )
79 | self.max_iteration = max_iteration
80 | self.last_height = self.driver.execute_script(
81 | "return document.body.scrollHeight"
82 | )
83 |
84 | def paginate(self):
85 |
86 | for i in range(0, self.max_iteration):
87 |
88 | # Scroll down to bottom
89 | self.driver.execute_script(
90 | "window.scrollTo(0, document.body.scrollHeight);"
91 | )
92 |
93 | # Wait to load page
94 | sleep(self.sleep_interval_time_between_pagination)
95 |
96 | # Calculate new scroll height and compare with last scroll height
97 | new_height = self.driver.execute_script("return document.body.scrollHeight")
98 |
99 | if new_height == self.last_height:
100 | break
101 |
102 | self.last_height = new_height
103 |
104 | html = self.driver.page_source
105 |
106 | return html
107 |
108 |
109 | class PaginationClickAndScroll(object):
110 |
111 | """
112 | {
113 | "commands":[
114 | {
115 | "selector":"xpath",
116 | "path":"/html/body/div[1]/div/form/div[1]/div/input",
117 | "command":"type",
118 | "search":"software Engineer"
119 | },
120 | {
121 | "command":"sleep",
122 | "time":"1"
123 | },
124 | {
125 | "selector":"xpath",
126 | "path":"//html/body/div[1]/div/form/button",
127 | "command":"click"
128 | },
129 | {
130 | "command":"scroll_bottom",
131 | "sleep_interval_time_between_pagination":2
132 | }
133 | ]
134 | }
135 | """
136 |
137 | def __init__(self, driver, jsonaction, max_iteration=500):
138 | self.driver = driver
139 | self.jsonaction = jsonaction
140 | self.max_iteration = max_iteration
141 | self.last_height = self.driver.execute_script(
142 | "return document.body.scrollHeight"
143 | )
144 |
145 | def paginate(self):
146 | try:
147 | commands = self.jsonaction.get("commands", [])
148 | _commands = Commands(driver=self.driver, commands=commands)
149 | response = _commands.execute()
150 | return True
151 | except Exception as e:
152 | return False
153 |
154 |
155 | class Commands(object):
156 |
157 | """
158 | {
159 | "commands":[
160 | {
161 | "selector":"xpath",
162 | "path":"/html/body/div[1]/div/form/div[1]/div/input",
163 | "command":"type",
164 | "search":"software Engineer"
165 | },
166 | {
167 | "command":"sleep",
168 | "time":"1"
169 | },
170 | {
171 | "selector":"xpath",
172 | "path":"//html/body/div[1]/div/form/button",
173 | "command":"click"
174 | },
175 | {
176 | "command":"scroll_bottom",
177 | "sleep_interval_time_between_pagination":2
178 | }
179 | ]
180 | }
181 | """
182 |
183 | def __init__(self, driver, commands):
184 |
185 | self.driver = driver
186 | self.commands = commands
187 |
188 | def execute(self):
189 |
190 | if self.commands is not None:
191 |
192 | for command in self.commands:
193 |
194 | try:
195 | if command.get("command").lower() == "sleep":
196 | sleep(command.get("time", 0))
197 |
198 | if command.get("command").lower() == "click":
199 |
200 | if command.get("selector").lower() == "xpath":
201 | try:
202 | self.driver.find_element_by_xpath(
203 | command.get("path")
204 | ).click()
205 | except Exception as e:
206 | print("Click error ", e)
207 | raise Exception ("error")
208 |
209 | if command.get("selector").lower() == "id":
210 | try:
211 | self.driver.find_element_by_id(
212 | command.get("path")
213 | ).click()
214 | except Exception as e:
215 | raise Exception ("error")
216 |
217 | if command.get("command").lower() == "type":
218 |
219 | if command.get("selector").lower() == "xpath":
220 | try:
221 | self.driver.find_element_by_xpath(
222 | command.get("path")
223 | ).send_keys(command.get("search"))
224 | except Exception as e:
225 | print("error: {}".format(e))
226 |
227 | if command.get("selector") == "id":
228 | try:
229 | self.driver.find_element_by_xpath(
230 | command.get("path")
231 | ).send_keys(command.get("search"))
232 | except Exception as e:
233 | pass
234 |
235 | if command.get("command").lower() == "scroll_bottom":
236 | _helper = PaginationScrollBottom(
237 | driver=self.driver,
238 | sleep_interval_time_between_pagination=command.get(
239 | "sleep_interval_time_between_pagination", 2
240 | ),
241 | )
242 | _helper.paginate()
243 |
244 | except Exception as e:pass
245 |
246 |
247 | class WindowTracker(Enum):
248 | MAIN_WINDOW = 0
249 | TAB_WINDOW = 1
250 |
251 |
252 | def get_page_data(driver, PARENT, LOOP, xp_name, xp_review, xp_text):
253 |
254 | data = []
255 |
256 | parent_element = driver.find_element_by_xpath(PARENT) # get the Parent
257 | loop_element = parent_element.find_elements_by_xpath(LOOP) # [xx1, xx2, xx3 ........]
258 | sleep(4)
259 | hasher = Hasher()
260 |
261 | for item in loop_element:
262 |
263 | _data = {}
264 |
265 | try:
266 | _data["name"] = item.find_element_by_xpath(xp_name).text.strip()
267 | except Exception as e:
268 | pass
269 |
270 | review_data = item.find_element_by_xpath(xp_review).text
271 |
272 | try:
273 | _data["review_date"] = review_data.split("Degree")[0].split("Reviewed:")[1]
274 | except Exception as e:
275 | pass
276 |
277 | try:
278 | _data["review_major"] = review_data.split("Year:")[0].split("Degree:")[1]
279 | except Exception as e:
280 | pass
281 |
282 | try:
283 |                 _data["review_year"] = review_data.split("Year:")[1]
284 | except Exception as e:
285 | pass
286 |
287 | try:
288 | _data["review_text"] = item.find_element_by_xpath(xp_text).text
289 | except Exception as e:
290 | pass
291 |
292 | hash_key= hasher.get_hash(data=_data)
293 | _data["review_hash_key"] = hash_key
294 |
295 | data.append(_data)
296 | print(_data)
297 |
298 | return data
299 |
300 |
301 | def start_scrape(url, id, name):
302 |
303 | # ============================================================
304 | PARENT = """html/body/div[@id='under_splash']/main/div[@class='review__page']/div[@class='fixed-width mdc-layout-grid']"""
305 | LOOP = """.//school-review-list[@id='reviews']/div[@class='reviews']/div[@class='reviews__item']"""
306 | xp_name = """.//div[@class='reviews__text--name']"""
307 | xp_review = ".//div[@class='mdc-layout-grid__inner reviews__item--inner']/div[@class]/ul[@class='reviews__details']"
308 | xp_text = ".//div[@class='reviews__text']"
309 |
310 | # =============================================================
311 |
312 | # path = os.path.join(os.getcwd(), "chromedriver.exe")
313 | # driver_ = WebDriver(path=path)
314 | # driver = driver_.get(headless=False)
315 |
316 | instance_ = WebDriver()
317 | driver = instance_.get()
318 |
319 | driver.get(url)
320 | sleep(2)
321 |
322 | commands = [{"command": "scroll_bottom", "sleep_interval_time_between_pagination": 2},]
323 | commands = Commands(commands=commands, driver=driver)
324 | commands.execute()
325 | sleep(3)
326 |
327 | global_data = []
328 |
329 | for i in range(1, 100):
330 | try:
331 | url_ = "{}/?page={}".format(url, i)
332 | driver.get(url_)
333 | sleep(2)
334 |
335 | try:
336 | parent_nav = driver.find_element_by_xpath(""".//nav[@class='pagination text--centered']""")
337 |
338 | if str(i) in parent_nav.text:
339 | print("Valid page : {} ".format(i))
340 | sleep(2)
341 | data = get_page_data( driver=driver,PARENT=PARENT ,LOOP=LOOP, xp_name=xp_name, xp_review=xp_review, xp_text=xp_text)
342 | for x in data:
343 | global_data.append(x)
344 |
345 | except Exception as e:
346 |
347 | if i==1:
348 | data = get_page_data( driver=driver,PARENT=PARENT ,LOOP=LOOP, xp_name=xp_name, xp_review=xp_review, xp_text=xp_text)
349 | for x in data:
350 | global_data.append(x)
351 |
352 | break
353 |
354 | except Exception as e:
355 | print("Complete", e)
356 | break
357 |
358 | if global_data != []:
359 |
360 | _ = {
361 | "id":id,
362 | "name":name,
363 | "reviews":global_data,
364 | "total_reviews" : len(global_data),
365 | "ingestion_date":datetime.now().__str__()
366 |
367 | }
368 |
369 | driver.quit()
370 |
371 | return global_data
372 |
373 |
374 | def lambda_handler(event, context):
375 | print("In...")
376 | url = "https://www.gradreports.com/colleges/stanford-university"
377 | response = start_scrape(url=url, id="121", name="stanford")
378 | print(response)
379 | return True
380 |
--------------------------------------------------------------------------------
/Lab18/serverless.yml:
--------------------------------------------------------------------------------
1 | service: sls-scrapper
2 |
3 | useDotenv: true
4 |
5 |
6 | provider:
7 | name: aws
8 | region: us-east-1
9 | runtime: python3.7
10 | memorySize: 512
11 | timeout: 10
12 | architecture: x86_64
13 | stackTags:
14 | product: datateam
15 | env: qa
16 | created-date: 2022-04-05
17 | team: data
18 | customer-impact: false
19 | terraform: false
20 |
21 |
22 | functions:
23 | lambda:
24 | handler: lambda_handler.lambda_handler
25 | timeout: 850
26 | layers:
27 | - arn:aws:lambda:us-east-1:867098943567:layer:selenium:1
28 |
29 |
--------------------------------------------------------------------------------
/Lab2/lambda-layers/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/Lab2/lambda-layers/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy==1.21.0
--------------------------------------------------------------------------------
/Lab2/lambda-layers/serverless.yml:
--------------------------------------------------------------------------------
1 | service: layer-numpy
2 |
3 | frameworkVersion: '3'
4 |
5 | provider:
6 | name: aws
7 | runtime: python3.8
8 | lambdaHashingVersion: 20201221
9 |
10 | plugins:
11 | - serverless-python-requirements
12 | custom:
13 | pythonRequirements:
14 |
15 |
16 | dockerizePip: true
17 | layer:
18 | name: python-numpy
19 | description: "Layer which contains numpy library"
20 | compatibleRuntimes:
21 | - python3.8
--------------------------------------------------------------------------------
/Lab20/sqlserver/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.2"
2 | services:
3 |
4 | sql-server-db:
5 | container_name: sql-server-db
6 | image: mcr.microsoft.com/mssql/server:2022-latest
7 | ports:
8 | - "1433:1433"
9 | environment:
10 | SA_PASSWORD: "Master1@345"
11 | ACCEPT_EULA: "Y"
--------------------------------------------------------------------------------
/Lab22/.env:
--------------------------------------------------------------------------------
1 | TopicName=my-custom-topic.fifo
2 | QueueName=epic-events.fifo
3 | ACCOUNT=XXXX
--------------------------------------------------------------------------------
/Lab22/consumer.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import os
4 | import boto3
5 |
6 | def consume(event, context):
7 | print(event)
8 | return 200
9 |
--------------------------------------------------------------------------------
/Lab22/serverless.yml:
--------------------------------------------------------------------------------
1 | service: events
2 | frameworkVersion: '3'
3 |
4 | provider:
5 | name: aws
6 | runtime: python3.7
7 | region: us-east-1
8 | memorySize: 300
9 | timeout: 300
10 | architecture: x86_64
11 |
12 | useDotenv: true
13 |
14 |
15 | functions:
16 |
17 | hello:
18 | handler: consumer.consume
19 | name: events-consumer
20 | events:
21 | - sqs:
22 | arn:
23 | Fn::GetAtt:
24 | - MyQueue
25 | - Arn
26 | package:
27 | patterns:
28 | - '!node_modules/**'
29 |
30 |
31 | resources:
32 |
33 | Resources:
34 |
35 | SNSTopic:
36 | Type: AWS::SNS::Topic
37 | Properties:
38 | TopicName: ${env:TopicName}
39 | ContentBasedDeduplication: false
40 | FifoTopic: true
41 |
42 | MyQueue:
43 | Type: "AWS::SQS::Queue"
44 | Properties:
45 | FifoQueue: true
46 | QueueName: ${env:QueueName}
47 |
48 | MyQueuePolicy:
49 | Type: AWS::SQS::QueuePolicy
50 | Properties:
51 | Queues:
52 | - !Ref MyQueue
53 | PolicyDocument:
54 | Statement:
55 | -
56 | Sid: "MySQSPolicy001"
57 | Effect: "Allow"
58 | Principal: "*"
59 | Action: "sqs:SendMessage"
60 | Resource: !GetAtt MyQueue.Arn
61 | Condition:
62 | ArnEquals:
63 | aws:SourceArn: arn:aws:sns:us-east-1:${env:ACCOUNT}:${env:TopicName}
64 |
65 | SnsSubscription:
66 | Type: AWS::SNS::Subscription
67 | Properties:
68 | Protocol: sqs
69 | Endpoint: !GetAtt MyQueue.Arn
70 | RawMessageDelivery: 'true'
71 | TopicArn: arn:aws:sns:us-east-1:${env:ACCOUNT}:${env:TopicName}
72 |
73 |
74 |
--------------------------------------------------------------------------------
/Lab24/.env:
--------------------------------------------------------------------------------
1 | DYNAMO_DB_TABLE_NAME=myTable
2 |
3 | bucketname=XXXX
4 | S3_TABLE_NAME=dynamo_db
5 |
6 |
7 | StreamName=dynamodb-streams
8 | RetentionPeriodHours=24
9 | DEV_AWS_ACCESS_KEY=XXXXX
10 | DEV_AWS_SECRET_KEY=XXXXX
11 | DEV_AWS_REGION_NAME=us-east-1
12 |
13 |
14 | DB_NAME='datalakedb'
15 | DB_TABLE_NAME='table_name=data'
16 | CRAWLER_NAME_S3='s3_dynamo_db_crawler'
17 | CRAWLER_TARGET_PATH=s3://XXXXXX/table_name=dynamo_db
18 | GLUE_ROLE_ARN=arn:aws:iam::XXXXXX:role/service-role/AXXXXXXX
--------------------------------------------------------------------------------
/Lab24/lambda_function.py:
--------------------------------------------------------------------------------
1 | try:
2 | import json
3 | import json
4 | import boto3
5 | import base64
6 | import os
7 | import datetime
8 | import uuid
9 | from datetime import datetime
10 | from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
11 | except Exception as e:
12 | print("Error : {} ".format(e))
13 |
14 | def unmarshall(dynamo_obj: dict) -> dict:
15 | """Convert a DynamoDB dict into a standard dict."""
16 | deserializer = TypeDeserializer()
17 | return {k: deserializer.deserialize(v) for k, v in dynamo_obj.items()}
18 |
19 |
20 | def marshall(python_obj: dict) -> dict:
21 | """Convert a standard dict into a DynamoDB ."""
22 | serializer = TypeSerializer()
23 | return {k: serializer.serialize(v) for k, v in python_obj.items()}
24 |
25 | class AWSS3(object):
26 | """Helper class to which add functionality on top of boto3 """
27 |
28 | def __init__(
29 | self,
30 | bucket=os.getenv("bucketname"),
31 | aws_access_key_id=os.getenv("DEV_AWS_ACCESS_KEY"),
32 | aws_secret_access_key=os.getenv("DEV_AWS_SECRET_KEY"),
33 | region_name=os.getenv("DEV_AWS_REGION_NAME"),
34 | ):
35 | self.BucketName = bucket
36 | self.client = boto3.client(
37 | "s3",
38 | aws_access_key_id=aws_access_key_id,
39 | aws_secret_access_key=aws_secret_access_key,
40 | region_name=region_name,
41 | )
42 |
43 | def put_files(self, Response=None, Key=None):
44 | """
45 | Put the File on S3
46 | :return: Bool
47 | """
48 | try:
49 | response = self.client.put_object(
50 | ACL="private", Body=Response, Bucket=self.BucketName, Key=Key
51 | )
52 | return "ok"
53 | except Exception as e:
54 | raise Exception("Failed to upload records. Error : {}".format(e))
55 |
56 | def item_exists(self, Key):
57 | """Given key check if the items exists on AWS S3 """
58 | try:
59 | response_new = self.client.get_object(
60 | Bucket=self.BucketName, Key=str(Key))
61 | return True
62 | except Exception as e:
63 | return False
64 |
65 | def get_item(self, Key):
66 | """Gets the Bytes Data from AWS S3 """
67 | try:
68 | response_new = self.client.get_object(
69 | Bucket=self.BucketName, Key=str(Key))
70 | return response_new["Body"].read()
71 | except Exception as e:
72 | print("Error :{}".format(e))
73 | return False
74 |
75 | class Datetime(object):
76 | @staticmethod
77 | def get_year_month_day():
78 | """
79 | Return Year month and day
80 | :return: str str str
81 | """
82 | dt = datetime.now()
83 | year = dt.year
84 | month = dt.month
85 | day = dt.day
86 | return year, month, day
87 |
88 |
89 | def flatten_dict(data, parent_key='', sep='_'):
90 | """Flatten data into a single dict"""
91 | try:
92 | items = []
93 | for key, value in data.items():
94 | new_key = parent_key + sep + key if parent_key else key
95 | if type(value) == dict:
96 | items.extend(flatten_dict(value, new_key, sep=sep).items())
97 | else:
98 | items.append((new_key, value))
99 | return dict(items)
100 | except Exception as e:
101 | return {}
102 |
103 |
104 | def lambda_handler(event, context):
105 |
106 | print("event", event)
107 | print("\n")
108 |
109 | print("Length: {} ".format(len(event['Records'])))
110 |
111 | for record in event['Records']:
112 |
113 | payload = base64.b64decode(record['kinesis']['data'])
114 | de_serialize_payload = json.loads(payload)
115 |
116 | print("de_serialize_payload", de_serialize_payload, type(de_serialize_payload))
117 |
118 | eventName = de_serialize_payload.get("eventName")
119 | print("eventName", eventName)
120 |
121 |
122 | json_data = None
123 |
124 | if eventName.strip().lower() == "INSERT".lower():
125 | json_data = de_serialize_payload.get("dynamodb").get("NewImage")
126 |
127 | if eventName.strip().lower() == "MODIFY".lower():
128 | json_data = de_serialize_payload.get("dynamodb").get("NewImage")
129 |
130 | if eventName.strip().lower() == "REMOVE".lower():
131 | json_data = de_serialize_payload.get("dynamodb").get("OldImage")
132 |
133 | if json_data is not None:
134 |
135 | json_data_unmarshal = unmarshall(json_data)
136 | json_data_unmarshal["awsRegion"] = de_serialize_payload.pop("awsRegion")
137 | json_data_unmarshal["eventID"] = de_serialize_payload.pop("eventID")
138 | json_data_unmarshal["eventName"] = de_serialize_payload.pop("eventName")
139 | json_data_unmarshal["eventSource"] = de_serialize_payload.pop("eventSource")
140 |
141 |
142 | helper = AWSS3()
143 | year, month, day = Datetime.get_year_month_day()
144 | _final_processed_json = flatten_dict(json_data_unmarshal)
145 | helper.put_files(
146 | Key="table_name={}/year={}/month={}/day={}/{}.json".format(os.getenv("S3_TABLE_NAME"),year, month, day, uuid.uuid4().__str__()),
147 | Response=json.dumps(_final_processed_json)
148 | )
149 | print("_final_processed_json", _final_processed_json)
150 |
151 |
152 | print("**************** ALL SET *********************")
153 | print("Length: {} ".format(len(event['Records'])))
154 |
155 |
--------------------------------------------------------------------------------
/Lab24/serverless.yml:
--------------------------------------------------------------------------------
1 |
2 | service: dynamodb-archive-project
3 | frameworkVersion: '3'
4 |
5 |
6 |
7 | provider:
8 | name: aws
9 | runtime: python3.7
10 | memorySize: 512
11 | timeout: 600
12 | architecture: x86_64
13 | stackTags:
14 | product: job-topic-model
15 | env: qa
16 | created-date: 2022-04-26
17 | team: python-dev
18 | customer-impact: false
19 | terraform: false
20 |
21 | useDotenv: true
22 |
23 |
24 |
25 | functions:
26 | lambda:
27 | handler: lambda_function.lambda_handler
28 | environment:
29 |
30 | DEV_AWS_ACCESS_KEY: ${env:DEV_AWS_ACCESS_KEY}
31 | DEV_AWS_SECRET_KEY: ${env:DEV_AWS_SECRET_KEY}
32 | DEV_AWS_REGION_NAME: ${env:DEV_AWS_REGION_NAME}
33 | bucketname: ${env:bucketname}
34 | S3_TABLE_NAME: ${env:S3_TABLE_NAME}
35 |
36 | events:
37 | - stream:
38 | type: kinesis
39 | arn:
40 | Fn::GetAtt:
41 | - KinesisDataStreams
42 | - Arn
43 |
44 |
45 | resources:
46 | Resources:
47 |
48 | S3Bucket:
49 | Type: AWS::S3::Bucket
50 | Properties:
51 | BucketName: ${env:bucketname}
52 |
53 | KinesisDataStreams:
54 | Type: AWS::Kinesis::Stream
55 | Properties:
56 | Name: ${env:StreamName}
57 | RetentionPeriodHours: ${env:RetentionPeriodHours}
58 | StreamModeDetails:
59 | StreamMode: ON_DEMAND
60 |
61 | DynamoDBTable:
62 | Type: AWS::DynamoDB::Table
63 | Properties:
64 | TableName: ${env:DYNAMO_DB_TABLE_NAME}
65 | AttributeDefinitions:
66 | - AttributeName: pk
67 | AttributeType: S
68 | - AttributeName: sk
69 | AttributeType: S
70 | KeySchema:
71 | - AttributeName: pk
72 | KeyType: HASH
73 | - AttributeName: sk
74 | KeyType: RANGE
75 | BillingMode: PAY_PER_REQUEST
76 | TableClass: STANDARD
77 | PointInTimeRecoverySpecification:
78 | PointInTimeRecoveryEnabled: true
79 | KinesisStreamSpecification:
80 | StreamArn: !GetAtt KinesisDataStreams.Arn
81 |
82 | GlueDatabase:
83 | Type: AWS::Glue::Database
84 | Properties:
85 | CatalogId: '867098943567'
86 | DatabaseInput:
87 | Name: ${env:DB_NAME}
88 |
89 | TableGlueCrawlerAthena:
90 | Type: AWS::Glue::Crawler
91 | Properties:
92 | DatabaseName: ${env:DB_NAME}
93 | Name: ${env:CRAWLER_NAME_S3}
94 | RecrawlPolicy:
95 | RecrawlBehavior: CRAWL_EVERYTHING
96 | Role: ${env:GLUE_ROLE_ARN}
97 | SchemaChangePolicy:
98 | DeleteBehavior: DEPRECATE_IN_DATABASE
99 | Targets:
100 | S3Targets:
101 | - Path: ${env:CRAWLER_TARGET_PATH}
--------------------------------------------------------------------------------
/Lab25/.env:
--------------------------------------------------------------------------------
1 | DYNAMO_DB_TABLE_NAME=myTable
2 |
3 |
4 | StreamName=dynamodb-streams
5 | RetentionPeriodHours=24
6 |
7 | DEV_AWS_ACCESS_KEY=XXXXXXXXX
8 | DEV_AWS_SECRET_KEY=XXXXXXXXXXX
9 |
10 | DEV_AWS_REGION_NAME=us-east-1
11 |
12 | TopicName=my-custom-topic
13 | ACCOUNT=XXXXX
--------------------------------------------------------------------------------
/Lab25/lambda_function.py:
--------------------------------------------------------------------------------
1 | try:
2 | import json
3 | import json
4 | import boto3
5 | import base64
6 | import os
7 | import datetime
8 | import uuid
9 | from datetime import datetime
10 | from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
11 | except Exception as e:
12 | print("Error : {} ".format(e))
13 |
14 | def unmarshall(dynamo_obj: dict) -> dict:
15 | """Convert a DynamoDB dict into a standard dict."""
16 | deserializer = TypeDeserializer()
17 | return {k: deserializer.deserialize(v) for k, v in dynamo_obj.items()}
18 |
19 |
20 | def marshall(python_obj: dict) -> dict:
21 | """Convert a standard dict into a DynamoDB ."""
22 | serializer = TypeSerializer()
23 | return {k: serializer.serialize(v) for k, v in python_obj.items()}
24 |
25 | class AwsSNS:
26 | def __init__(self, aws_access_key_id, aws_secret_access_key, region_name):
27 | self.sns_client = boto3.client(
28 | "sns",
29 | aws_access_key_id=aws_access_key_id,
30 | aws_secret_access_key=aws_secret_access_key,
31 | region_name=region_name,
32 | )
33 | self.topic_arn = os.getenv("TopicArn")
34 |
35 | def publish_msg_to_sns(self, message):
36 | try:
37 | print("self.topic_arn", self.topic_arn)
38 |
39 | response = self.sns_client.publish(
40 | TopicArn=self.topic_arn,
41 | Message=message,
42 | Subject=str(os.getenv("ENV")) + " DynamoDB Events",
43 | )
44 | return {
45 | "statusCode": response["ResponseMetadata"]["HTTPStatusCode"],
46 | "message": "Message published to SNS",
47 | }
48 | except Exception as e:
49 | print("error: {}".format(e))
50 | return {"statusCode": -1, "message": "error: {}".format(e)}
51 |
52 |
53 | class Datetime(object):
54 | @staticmethod
55 | def get_year_month_day():
56 | """
57 | Return Year month and day
58 | :return: str str str
59 | """
60 | dt = datetime.now()
61 | year = dt.year
62 | month = dt.month
63 | day = dt.day
64 | return year, month, day
65 |
66 |
67 | def flatten_dict(data, parent_key='', sep='_'):
68 | """Flatten data into a single dict"""
69 | try:
70 | items = []
71 | for key, value in data.items():
72 | new_key = parent_key + sep + key if parent_key else key
73 | if type(value) == dict:
74 | items.extend(flatten_dict(value, new_key, sep=sep).items())
75 | else:
76 | items.append((new_key, value))
77 | return dict(items)
78 | except Exception as e:
79 | return {}
80 |
81 |
82 | def lambda_handler(event, context):
83 |
84 | print("event", event)
85 | print("\n")
86 |
87 | print("Length: {} ".format(len(event['Records'])))
88 |
89 | for record in event['Records']:
90 |
91 | payload = base64.b64decode(record['kinesis']['data'])
92 | de_serialize_payload = json.loads(payload)
93 |
94 | print("de_serialize_payload", de_serialize_payload, type(de_serialize_payload))
95 |
96 | eventName = de_serialize_payload.get("eventName")
97 | print("eventName", eventName)
98 |
99 |
100 | json_data = None
101 |
102 | if eventName.strip().lower() == "INSERT".lower():
103 | json_data = de_serialize_payload.get("dynamodb").get("NewImage")
104 |
105 | if eventName.strip().lower() == "MODIFY".lower():
106 | json_data = de_serialize_payload.get("dynamodb").get("NewImage")
107 |
108 | if eventName.strip().lower() == "REMOVE".lower():
109 | json_data = de_serialize_payload.get("dynamodb").get("OldImage")
110 |
111 | if json_data is not None:
112 | json_data_unmarshal = unmarshall(json_data)
113 | json_data_unmarshal["awsRegion"] = de_serialize_payload.pop("awsRegion")
114 | json_data_unmarshal["eventID"] = de_serialize_payload.pop("eventID")
115 | json_data_unmarshal["eventName"] = de_serialize_payload.pop("eventName")
116 | json_data_unmarshal["eventSource"] = de_serialize_payload.pop("eventSource")
117 |
118 | _final_processed_json = flatten_dict(json_data_unmarshal)
119 | print("_final_processed_json", _final_processed_json)
120 | sns_helper = AwsSNS(
121 | aws_access_key_id=os.getenv("DEV_AWS_ACCESS_KEY"),
122 | aws_secret_access_key=os.getenv("DEV_AWS_SECRET_KEY"),
123 | region_name=os.getenv("DEV_AWS_REGION_NAME"),
124 | )
125 | response_sns = sns_helper.publish_msg_to_sns(message=
126 | json.dumps(_final_processed_json)
127 | )
128 |
129 | print("response_sns", response_sns)
130 |
131 | print("**************** ALL SET *********************")
132 | print("Length: {} ".format(len(event['Records'])))
133 |
134 |
--------------------------------------------------------------------------------
/Lab25/serverless.yml:
--------------------------------------------------------------------------------
1 |
2 | service: dynamodb-down-stream-project
3 | frameworkVersion: '3'
4 |
5 |
6 |
7 | provider:
8 | name: aws
9 | runtime: python3.7
10 | memorySize: 512
11 | timeout: 600
12 | architecture: x86_64
13 | stackTags:
14 | product: job-topic-model
15 | env: qa
16 | created-date: 2022-04-26
17 | team: python-dev
18 | customer-impact: false
19 | terraform: false
20 |
21 | useDotenv: true
22 |
23 |
24 |
25 | functions:
26 | lambda:
27 | handler: lambda_function.lambda_handler
28 | environment:
29 |
30 | DEV_AWS_ACCESS_KEY: ${env:DEV_AWS_ACCESS_KEY}
31 | DEV_AWS_SECRET_KEY: ${env:DEV_AWS_SECRET_KEY}
32 | DEV_AWS_REGION_NAME: ${env:DEV_AWS_REGION_NAME}
33 | TopicArn: arn:aws:sns:us-east-1:${env:ACCOUNT}:${env:TopicName}
34 | ENV: 'dev'
35 |
36 | events:
37 | - stream:
38 | type: kinesis
39 | arn:
40 | Fn::GetAtt:
41 | - KinesisDataStreams
42 | - Arn
43 |
44 |
45 | resources:
46 | Resources:
47 |
48 | SNSTopic:
49 | Type: AWS::SNS::Topic
50 | Properties:
51 | TopicName: ${env:TopicName}
52 |
53 | KinesisDataStreams:
54 | Type: AWS::Kinesis::Stream
55 | Properties:
56 | Name: ${env:StreamName}
57 | RetentionPeriodHours: ${env:RetentionPeriodHours}
58 | StreamModeDetails:
59 | StreamMode: ON_DEMAND
60 |
61 | DynamoDBTable:
62 | Type: AWS::DynamoDB::Table
63 | Properties:
64 | TableName: ${env:DYNAMO_DB_TABLE_NAME}
65 | AttributeDefinitions:
66 | - AttributeName: pk
67 | AttributeType: S
68 | - AttributeName: sk
69 | AttributeType: S
70 | KeySchema:
71 | - AttributeName: pk
72 | KeyType: HASH
73 | - AttributeName: sk
74 | KeyType: RANGE
75 | BillingMode: PAY_PER_REQUEST
76 | TableClass: STANDARD
77 | PointInTimeRecoverySpecification:
78 | PointInTimeRecoveryEnabled: true
79 | KinesisStreamSpecification:
80 | StreamArn: !GetAtt KinesisDataStreams.Arn
81 |
--------------------------------------------------------------------------------
/Lab26/.env:
--------------------------------------------------------------------------------
1 | END=QA
2 | DEV_ACCESS_KEY=XXXXX
3 | DEV_SECRET_KEY=XXXXXX
4 | DEV_REGION=us-east-1
5 | REPORTS_BUCKETS=
6 |
7 |
8 | DB_NAME='athenareportsdb'
9 | DB_TABLE_NAME='table_name=athenareports'
10 | S3AthenaReports=athenareports
11 | CRAWLER_NAME_S3='crawler-athena-reports-metrics'
12 | CRAWLER_TARGET_PATH=s3:///athenareports
13 | GLUE_ROLE_ARN=arn:aws:iam::XXXXX:role/service-role/XXXXXXXX
--------------------------------------------------------------------------------
/Lab26/lambda_functions.py:
--------------------------------------------------------------------------------
1 | try:
2 | import boto3
3 | import csv
4 | import uuid
5 | import json
6 | import datetime
7 | import re
8 | from datetime import datetime
9 | import os
10 | import io
11 | from io import StringIO
12 | from dotenv import load_dotenv
13 | load_dotenv(".env")
14 |
15 | except Exception as e:
16 | print("Error :{} ".format(e))
17 |
18 |
19 | class AWSS3(object):
20 | """Helper class to which add functionality on top of boto3 """
21 |
22 | def __init__(
23 | self,
24 | bucket=os.getenv("REPORTS_BUCKETS"),
25 | aws_access_key_id=os.getenv("DEV_ACCESS_KEY"),
26 | aws_secret_access_key=os.getenv("DEV_SECRET_KEY"),
27 | region_name=os.getenv("DEV_REGION"),
28 | ):
29 | self.BucketName = bucket
30 | self.client = boto3.client(
31 | "s3",
32 | aws_access_key_id=aws_access_key_id,
33 | aws_secret_access_key=aws_secret_access_key,
34 | region_name=region_name,
35 | )
36 |
37 | def put_files(self, Response=None, Key=None):
38 | """
39 | Put the File on S3
40 | :return: Bool
41 | """
42 | try:
43 | response = self.client.put_object(
44 | ACL="private", Body=Response, Bucket=self.BucketName, Key=Key
45 | )
46 | return "ok"
47 | except Exception as e:
48 | raise Exception("Failed to upload records. Error : {}".format(e))
49 |
50 | def item_exists(self, Key):
51 | """Given key check if the items exists on AWS S3 """
52 | try:
53 | response_new = self.client.get_object(
54 | Bucket=self.BucketName, Key=str(Key))
55 | return True
56 | except Exception as e:
57 | return False
58 |
59 | def get_item(self, Key):
60 | """Gets the Bytes Data from AWS S3 """
61 | try:
62 | response_new = self.client.get_object(
63 | Bucket=self.BucketName, Key=str(Key))
64 | return response_new["Body"].read()
65 | except Exception as e:
66 | print("Error :{}".format(e))
67 | return False
68 |
69 | def find_one_update(self, data=None, key=None):
70 | """
71 | This checks if Key is on S3 if it is return the data from s3
72 | else store on s3 and return it
73 | """
74 | flag = self.item_exists(Key=key)
75 | if flag:
76 | data = self.get_item(Key=key)
77 | return data
78 | else:
79 | self.put_files(Key=key, Response=data)
80 | return data
81 |
82 | def delete_object(self, Key):
83 | response = self.client.delete_object(Bucket=self.BucketName, Key=Key)
84 | return response
85 |
86 | def get_all_keys(self, Prefix=""):
87 | """
88 | :param Prefix: Prefix string
89 | :return: Keys List
90 | """
91 | try:
92 | paginator = self.client.get_paginator("list_objects_v2")
93 | pages = paginator.paginate(Bucket=self.BucketName, Prefix=Prefix)
94 | tmp = []
95 | for page in pages:
96 | for obj in page["Contents"]:
97 | tmp.append(obj["Key"])
98 | return tmp
99 | except Exception as e:
100 | return []
101 |
102 | def print_tree(self):
103 | keys = self.get_all_keys()
104 | for key in keys:
105 | print(key)
106 | return None
107 |
108 | def find_one_similar_key(self, searchTerm=""):
109 | keys = self.get_all_keys()
110 | return [key for key in keys if re.search(searchTerm, key)]
111 |
112 | def __repr__(self):
113 | return "AWS S3 Helper class "
114 |
115 |
116 | class Datetime(object):
117 | @staticmethod
118 | def get_year_month_day():
119 | """
120 | Return Year month and day
121 | :return: str str str
122 | """
123 | dt = datetime.now()
124 | year = dt.year
125 | month = dt.month
126 | day = dt.day
127 | return year, month, day
128 |
129 |
130 | def flatten_dict(data, parent_key="", sep="_"):
131 | """Flatten data into a single dict"""
132 | try:
133 | items = []
134 | for key, value in data.items():
135 | new_key = parent_key + sep + key if parent_key else key
136 | if type(value) == dict:
137 | items.extend(flatten_dict(value, new_key, sep=sep).items())
138 | else:
139 | items.append((new_key, value))
140 | return dict(items)
141 | except Exception as e:
142 | return {}
143 |
144 |
145 | def create_workgroup_history(day, workgroup):
146 | print("**", day)
147 | file_name = "workgroup_{}_{}_queries.json".format(workgroup, day)
148 |
149 | records = ""
150 |
151 | athena = boto3.client(
152 | "athena",
153 | aws_access_key_id=os.getenv("DEV_ACCESS_KEY"),
154 | aws_secret_access_key=os.getenv("DEV_SECRET_KEY"),
155 | region_name=os.getenv("DEV_REGION"),
156 | )
157 |
158 | paginator = athena.get_paginator("list_query_executions").paginate(
159 | WorkGroup=workgroup
160 | )
161 | print("paginator",paginator)
162 |
163 | for page in paginator:
164 | print("page", page)
165 |
166 | query_executions = athena.batch_get_query_execution(
167 | QueryExecutionIds=page["QueryExecutionIds"]
168 | )
169 |
170 | for query in query_executions["QueryExecutions"]:
171 |
172 | if "CompletionDateTime" not in query["Status"]:
173 | continue
174 |
175 | query_day = query["Status"]["CompletionDateTime"].strftime("%Y-%m-%d")
176 | print("query_day", query_day, type(query_day))
177 | print("day", day, type(day))
178 |
179 | if day == query_day:
180 |
181 | json_payload = {}
182 |
183 | json_payload["QueryExecutionId"] = query.get("QueryExecutionId")
184 | json_payload["Query"] = query.get("Query")
185 | json_payload["StatementType"] = query.get("StatementType")
186 | json_payload["WorkGroup"] = query.get("WorkGroup")
187 |
188 | for key, value in flatten_dict(
189 | query.get("ResultConfiguration")
190 | ).items():
191 | json_payload[key] = value.__str__()
192 |
193 | for key, value in flatten_dict(
194 | query.get("QueryExecutionContext")
195 | ).items():
196 | json_payload[key] = value.__str__()
197 |
198 | for key, value in flatten_dict(query.get("EngineVersion")).items():
199 | json_payload[key] = value.__str__()
200 |
201 | for key, value in flatten_dict(query.get("Statistics")).items():
202 | json_payload[key] = value.__str__()
203 |
204 | for key, value in flatten_dict(query.get("Status")).items():
205 | json_payload[key] = value.__str__()
206 |
207 | for key, value in flatten_dict(query.get("Statistics")).items():
208 | json_payload[key] = value.__str__()
209 |
210 | records = records + json.dumps(json_payload) + "\n"
211 |
212 | elif query_day < day:
213 | return records
214 |
215 | return records
216 |
217 |
218 | def handler(event=None, context=None):
219 |
220 | current_day = datetime.now().strftime("%Y-%m-%d").__str__()
221 | report_date = current_day
222 |
223 | workgroups = ["soumil"]
224 |
225 | for workgroup in workgroups:
226 |
227 | csv_buffer = StringIO()
228 | year, month, day = Datetime.get_year_month_day()
229 | file_name = "workgroup_{}_{}_queries.json".format(workgroup, report_date)
230 |
231 | response = create_workgroup_history(day=report_date, workgroup=workgroup)
232 | csv_buffer.write(response)
233 | csv_buffer.seek(0)
234 |
235 | helper = AWSS3()
236 |
237 | path = "{}/year={}/month={}/day={}/{}".format(
238 | os.getenv("S3AthenaReports"),
239 | year, month, day, file_name)
240 |
241 | helper.put_files(Response=csv_buffer.getvalue(), Key=path)
242 |
243 |
244 |
--------------------------------------------------------------------------------
/Lab26/requirements.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/soumilshah1995/install-external-python-packages-on-serverless/58d900fc5606b8d792b1a2eb73a47b979c325943/Lab26/requirements.txt
--------------------------------------------------------------------------------
/Lab26/serverless.yml:
--------------------------------------------------------------------------------
1 | service: athena-primary-workgroup-reports
2 |
3 | frameworkVersion: '3'
4 |
5 | provider:
6 | name: aws
7 | runtime: python3.8
8 | memorySize: 512
9 | timeout: 600
10 | architecture: x86_64
11 | stackTags:
12 | product: reports
13 | env: qa
14 | created-date: 2022-04-26
15 | team: python-dev
16 | customer-impact: false
17 | terraform: false
18 |
19 | useDotenv: true
20 |
21 | functions:
22 | athena-reporting:
23 | handler: lambda_functions.handler
24 | environment:
25 |       DEV_ACCESS_KEY: ${env:DEV_ACCESS_KEY}
26 |       DEV_SECRET_KEY: ${env:DEV_SECRET_KEY}
27 |       DEV_REGION: ${env:DEV_REGION}
28 | REPORTS_BUCKETS: ${env:REPORTS_BUCKETS}
29 | S3AthenaReports: ${env:S3AthenaReports}
30 |
31 | events:
32 | - schedule: cron(5,35 14 * * ? *)
33 |
34 | resources:
35 | Resources:
36 | S3Bucket:
37 | Type: AWS::S3::Bucket
38 | Properties:
39 | BucketName: ${env:REPORTS_BUCKETS}
40 |
41 | GlueDatabase:
42 | Type: AWS::Glue::Database
43 | Properties:
44 | CatalogId: '867098943567'
45 | DatabaseInput:
46 | Name: ${env:DB_NAME}
47 |
48 | TableGlueCrawlerAthena:
49 | Type: AWS::Glue::Crawler
50 | Properties:
51 | DatabaseName: ${env:DB_NAME}
52 | Name: ${env:CRAWLER_NAME_S3}
53 | RecrawlPolicy:
54 | RecrawlBehavior: CRAWL_EVERYTHING
55 | Role: ${env:GLUE_ROLE_ARN}
56 | SchemaChangePolicy:
57 | DeleteBehavior: DEPRECATE_IN_DATABASE
58 | Targets:
59 | S3Targets:
60 | - Path: ${env:CRAWLER_TARGET_PATH}
--------------------------------------------------------------------------------
/Lab27/.env:
--------------------------------------------------------------------------------
1 | APP_AWS_ACCESS_KEY=XXXXX
2 | APP_AWS_REGION_NAME=us-east-1
3 | APP_AWS_SECRET_KEY=XXXXX
4 | S3_BUCKET=XXXXXXX
5 | NAME_SOURCE_QUEUE=mongo-worker
6 | NAME_DLQ_QUEUE=mongo-worker-dlq
--------------------------------------------------------------------------------
/Lab27/handler.py:
--------------------------------------------------------------------------------
1 | try:
2 | import os
3 | import re
4 | import json
5 | import boto3
6 | import uuid
7 | import json
8 | from io import StringIO
9 | from dateutil.parser import parse
10 | except Exception as e:
11 | print("Error : {}".format(e))
12 |
13 |
14 | class AWSS3(object):
15 | """Helper class to which add functionality on top of boto3 """
16 |
17 | def __init__(self, bucket, aws_access_key_id, aws_secret_access_key, region_name):
18 |
19 | self.BucketName = bucket
20 | self.client = boto3.client(
21 | "s3",
22 | aws_access_key_id=aws_access_key_id,
23 | aws_secret_access_key=aws_secret_access_key,
24 | region_name=region_name,
25 | )
26 |
27 | def put_files(self, Response=None, Key=None):
28 | """
29 | Put the File on S3
30 | :return: Bool
31 | """
32 | try:
33 | response = self.client.put_object(
34 | ACL="private", Body=Response, Bucket=self.BucketName, Key=Key
35 | )
36 | return "ok"
37 | except Exception as e:
38 | print("Error : {} ".format(e))
39 | return "error"
40 |
41 | def item_exists(self, Key):
42 | """Given key check if the items exists on AWS S3 """
43 | try:
44 | response_new = self.client.get_object(Bucket=self.BucketName, Key=str(Key))
45 | return True
46 | except Exception as e:
47 | return False
48 |
49 | def get_item(self, Key):
50 |
51 | """Gets the Bytes Data from AWS S3 """
52 |
53 | try:
54 | response_new = self.client.get_object(Bucket=self.BucketName, Key=str(Key))
55 | return response_new["Body"].read()
56 |
57 | except Exception as e:
58 | print("Error :{}".format(e))
59 | return False
60 |
61 | def find_one_update(self, data=None, key=None):
62 |
63 | """
64 | This checks if Key is on S3 if it is return the data from s3
65 | else store on s3 and return it
66 | """
67 |
68 | flag = self.item_exists(Key=key)
69 |
70 | if flag:
71 | data = self.get_item(Key=key)
72 | return data
73 |
74 | else:
75 | self.put_files(Key=key, Response=data)
76 | return data
77 |
78 | def delete_object(self, Key):
79 |
80 | response = self.client.delete_object(Bucket=self.BucketName, Key=Key, )
81 | return response
82 |
83 | def get_all_keys(self, Prefix=""):
84 |
85 | """
86 | :param Prefix: Prefix string
87 | :return: Keys List
88 | """
89 | try:
90 | paginator = self.client.get_paginator("list_objects_v2")
91 | pages = paginator.paginate(Bucket=self.BucketName, Prefix=Prefix)
92 |
93 | tmp = []
94 |
95 | for page in pages:
96 | for obj in page["Contents"]:
97 | tmp.append(obj["Key"])
98 |
99 | return tmp
100 | except Exception as e:
101 | return []
102 |
103 | def print_tree(self):
104 | keys = self.get_all_keys()
105 | for key in keys:
106 | print(key)
107 | return None
108 |
109 | def find_one_similar_key(self, searchTerm=""):
110 | keys = self.get_all_keys()
111 | return [key for key in keys if re.search(searchTerm, key)]
112 |
113 | def __repr__(self):
114 | return "AWS S3 Helper class "
115 |
116 |
117 | class HistoricalDataDump(AWSS3):
118 | def __init__(self, data):
119 | self.data = data
120 | AWSS3.__init__(self,
121 | aws_access_key_id=os.getenv("APP_AWS_ACCESS_KEY"),
122 | aws_secret_access_key=os.getenv("APP_AWS_SECRET_KEY"),
123 | region_name=os.getenv("APP_AWS_REGION_NAME"),
124 | bucket=os.getenv("S3_BUCKET"),
125 | )
126 | self.table_name = "mongo_data"
127 |
128 | def flatten_dict(self, data, parent_key='', sep='_'):
129 | """Flatten data into a single dict"""
130 | items = []
131 | for key, value in data.items():
132 | new_key = parent_key + sep + key if parent_key else key
133 |
134 | if type(value) == dict:
135 | items.extend(self.flatten_dict(value, new_key, sep=sep).items())
136 | else:
137 | items.append((new_key, value))
138 |
139 | return dict(items)
140 |
141 | def dict_clean(self, items):
142 | result = {}
143 | for key, value in items:
144 | if value is None:
145 | value = 'n/a'
146 | if value == "None":
147 | value = 'n/a'
148 | if value == "null":
149 | value = 'n/a'
150 | if len(str(value)) < 1:
151 | value = 'n/a'
152 | result[key] = str(value)
153 | return result
154 |
155 | def mongo_to_s3(self):
156 |
157 | """Flatten data into a single dict"""
158 | clean_messages = [self.flatten_dict(message) for message in self.data.get("data")]
159 | clean_messages_new = [json.loads(json.dumps(message), object_pairs_hook=self.dict_clean) for message in
160 | clean_messages]
161 | response = self.put_json_file_to_s3(data=clean_messages_new)
162 |         print("Successfully uploaded the records.")
163 |
164 | def put_json_file_to_s3(self, data=[]):
165 | """Put the json object to S3"""
166 | try:
167 | dt = parse(data[0].get("createdAt"))
168 | csv_buffer = StringIO()
169 |
170 | records = ""
171 | for record in data: records += json.dumps(record) + "\n"
172 |
173 | new_s3_path = "database=mongodb/tablename={}/year={}/month={}/day={}/{}.json".format(
174 | self.table_name, dt.year, dt.month, dt.day, uuid.uuid4().__str__()
175 | )
176 | csv_buffer.write(records)
177 | csv_buffer.seek(0)
178 | self.put_files(Response=csv_buffer.getvalue(), Key=new_s3_path)
179 |
180 | except Exception:
181 | raise Exception("Uploading records failed")
182 |
183 |
184 | def lambda_handler(event, context):
185 | messages = [json.loads(record.get("body")) for record in event['Records']]
186 |
187 | for message in messages:
188 | helper = HistoricalDataDump(data=message)
189 | helper.mongo_to_s3()
190 |
191 | return {
192 | 'statusCode': 200,
193 | 'body': json.dumps('Successfully uploaded records!!')
194 | }
195 |
--------------------------------------------------------------------------------
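
For reference, a minimal standalone sketch (standard library only, with a made-up record) of what `flatten_dict` in Lab27/handler.py above produces before `dict_clean` stringifies every value and replaces empty/`None` values with `"n/a"`:

```python
import json

def flatten_dict(data, parent_key="", sep="_"):
    # Same recursion as HistoricalDataDump.flatten_dict: nested keys are joined with "_".
    items = []
    for key, value in data.items():
        new_key = parent_key + sep + key if parent_key else key
        if isinstance(value, dict):
            items.extend(flatten_dict(value, new_key, sep=sep).items())
        else:
            items.append((new_key, value))
    return dict(items)

# Hypothetical nested Mongo-style record, for illustration only.
record = {
    "_id": "abc123",
    "user": {"name": "Jane", "address": {"city": None}},
    "createdAt": "2022-03-21 10:15:00+00:00",
}

print(json.dumps(flatten_dict(record)))
# {"_id": "abc123", "user_name": "Jane", "user_address_city": null, "createdAt": "2022-03-21 10:15:00+00:00"}
# dict_clean then turns the null into "n/a" and casts values to str before the records are
# written as newline-delimited JSON under database=mongodb/tablename=<table>/year=<y>/month=<m>/day=<d>/<uuid>.json
```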
/Lab27/runner.env:
--------------------------------------------------------------------------------
1 | BATCH_MONGO_CHUNK_SIZE=1000
2 |
3 | BATCH_MONGO_DB="mongodb+srv://:@"
4 | BATCH_MONGO_DATABASE_NAME = 'XXXX'
5 | BATCH_MONGO_COLLECTION_NAME = 'XXX'
6 | DEV_AWS_ACCESS_KEY = 'XXX'
7 | DEV_AWS_SECRET_KEY = 'XXXXXX'
8 | DEV_AWS_REGION_NAME = 'us-east-1'
9 | S3_BUCKET = 'XXXXXXXXXXXX'
10 | QUEUE_NAME = 'mongo-worker'
--------------------------------------------------------------------------------
/Lab27/runnerScript.py:
--------------------------------------------------------------------------------
1 | try:
2 | import os
3 | import pymongo
4 | from functools import wraps
5 | from enum import Enum
6 | import json
7 | import sys
8 | import boto3
9 | import re
10 | import datetime
11 | from datetime import datetime
12 |
13 | from dateutil.parser import parse
14 | from datetime import datetime, timezone, timedelta
15 |
16 | from dotenv import load_dotenv
17 | load_dotenv("runner.env")
18 |
19 | except Exception as e:
20 |     print("Error : {}".format(e))
21 |
22 |
23 | class MongoDbSettings(object):
24 | def __init__(self, connection_string, database_name, collection_name):
25 | self.connection_string = connection_string
26 | self.collection_name = collection_name
27 | self.database_name = database_name
28 |
29 |
30 | class MongoDB:
31 | def __init__(self, mongo_db_settings):
32 | self.mongo_db_settings = mongo_db_settings
33 |
34 | if type(self.mongo_db_settings).__name__ != "MongoDbSettings":
35 |             raise Exception("Please pass a valid MongoDbSettings instance for mongo_db_settings")
36 |
37 | self.client = pymongo.MongoClient(
38 | self.mongo_db_settings.connection_string,
39 | port=27017,
40 | tls=True,tlsAllowInvalidCertificates=True
41 | )
42 | self.cursor = self.client[self.mongo_db_settings.database_name][
43 | self.mongo_db_settings.collection_name
44 | ]
45 |
46 | def get_total_count(self, query={}, logger=None):
47 | total_count = self.cursor.count_documents(filter=query)
48 |
49 | return total_count
50 |
51 | def get_data(self, query={}, sort=pymongo.ASCENDING, mongo_batch_size=int(os.getenv("BATCH_MONGO_CHUNK_SIZE"))):
52 |
53 | # mongo_batch_size = 300
54 |
55 | # print("mongo_batch_size", mongo_batch_size)
56 |
57 | # data = list(self.cursor.find(query).sort("createdAt", sort))
58 |
59 | # return data
60 |
61 | total_count = self.cursor.count_documents(filter=query)
62 | total_pages = total_count // mongo_batch_size
63 | page_size = mongo_batch_size
64 |
65 | if total_count % mongo_batch_size != 0:
66 | total_pages += 1
67 | for page_number in range(total_pages):
68 | skips = page_size * page_number
69 | data = list(self.cursor.find(query).skip(skips).limit(page_size).sort('createdAt', sort))
70 | yield data
71 |
72 | # =================== Connector ==========================================================
73 |
74 |
75 | class Connector(Enum):
76 | MONGODB = MongoDB(
77 | mongo_db_settings=MongoDbSettings(
78 | connection_string=os.getenv("BATCH_MONGO_DB"),
79 | database_name=os.getenv("BATCH_MONGO_DATABASE_NAME"),
80 | collection_name=os.getenv("BATCH_MONGO_COLLECTION_NAME"),
81 | )
82 | )
83 |
84 |
85 | # ========================================================================================
86 |
87 |
88 | class AWSS3(object):
89 |
90 |     """Helper class that adds functionality on top of boto3."""
91 |
92 | def __init__(self, bucket, aws_access_key_id, aws_secret_access_key, region_name):
93 |
94 | self.BucketName = bucket
95 | self.client = boto3.client(
96 | "s3",
97 | aws_access_key_id=aws_access_key_id,
98 | aws_secret_access_key=aws_secret_access_key,
99 | region_name=region_name,
100 | )
101 |
102 |
103 | def put_files(self, Response=None, Key=None):
104 | """
105 | Put the File on S3
106 | :return: Bool
107 | """
108 | try:
109 |
110 | response = self.client.put_object(
111 | ACL="private", Body=Response, Bucket=self.BucketName, Key=Key
112 | )
113 | return "ok"
114 | except Exception as e:
115 | print("Error : {} ".format(e))
116 | return "error"
117 |
118 | def item_exists(self, Key):
119 | """Given key check if the items exists on AWS S3 """
120 | try:
121 | response_new = self.client.get_object(Bucket=self.BucketName, Key=str(Key))
122 | return True
123 | except Exception as e:
124 | return False
125 |
126 | def get_item(self, Key):
127 |
128 | """Gets the Bytes Data from AWS S3 """
129 |
130 | try:
131 | response_new = self.client.get_object(Bucket=self.BucketName, Key=str(Key))
132 | return response_new["Body"].read()
133 |
134 | except Exception as e:
135 | print("Error :{}".format(e))
136 | return False
137 |
138 | def find_one_update(self, data=None, key=None):
139 |
140 | """
141 | This checks if Key is on S3 if it is return the data from s3
142 | else store on s3 and return it
143 | """
144 |
145 | flag = self.item_exists(Key=key)
146 |
147 | if flag:
148 | data = self.get_item(Key=key)
149 | return data
150 |
151 | else:
152 | self.put_files(Key=key, Response=data)
153 | return data
154 |
155 | def delete_object(self, Key):
156 |
157 | response = self.client.delete_object(Bucket=self.BucketName, Key=Key,)
158 | return response
159 |
160 | def get_all_keys(self, Prefix=""):
161 |
162 | """
163 | :param Prefix: Prefix string
164 | :return: Keys List
165 | """
166 | try:
167 | paginator = self.client.get_paginator("list_objects_v2")
168 | pages = paginator.paginate(Bucket=self.BucketName, Prefix=Prefix)
169 |
170 | tmp = []
171 |
172 | for page in pages:
173 | for obj in page["Contents"]:
174 | tmp.append(obj["Key"])
175 |
176 | return tmp
177 | except Exception as e:
178 | return []
179 |
180 | def print_tree(self):
181 | keys = self.get_all_keys()
182 | for key in keys:
183 | print(key)
184 | return None
185 |
186 | def find_one_similar_key(self, searchTerm=""):
187 | keys = self.get_all_keys()
188 | return [key for key in keys if re.search(searchTerm, key)]
189 |
190 | def __repr__(self):
191 | return "AWS S3 Helper class "
192 |
193 |
194 | class AWSSQS(object):
195 |
196 |     """Helper class that adds functionality on top of boto3."""
197 |
198 | def __init__(self, aws_access_key_id, aws_secret_access_key, region_name):
199 | self.sqs_client = boto3.resource(
200 | 'sqs',
201 | aws_access_key_id=aws_access_key_id,
202 | aws_secret_access_key=aws_secret_access_key,
203 | region_name=region_name,
204 | )
205 |
206 | def get_queue_by_name(self, queue_name=""):
207 | try:
208 | queue_name = self.sqs_client.get_queue_by_name(QueueName=queue_name)
209 | return queue_name
210 | except Exception as e:
211 | raise Exception("Error : {}".format(e))
212 |
213 | def __repr__(self):
214 | return "AWS SQS Helper class "
215 |
216 |
217 | class Master(AWSS3, AWSSQS):
218 |
219 | def __init__(self):
220 | AWSS3.__init__(self,
221 | aws_access_key_id=os.getenv("DEV_AWS_ACCESS_KEY"),
222 | aws_secret_access_key=os.getenv("DEV_AWS_SECRET_KEY"),
223 | region_name=os.getenv("DEV_AWS_REGION_NAME"),
224 | bucket=os.getenv("S3_BUCKET"),
225 | )
226 | AWSSQS.__init__(self,
227 | aws_access_key_id=os.getenv("DEV_AWS_ACCESS_KEY"),
228 | aws_secret_access_key=os.getenv("DEV_AWS_SECRET_KEY"),
229 | region_name=os.getenv("DEV_AWS_REGION_NAME"),
230 | )
231 | self.mongo_connector = Connector.MONGODB.value
232 | self.queue = self.get_queue_by_name(queue_name=os.getenv("QUEUE_NAME"))
233 |
234 | def run(self):
235 | response_step_1 = self.step_1_start_historical_data_dump()
236 | if response_step_1.get("status") == 200:
237 | print("Process complete ")
238 | return True
239 | else:
240 | return False
241 |
242 | def step_1_start_historical_data_dump(self):
243 |
244 | table_name = os.getenv("BATCH_MONGO_COLLECTION_NAME")
245 | key = "FileProcessedLogs/{}.json".format(table_name)
246 |
247 | """check key exist or not"""
248 | item_exist = self.item_exists(Key=key)
249 | print("item_exist", item_exist)
250 |
251 | if not item_exist:
252 |
253 | _data = bytes(json.dumps({"table_name": table_name,
254 | "last_created_date":datetime(2021, 3, 21, 0, 0, 0, tzinfo=timezone.utc).__str__(),
255 | "max_created_date":datetime(2022, 3, 21, 0, 0, 0, tzinfo=timezone.utc).__str__()}).encode("UTF-8"))
256 | self.put_files(Key=key, Response=_data)
257 |
258 | response = self.get_item(Key=key)
259 | first_created_date = parse(json.loads(response.decode("UTF-8")).get("last_created_date"))
260 | max_created_date = parse(json.loads(response.decode("UTF-8")).get("max_created_date"))
261 |
262 |
263 | """manually run for date range """
264 | # first_created_date = datetime(2021, 3, 22, 0, 0, 0, tzinfo=timezone.utc)
265 | # max_created_date = datetime(2021, 9, 23, 0, 0, 0, tzinfo=timezone.utc)
266 | print("Starting job......")
267 | print("first_created_date", first_created_date)
268 | print("max_created_date", max_created_date)
269 |
270 |
271 | while first_created_date < max_created_date:
272 |
273 | first_created_date = self.put_message_on_sqs(first_created_date, table_name, key, max_created_date)
274 |
275 | if first_created_date is not None:
276 | _data = bytes(json.dumps({"table_name": table_name,
277 | "last_created_date":first_created_date.__str__(),
278 | "max_created_date":max_created_date.__str__()}).encode("UTF-8"))
279 | self.put_files(
280 | Key=key, Response=_data
281 | )
282 |
283 | if first_created_date is not None:
284 | return {"status": 200}
285 | else:
286 | return {"status": 412}
287 |
288 | def put_message_on_sqs(self, first_created_date, table_name, key, max_created_date):
289 |
290 | """Process messages on SQS Queue"""
291 |
292 | for hour in range(first_created_date.hour, 24):
293 | try:
294 | last_created_date = first_created_date + timedelta(hours=1)
295 |
296 | filter={
297 | 'createdAt': {
298 | '$gte': first_created_date,
299 | '$lt': last_created_date
300 | }
301 | }
302 |
303 | count = self.mongo_connector.get_total_count(query=filter)
304 |
305 | flag = True
306 |
307 | if count > 0:
308 | response_data = self.mongo_connector.get_data(query=filter)
309 | while True:
310 | try:
311 | batch_data = next(response_data)
312 | data = {"data":batch_data}
313 |
314 | print("SENT TO SQS : {}".format(data))
315 |
316 | response = self.queue.send_message(MessageBody=json.dumps(data, default=str))
317 | print(response)
318 |
319 | except StopIteration:
320 | break
321 | except Exception as e:
322 | flag = False
323 | break
324 |
325 | if flag:
326 | first_created_date = last_created_date
327 |
328 | else:
329 | raise Exception("Failed to process batch for {} date.".format(first_created_date))
330 |
331 | except Exception as e:
332 | print("_________{}___________".format(first_created_date))
333 | _data = bytes(json.dumps({"table_name": table_name,
334 | "last_created_date":first_created_date.__str__(),
335 | "max_created_date":max_created_date.__str__()}).encode("UTF-8"))
336 | self.put_files(
337 | Key=key, Response=_data
338 | )
339 | first_created_date = None
340 | raise Exception("Failed to process further")
341 |
342 | return first_created_date
343 |
344 |
345 | def main():
346 | helper = Master()
347 | helper.run()
348 |
349 |
350 | if __name__ == "__main__":
351 | main()
352 |
--------------------------------------------------------------------------------
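
The runner above resumes from an S3 checkpoint: `step_1_start_historical_data_dump` keeps a small JSON object under `FileProcessedLogs/<collection>.json` and advances `last_created_date` one hour at a time as batches are pushed to SQS. A minimal sketch of that object (the collection name here is hypothetical; the real value comes from `BATCH_MONGO_COLLECTION_NAME`):

```python
import json
from datetime import datetime, timezone

# Hypothetical collection name, for illustration only.
checkpoint = {
    "table_name": "orders",
    "last_created_date": str(datetime(2021, 3, 21, 0, 0, 0, tzinfo=timezone.utc)),
    "max_created_date": str(datetime(2022, 3, 21, 0, 0, 0, tzinfo=timezone.utc)),
}
print(json.dumps(checkpoint))
# {"table_name": "orders", "last_created_date": "2021-03-21 00:00:00+00:00",
#  "max_created_date": "2022-03-21 00:00:00+00:00"}
```

Editing `last_created_date` in this object (or deleting the key so the defaults above are recreated) is how a window can be replayed after a failed run.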
/Lab27/serverless.yml:
--------------------------------------------------------------------------------
1 | service: mongo-migrations-workers
2 |
3 | frameworkVersion: '3'
4 |
5 | useDotenv: true
6 |
7 | provider:
8 | name: aws
9 | runtime: python3.8
10 | stackTags:
11 | product: datateam
12 | env: qa
13 | created-date: 2022-03-16
14 | team: data
15 | customer-impact: false
16 | terraform: false
17 |
18 |
19 | functions:
20 | s3consumer:
21 | handler: handler.lambda_handler
22 | timeout: 200 # seconds
23 | reservedConcurrency: 900
24 | environment:
25 | APP_AWS_ACCESS_KEY: ${env:APP_AWS_ACCESS_KEY}
26 | APP_AWS_REGION_NAME: ${env:APP_AWS_REGION_NAME}
27 | APP_AWS_SECRET_KEY: ${env:APP_AWS_SECRET_KEY}
28 | S3_BUCKET: ${env:S3_BUCKET}
29 | events:
30 | - sqs:
31 | arn:
32 | Fn::GetAtt:
33 | - MyQueue
34 | - Arn
35 |
36 | resources:
37 |
38 | Resources:
39 |
40 | MyQueue:
41 | Type: "AWS::SQS::Queue"
42 | Properties:
43 | QueueName: ${env:NAME_SOURCE_QUEUE}
44 | VisibilityTimeout : 500
45 | MessageRetentionPeriod: 1209600
46 | MaximumMessageSize: 262144
47 | ReceiveMessageWaitTimeSeconds: 2
48 | RedrivePolicy:
49 | deadLetterTargetArn: !GetAtt MyDeadLetterQueue.Arn
50 | maxReceiveCount: 5
51 |
52 | MyDeadLetterQueue:
53 | Type: AWS::SQS::Queue
54 | Properties:
55 | QueueName: ${env:NAME_DLQ_QUEUE}
56 |
57 |
--------------------------------------------------------------------------------
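
With `maxReceiveCount: 5`, messages that repeatedly fail in the consumer end up on the `mongo-worker-dlq` queue. A hedged sketch for inspecting that dead-letter queue with boto3 (region and credentials are assumptions; the queue name comes from `NAME_DLQ_QUEUE` in the `.env`):

```python
import boto3

sqs = boto3.resource("sqs", region_name="us-east-1")          # region is an assumption
dlq = sqs.get_queue_by_name(QueueName="mongo-worker-dlq")     # NAME_DLQ_QUEUE from .env

for message in dlq.receive_messages(MaxNumberOfMessages=10, WaitTimeSeconds=2):
    print(message.body[:200])   # peek at the failed payload
    # message.delete()          # only delete once the payload has been handled elsewhere
```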
/Lab28/EverythingOneFile/dispatcher.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def dispatch(event, context):
5 | body = {
6 | "message": "Go Serverless v3.0! Your function executed successfully!",
7 | "input": event,
8 | }
9 |
10 | response = {"statusCode": 200, "body": json.dumps(body)}
11 |
12 | return response
--------------------------------------------------------------------------------
/Lab28/EverythingOneFile/serverless.yml:
--------------------------------------------------------------------------------
1 | service: aws-sns-project
2 | frameworkVersion: '3'
3 |
4 | provider:
5 | name: aws
6 |   runtime: python3.8
7 | functions:
8 | dispatcher:
9 | handler: dispatcher.dispatch
10 | events:
11 | - sns:
12 | arn:
13 | Fn::Join:
14 | - ':'
15 | - - 'arn:aws:sns'
16 | - Ref: 'AWS::Region'
17 | - Ref: 'AWS::AccountId'
18 | - 'MyCustomTopic'
19 | topicName: MyCustomTopic
20 |
21 | resources:
22 | Resources:
23 | SuperTopic:
24 | Type: AWS::SNS::Topic
25 | Properties:
26 | TopicName: MyCustomTopic
--------------------------------------------------------------------------------
/Lab28/organized/serverless.yml:
--------------------------------------------------------------------------------
1 | service: aws-sns-project
2 | frameworkVersion: '3'
3 |
4 |
5 | provider:
6 | name: aws
7 | runtime: python3.8
8 | memorySize: 200
9 | timeout: 500
10 | stackTags:
11 | created-date: "2022-12-26"
12 | customer-impact: "false"
13 | terraform: "false"
14 | role: serverless
15 |
16 |
17 | useDotenv: true
18 |
19 |
20 | functions: ${file(src/Lambda/serverless.yml)}
21 |
22 |
23 | resources:
24 | - ${file(src/SNS/serverless.yml)}
25 |
26 |
--------------------------------------------------------------------------------
/Lab28/organized/src/Lambda/dispatcher.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def dispatch(event, context):
5 | body = {
6 | "message": "Go Serverless v3.0! Your function executed successfully!",
7 | "input": event,
8 | }
9 |
10 | response = {"statusCode": 200, "body": json.dumps(body)}
11 |
12 | return response
--------------------------------------------------------------------------------
/Lab28/organized/src/Lambda/serverless.yml:
--------------------------------------------------------------------------------
1 |
2 | dispatcher:
3 | handler: dispatcher.dispatch
4 | events:
5 | - sns:
6 | arn:
7 | Fn::Join:
8 | - ':'
9 | - - 'arn:aws:sns'
10 | - Ref: 'AWS::Region'
11 | - Ref: 'AWS::AccountId'
12 | - 'MyCustomTopic'
13 | topicName: MyCustomTopic
14 |
15 |
--------------------------------------------------------------------------------
/Lab28/organized/src/SNS/serverless.yml:
--------------------------------------------------------------------------------
1 | resources:
2 | Resources:
3 | SuperTopic:
4 | Type: AWS::SNS::Topic
5 | Properties:
6 | TopicName: MyCustomTopic
--------------------------------------------------------------------------------
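
Once either Lab28 variant is deployed, the dispatcher can be exercised by publishing a test message to `MyCustomTopic`. A minimal sketch, assuming default credentials and a placeholder account id in the topic ARN:

```python
import json
import boto3

sns = boto3.client("sns", region_name="us-east-1")  # region is an assumption

# arn:aws:sns:<region>:<account-id>:MyCustomTopic -- the account id below is a placeholder.
topic_arn = "arn:aws:sns:us-east-1:123456789012:MyCustomTopic"

response = sns.publish(TopicArn=topic_arn, Message=json.dumps({"hello": "sns"}))
print(response["MessageId"])
```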
/Lab29/.env:
--------------------------------------------------------------------------------
1 | DEV_ACCESS_KEY="XXXXXXXXXXX"
2 | DEV_SECRET_KEY="XXXXXXXXXXXXXXXZ"
3 | DEV_REGION='us-east-1'
4 | BUCKET="XXXXXXXXXXXXXX"
5 | QueueName="data-ingestion-queue"
--------------------------------------------------------------------------------
/Lab29/handler.py:
--------------------------------------------------------------------------------
1 | try:
2 | import unzip_requirements
3 | except ImportError:
4 | pass
5 |
6 | try:
7 | import json
8 | import uuid
9 | import os
10 | import datetime
11 | from datetime import datetime
12 |
13 | import sys
14 | import pandas as pd
15 | import pyarrow as pa
16 | import pyarrow.parquet as pq
17 | import boto3
18 | except Exception as e:
19 | print('error', e)
20 |
21 |
22 | class DataTransform(object):
23 | def __init__(self):
24 | pass
25 |
26 | def error_handler(func, exit_flag=False):
27 | def wrapper(*args, **kwargs):
28 | try:
29 | result = func(*args, **kwargs)
30 | print(f"INFO: {func.__name__} -> SUCCESSFUL")
31 | return result
32 | except Exception as e:
33 | print(f"ERROR: {func.__name__} -> UNSUCCESSFUL : {str(e)}")
34 | if exit_flag: sys.exit(1)
35 |
36 | return wrapper
37 |
38 | @error_handler
39 | def flatten_dict(self, data, parent_key="", sep="_"):
40 | """Flatten data into a single dict"""
41 | items = []
42 | for key, value in data.items():
43 | new_key = parent_key + sep + key if parent_key else key
44 | if type(value) == dict:
45 | items.extend(self.flatten_dict(value, new_key, sep=sep).items())
46 | else:
47 | items.append((new_key, value))
48 | return dict(items)
49 |
50 | @error_handler
51 | def dict_clean(self, items):
52 | result = {}
53 | for key, value in items.items():
54 | if value is None:
55 | value = "n/a"
56 | if value == "None":
57 | value = "n/a"
58 | if value == "null":
59 | value = "n/a"
60 | if len(str(value)) < 1:
61 | value = "n/a"
62 | result[key] = str(value)
63 | return result
64 |
65 |
66 | def consume(event, context):
67 | data_transform = DataTransform()
68 |
69 | processed_messages = []
70 | for record in event.get('Records'):
71 | data = json.loads(record.get('body'))
72 | clean_flatten_record = data_transform.dict_clean(data_transform.flatten_dict(data))
73 | processed_messages.append(clean_flatten_record)
74 |
75 | print("processed_messages")
76 | print(processed_messages)
77 |
78 | df = pd.DataFrame(data=processed_messages)
79 | print("df", df.head())
80 |
81 | # Convert the Pandas dataframe to an Arrow table
82 | table = pa.Table.from_pandas(df)
83 |
84 | # Write the Arrow table to a Parquet file in memory
85 | parquet_bytes = pa.BufferOutputStream()
86 | pq.write_table(table, parquet_bytes)
87 |
88 | # Upload the Parquet file to S3
89 | s3 = boto3.client(
90 | 's3',
91 | aws_access_key_id=os.getenv("DEV_ACCESS_KEY"),
92 | aws_secret_access_key=os.getenv("DEV_SECRET_KEY"),
93 | region_name=os.getenv("DEV_REGION"),
94 | )
95 |
96 | dt = datetime.now()
97 | year = dt.year
98 | month = dt.month
99 | day = dt.day
100 | path = f"raw/table_name=sample/year={year}/month={month}/day={day}/{uuid.uuid4().__str__()}.parquet"
101 | print("type", type(parquet_bytes.getvalue()))
102 |
103 | s3.put_object(
104 | Bucket=os.getenv("BUCKET"),
105 | Key=path,
106 | Body=parquet_bytes.getvalue().to_pybytes()
107 | )
108 |
109 | return {
110 | 'statusCode': 200,
111 | 'body': 'Parquet file uploaded to S3'
112 | }
113 |
--------------------------------------------------------------------------------
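
To spot-check what the consumer wrote, one of the Parquet objects can be read back with pandas/pyarrow. A hedged sketch; the bucket and key below are placeholders, and real keys follow the `raw/table_name=sample/year=.../month=.../day=.../<uuid>.parquet` layout used in the handler:

```python
import io
import boto3
import pandas as pd  # pyarrow must also be installed for read_parquet

s3 = boto3.client("s3")  # assumes credentials/region come from the environment

# Placeholder bucket/key, for illustration only.
obj = s3.get_object(
    Bucket="my-data-bucket",
    Key="raw/table_name=sample/year=2022/month=4/day=5/example.parquet",
)
df = pd.read_parquet(io.BytesIO(obj["Body"].read()))
print(df.head())
```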
/Lab29/requirements.txt:
--------------------------------------------------------------------------------
1 | pandas
2 | numpy
3 | pyarrow
--------------------------------------------------------------------------------
/Lab29/sample-publish-sqs.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | import boto3
4 | from faker import Faker
5 | import json
6 | from dotenv import load_dotenv
7 | load_dotenv(".env")
8 | import os
9 |
10 | try:
11 | from dotenv import load_dotenv
12 | load_dotenv(".env")
13 |
14 | os.environ['AWS_ACCESS_KEY_ID'] = os.getenv("DEV_ACCESS_KEY")
15 | os.environ['AWS_SECRET_ACCESS_KEY'] = os.getenv("DEV_SECRET_KEY")
16 | os.environ['AWS_REGION'] = os.getenv("DEV_REGION")
17 | except Exception as e:
18 | print("Error",e)
19 |
20 |
21 | # Create a Faker object
22 | fake = Faker()
23 |
24 | # Define the SQS queue URL
25 | queue_url = 'https://sqs.us-east-1.amazonaws.com/043916019468/data-ingestion-queue'
26 |
27 | # Create an SQS client
28 | sqs = boto3.client('sqs')
29 |
30 | # Generate fake data and publish to SQS
31 | while True:
32 | # Generate fake data
33 | fake_data = {
34 | 'name': fake.name(),
35 | 'address': fake.address(),
36 | 'phone_number': fake.phone_number()
37 | }
38 |
39 | # Convert the fake data to a JSON string
40 | message_body = json.dumps(fake_data)
41 |
42 | # Publish the message to the SQS queue
43 | response = sqs.send_message(
44 | QueueUrl=queue_url,
45 | MessageBody=message_body
46 | )
47 |
48 | print(f"Message ID: {response['MessageId']}")
49 | print(f"Message Contents: {message_body}")
50 |
51 |
--------------------------------------------------------------------------------
/Lab29/serverless.yml:
--------------------------------------------------------------------------------
1 | service: aws-lambda-parquet-files
2 |
3 | frameworkVersion: '3'
4 |
5 | provider:
6 | name: aws
7 | runtime: python3.7
8 | memorySize: 800
9 | timeout: 200
10 | architecture: x86_64
11 | stackTags:
12 | product: datateam
13 | env: qa
14 | created-date: 2022-04-05
15 | team: data
16 | customer-impact: false
17 | terraform: false
18 |
19 | useDotenv: true
20 |
21 | plugins:
22 | - serverless-dotenv-plugin
23 | - serverless-python-requirements
24 |
25 |
26 | custom:
27 | pythonRequirements:
28 | dockerizePip: true
29 | zip: true
30 |
31 |
32 | functions:
33 | consumer:
34 | handler: handler.consume
35 | environment:
36 | DEV_ACCESS_KEY: ${env:DEV_ACCESS_KEY}
37 |       DEV_SECRET_KEY: ${env:DEV_SECRET_KEY}
38 | DEV_REGION: ${env:DEV_REGION}
39 | BUCKET: ${env:BUCKET}
40 | events:
41 | - sqs:
42 | arn:
43 | Fn::GetAtt:
44 | - MyQueue
45 | - Arn
46 | batchSize: 10
47 |
48 | resources:
49 | Resources:
50 | MyQueue:
51 | Type: "AWS::SQS::Queue"
52 | Properties:
53 | QueueName: ${env:QueueName}
54 | VisibilityTimeout: 400
55 |
--------------------------------------------------------------------------------
/Lab3/cron-jobs/.gitignore:
--------------------------------------------------------------------------------
1 | .serverless
2 | *.pyc
3 | *.pyo
4 |
--------------------------------------------------------------------------------
/Lab3/cron-jobs/README.md:
--------------------------------------------------------------------------------
1 |
13 |
14 | # Serverless Framework Python Scheduled Cron on AWS
15 |
16 | This template demonstrates how to develop and deploy a simple cron-like service running on AWS Lambda using the traditional Serverless Framework.
17 |
18 | ## Schedule event type
19 |
20 | This example defines two functions, `rateHandler` and `cronHandler`, both of which are triggered by an event of the `schedule` type, which is used to configure functions to be executed at a specific time or at regular intervals. For detailed information about the `schedule` event, please refer to the corresponding section of the Serverless [docs](https://serverless.com/framework/docs/providers/aws/events/schedule/).
21 |
22 | When defining `schedule` events, we need to use `rate` or `cron` expression syntax.
23 |
24 | ### Rate expressions syntax
25 |
26 | ```pseudo
27 | rate(value unit)
28 | ```
29 |
30 | `value` - A positive number
31 |
32 | `unit` - The unit of time. ( minute | minutes | hour | hours | day | days )
33 |
34 | In the example below, we use the `rate` syntax to define a `schedule` event that will trigger our `rateHandler` function every minute
35 |
36 | ```yml
37 | functions:
38 | rateHandler:
39 | handler: handler.run
40 | events:
41 | - schedule: rate(1 minute)
42 | ```
43 |
44 | Detailed information about rate expressions is available in official [AWS docs](https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#RateExpressions).
45 |
46 |
47 | ### Cron expressions syntax
48 |
49 | ```pseudo
50 | cron(Minutes Hours Day-of-month Month Day-of-week Year)
51 | ```
52 |
53 | All fields are required and time zone is UTC only.
54 |
55 | | Field | Values | Wildcards |
56 | | ------------- |:--------------:|:-------------:|
57 | | Minutes | 0-59 | , - * / |
58 | | Hours | 0-23 | , - * / |
59 | | Day-of-month | 1-31 | , - * ? / L W |
60 | | Month | 1-12 or JAN-DEC| , - * / |
61 | | Day-of-week | 1-7 or SUN-SAT | , - * ? / L # |
62 | | Year          | 1970-2199      | , - * /       |
63 |
64 | In the example below, we use the `cron` syntax to define a `schedule` event that will trigger our `cronHandler` function every second minute, Monday through Friday
65 |
66 | ```yml
67 | functions:
68 | cronHandler:
69 | handler: handler.run
70 | events:
71 | - schedule: cron(0/2 * ? * MON-FRI *)
72 | ```
73 |
74 | Detailed information about cron expressions is available in the official [AWS docs](https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions).
75 |
76 |
77 | ## Usage
78 |
79 | ### Deployment
80 |
81 | This example is made to work with the Serverless Framework dashboard, which includes advanced features such as CI/CD, monitoring, metrics, etc.
82 |
83 | In order to deploy with the dashboard, you first need to log in with:
84 |
85 | ```
86 | serverless login
87 | ```
88 |
89 | and then perform deployment with:
90 |
91 | ```
92 | serverless deploy
93 | ```
94 |
95 | After running deploy, you should see output similar to:
96 |
97 | ```bash
98 | Deploying aws-python-scheduled-cron-project to stage dev (us-east-1)
99 |
100 | ✔ Service deployed to stack aws-python-scheduled-cron-project-dev (205s)
101 |
102 | functions:
103 | rateHandler: aws-python-scheduled-cron-project-dev-rateHandler (2.9 kB)
104 | cronHandler: aws-python-scheduled-cron-project-dev-cronHandler (2.9 kB)
105 | ```
106 |
107 | There is no additional step required. Your defined schedules become active right away after deployment.
108 |
109 | ### Local invocation
110 |
111 | In order to test out your functions locally, you can invoke them with the following command:
112 |
113 | ```
114 | serverless invoke local --function rateHandler
115 | ```
116 |
117 | After invocation, you should see output similar to:
118 |
119 | ```bash
120 | INFO:handler:Your cron function aws-python-scheduled-cron-dev-rateHandler ran at 15:02:43.203145
121 | ```
122 |
123 | ### Bundling dependencies
124 |
125 | In case you would like to include 3rd party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
126 |
127 | ```bash
128 | serverless plugin install -n serverless-python-requirements
129 | ```
130 |
131 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
132 |
--------------------------------------------------------------------------------
/Lab3/cron-jobs/handler.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 |
4 | logger = logging.getLogger(__name__)
5 | logger.setLevel(logging.INFO)
6 |
7 |
8 | def run(event, context):
9 | current_time = datetime.datetime.now().time()
10 | name = context.function_name
11 | logger.info("Your cron function " + name + " ran at " + str(current_time))
12 |
--------------------------------------------------------------------------------
/Lab3/cron-jobs/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cron-jobs",
3 | "version": "1.0.0",
4 | "lockfileVersion": 1
5 | }
6 |
--------------------------------------------------------------------------------
/Lab3/cron-jobs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cron-jobs",
3 | "version": "1.0.0",
4 | "description": "Example of creating a function that runs as a cron job using the serverless `schedule` event",
5 | "author": "",
6 | "license": "MIT"
7 | }
8 |
--------------------------------------------------------------------------------
/Lab3/cron-jobs/serverless.yml:
--------------------------------------------------------------------------------
1 | service: cron-jobs
2 |
3 | frameworkVersion: '3'
4 |
5 |
6 | provider:
7 | name: aws
8 | runtime: python3.8
9 |
10 | functions:
11 | cronHandler:
12 | handler: handler.run
13 | events:
14 | # Invoke Lambda function every 2nd minute from Mon-Fri
15 | - schedule: cron(0/2 * ? * MON-FRI *)
16 |
--------------------------------------------------------------------------------
/Lab4/aws-event-bus/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/Lab4/aws-event-bus/README.md:
--------------------------------------------------------------------------------
1 |
13 |
14 |
15 | # Serverless Framework AWS Python Example
16 |
17 | This template demonstrates how to deploy a Python function running on AWS Lambda using the traditional Serverless Framework. The deployed function does not include any event definitions or any kind of persistence (database). For more advanced configurations, check out the [examples repo](https://github.com/serverless/examples/), which includes integrations with SQS and DynamoDB as well as examples of functions triggered in a `cron`-like manner. For details about the configuration of specific `events`, please refer to our [documentation](https://www.serverless.com/framework/docs/providers/aws/events/).
18 |
19 | ## Usage
20 |
21 | ### Deployment
22 |
23 | In order to deploy the example, you need to run the following command:
24 |
25 | ```
26 | $ serverless deploy
27 | ```
28 |
29 | After running deploy, you should see output similar to:
30 |
31 | ```bash
32 | Deploying aws-python-project to stage dev (us-east-1)
33 |
34 | ✔ Service deployed to stack aws-python-project-dev (112s)
35 |
36 | functions:
37 | hello: aws-python-project-dev-hello (1.5 kB)
38 | ```
39 |
40 | ### Invocation
41 |
42 | After successful deployment, you can invoke the deployed function by using the following command:
43 |
44 | ```bash
45 | serverless invoke --function hello
46 | ```
47 |
48 | Which should result in response similar to the following:
49 |
50 | ```json
51 | {
52 | "statusCode": 200,
53 | "body": "{\"message\": \"Go Serverless v3.0! Your function executed successfully!\", \"input\": {}}"
54 | }
55 | ```
56 |
57 | ### Local development
58 |
59 | You can invoke your function locally by using the following command:
60 |
61 | ```bash
62 | serverless invoke local --function hello
63 | ```
64 |
65 | Which should result in response similar to the following:
66 |
67 | ```
68 | {
69 | "statusCode": 200,
70 | "body": "{\"message\": \"Go Serverless v3.0! Your function executed successfully!\", \"input\": {}}"
71 | }
72 | ```
73 |
74 | ### Bundling dependencies
75 |
76 | In case you would like to include third-party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
77 |
78 | ```bash
79 | serverless plugin install -n serverless-python-requirements
80 | ```
81 |
82 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
83 |
--------------------------------------------------------------------------------
/Lab4/aws-event-bus/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 | print(event)
6 | body = {
7 | "message": "Go Serverless v3.0! Your function executed successfully!",
8 | "input": event,
9 | }
10 |
11 | return {"statusCode": 200, "body": json.dumps(body)}
12 |
--------------------------------------------------------------------------------
/Lab4/aws-event-bus/publish.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 | import json
4 | import boto3
5 | import datetime
6 | import uuid
7 | from datetime import datetime
8 | import json
9 |
10 |
11 |
12 | class Secrets:
13 | def __init__(self):
14 | self.AWS_ACCESS_KEY = "XXXXXXXXX"
15 | self.AWS_SECRET_KEY = "XXXX"
16 | self.AWS_REGION_NAME = "us-east-1"
17 | self.EventBusName = 'XXXXX'
18 |
19 | class AWSEventBus(Secrets):
20 |
21 |     """Helper class that adds functionality on top of boto3."""
22 |
23 | def __init__(self, **kwargs):
24 | Secrets.__init__(
25 | self
26 | )
27 | self.client = boto3.client(
28 | "events",
29 | aws_access_key_id=self.AWS_ACCESS_KEY,
30 | aws_secret_access_key=self.AWS_SECRET_KEY,
31 | region_name=self.AWS_REGION_NAME,
32 | )
33 |
34 | def send_events(self, json_message, DetailType, Source):
35 | response = self.client.put_events(
36 | Entries=[
37 | {
38 | 'Time': datetime.now(),
39 | 'Source': Source,
40 | 'Resources': [],
41 | 'DetailType': DetailType,
42 |                     'Detail': json.dumps(json_message),
43 | 'EventBusName': self.EventBusName,
44 |
45 |
46 | },
47 | ]
48 | )
49 | return response
50 |
51 | def main():
52 | """"
53 | {
54 | "detail":{
55 | "status":["new order"]
56 | }
57 |
58 | }
59 | """
60 | json_data = {
61 | "status":"new order",
62 | "message":"Soumil bought new product 1234",
63 | "language":["Python", "aws"]
64 | }
65 | helper = AWSEventBus()
66 | message = helper.send_events(json_message=json_data,
67 | DetailType='MyEvent',
68 | Source="MyProducer")
69 | print(message)
70 |
71 | main()
--------------------------------------------------------------------------------
/Lab4/aws-event-bus/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | console: true
4 | service: aws-event-bus
5 |
6 | frameworkVersion: '3'
7 |
8 | provider:
9 | name: aws
10 | runtime: python3.8
11 |
12 | functions:
13 | lambda-events:
14 | handler: handler.hello
15 | events:
16 | - eventBridge:
17 | eventBus: XXXXXXXXXXXXXXXX
18 | pattern:
19 | version:
20 | - "0"
21 |
22 |
--------------------------------------------------------------------------------
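
The pattern above (`version: "0"`) effectively matches every event on the bus. To receive only the "new order" events that `publish.py` sends, a narrower pattern could look like the sketch below; the bus name stays a placeholder, and the `source`/`detail-type`/`detail` values mirror what `send_events` is called with:

```yml
functions:
  lambda-events:
    handler: handler.hello
    events:
      - eventBridge:
          eventBus: XXXXXXXXXXXXXXXX
          pattern:
            source:
              - "MyProducer"
            detail-type:
              - "MyEvent"
            detail:
              status:
                - "new order"
```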
/Lab5/learn-sqs/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/Lab5/learn-sqs/README.md:
--------------------------------------------------------------------------------
1 |
13 |
14 |
15 | # Serverless Framework AWS Python Example
16 |
17 | This template demonstrates how to deploy a Python function running on AWS Lambda using the traditional Serverless Framework. The deployed function does not include any event definitions or any kind of persistence (database). For more advanced configurations, check out the [examples repo](https://github.com/serverless/examples/), which includes integrations with SQS and DynamoDB as well as examples of functions triggered in a `cron`-like manner. For details about the configuration of specific `events`, please refer to our [documentation](https://www.serverless.com/framework/docs/providers/aws/events/).
18 |
19 | ## Usage
20 |
21 | ### Deployment
22 |
23 | In order to deploy the example, you need to run the following command:
24 |
25 | ```
26 | $ serverless deploy
27 | ```
28 |
29 | After running deploy, you should see output similar to:
30 |
31 | ```bash
32 | Deploying aws-python-project to stage dev (us-east-1)
33 |
34 | ✔ Service deployed to stack aws-python-project-dev (112s)
35 |
36 | functions:
37 | hello: aws-python-project-dev-hello (1.5 kB)
38 | ```
39 |
40 | ### Invocation
41 |
42 | After successful deployment, you can invoke the deployed function by using the following command:
43 |
44 | ```bash
45 | serverless invoke --function hello
46 | ```
47 |
48 | Which should result in response similar to the following:
49 |
50 | ```json
51 | {
52 | "statusCode": 200,
53 | "body": "{\"message\": \"Go Serverless v3.0! Your function executed successfully!\", \"input\": {}}"
54 | }
55 | ```
56 |
57 | ### Local development
58 |
59 | You can invoke your function locally by using the following command:
60 |
61 | ```bash
62 | serverless invoke local --function hello
63 | ```
64 |
65 | Which should result in response similar to the following:
66 |
67 | ```
68 | {
69 | "statusCode": 200,
70 | "body": "{\"message\": \"Go Serverless v3.0! Your function executed successfully!\", \"input\": {}}"
71 | }
72 | ```
73 |
74 | ### Bundling dependencies
75 |
76 | In case you would like to include third-party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
77 |
78 | ```bash
79 | serverless plugin install -n serverless-python-requirements
80 | ```
81 |
82 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
83 |
--------------------------------------------------------------------------------
/Lab5/learn-sqs/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 | print(event)
6 | body = {
7 | "message": "Go Serverless v3.0! Your function executed successfully!",
8 | "input": event,
9 | }
10 |
11 | return {"statusCode": 200, "body": json.dumps(body)}
12 |
--------------------------------------------------------------------------------
/Lab5/learn-sqs/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | console: true
4 | service: learn-sqs
5 |
6 | frameworkVersion: '3'
7 |
8 | provider:
9 | name: aws
10 | runtime: python3.8
11 |
12 | plugins:
13 | - serverless-lift
14 |
15 | constructs:
16 | my-queue:
17 | type: queue
18 | worker:
19 | handler: handler.hello
--------------------------------------------------------------------------------
/Lab6/event-lambda/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/Lab6/event-lambda/README.md:
--------------------------------------------------------------------------------
1 |
13 |
14 |
15 | # Serverless Framework AWS Python Example
16 |
17 | This template demonstrates how to deploy a Python function running on AWS Lambda using the traditional Serverless Framework. The deployed function does not include any event definitions or any kind of persistence (database). For more advanced configurations, check out the [examples repo](https://github.com/serverless/examples/), which includes integrations with SQS and DynamoDB as well as examples of functions triggered in a `cron`-like manner. For details about the configuration of specific `events`, please refer to our [documentation](https://www.serverless.com/framework/docs/providers/aws/events/).
18 |
19 | ## Usage
20 |
21 | ### Deployment
22 |
23 | In order to deploy the example, you need to run the following command:
24 |
25 | ```
26 | $ serverless deploy
27 | ```
28 |
29 | After running deploy, you should see output similar to:
30 |
31 | ```bash
32 | Deploying aws-python-project to stage dev (us-east-1)
33 |
34 | ✔ Service deployed to stack aws-python-project-dev (112s)
35 |
36 | functions:
37 | hello: aws-python-project-dev-hello (1.5 kB)
38 | ```
39 |
40 | ### Invocation
41 |
42 | After successful deployment, you can invoke the deployed function by using the following command:
43 |
44 | ```bash
45 | serverless invoke --function hello
46 | ```
47 |
48 | Which should result in response similar to the following:
49 |
50 | ```json
51 | {
52 | "statusCode": 200,
53 | "body": "{\"message\": \"Go Serverless v3.0! Your function executed successfully!\", \"input\": {}}"
54 | }
55 | ```
56 |
57 | ### Local development
58 |
59 | You can invoke your function locally by using the following command:
60 |
61 | ```bash
62 | serverless invoke local --function hello
63 | ```
64 |
65 | Which should result in response similar to the following:
66 |
67 | ```
68 | {
69 | "statusCode": 200,
70 | "body": "{\"message\": \"Go Serverless v3.0! Your function executed successfully!\", \"input\": {}}"
71 | }
72 | ```
73 |
74 | ### Bundling dependencies
75 |
76 | In case you would like to include third-party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
77 |
78 | ```bash
79 | serverless plugin install -n serverless-python-requirements
80 | ```
81 |
82 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
83 |
--------------------------------------------------------------------------------
/Lab6/event-lambda/fail.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def fail_handler(event, context):
5 |
6 |     print("I am in the fail handler")
7 | print(event)
8 | body = {
9 | "message": "Go Serverless v3.0! Your function executed successfully!",
10 | "input": event,
11 | }
12 |
13 | return {"statusCode": 200, "body": json.dumps(body)}
14 |
--------------------------------------------------------------------------------
/Lab6/event-lambda/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 |
6 | raise Exception("Soumil Fail ")
7 |
8 | body = {
9 | "message": "Go Serverless v3.0! Your function executed successfully!",
10 | "input": event,
11 | }
12 |
13 | return {"statusCode": 200, "body": json.dumps(body)}
14 |
--------------------------------------------------------------------------------
/Lab6/event-lambda/serverless.yml:
--------------------------------------------------------------------------------
1 | service: event-bus-lambda
2 |
3 | frameworkVersion: '3'
4 |
5 | provider:
6 | name: aws
7 | runtime: python3.8
8 |
9 | functions:
10 |
11 | hello:
12 | handler: handler.hello
13 | destinations:
14 | onSuccess: sucessHandler
15 | onFailure: FailureHandler
16 |
17 | sucessHandler:
18 | handler: sucess.sucess_handler
19 |
20 | FailureHandler:
21 | handler: fail.fail_handler
--------------------------------------------------------------------------------
/Lab6/event-lambda/sucess.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def sucess_handler(event, context):
5 |
6 |     print("I am in the success handler")
7 | print(event)
8 | body = {
9 | "message": "Go Serverless v3.0! Your function executed successfully!",
10 | "input": event,
11 | }
12 |
13 | return {"statusCode": 200, "body": json.dumps(body)}
14 |
--------------------------------------------------------------------------------
/Lab6/firelambda.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import json
3 |
4 | client = boto3.client(
5 | "lambda",
6 | aws_access_key_id="XXXXXXXXXXXXX",
7 | aws_secret_access_key="XXXX",
8 | region_name="us-east-1",
9 | )
10 |
11 |
12 | response = client.invoke_async(
13 | FunctionName='event-bus-lambda-dev-hello',
14 | InvokeArgs=json.dumps({
15 | 'Status': 123
16 | })
17 | )
18 | print(response)
--------------------------------------------------------------------------------
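
Note that the `onSuccess`/`onFailure` destinations configured in Lab6's `serverless.yml` only fire for asynchronous invocations, which is why `firelambda.py` above invokes the function asynchronously (boto3 has since deprecated `invoke_async` in favour of `invoke` with `InvocationType='Event'`). Roughly the same test from the AWS CLI, with the function name taken from the deployed stack:

```bash
aws lambda invoke \
  --function-name event-bus-lambda-dev-hello \
  --invocation-type Event \
  --payload '{"Status": 123}' \
  response.json
# AWS CLI v2 may additionally need: --cli-binary-format raw-in-base64-out
```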
/Lab8/aws-python-http-api-project/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/Lab8/aws-python-http-api-project/README.md:
--------------------------------------------------------------------------------
1 |
12 |
13 | # Serverless Framework Python HTTP API on AWS
14 |
15 | This template demonstrates how to make a simple HTTP API with Python running on AWS Lambda and API Gateway using the Serverless Framework.
16 |
17 | This template does not include any kind of persistence (database). For more advanced examples, check out the [serverless/examples repository](https://github.com/serverless/examples/) which includes DynamoDB, Mongo, Fauna and other examples.
18 |
19 | ## Usage
20 |
21 | ### Deployment
22 |
23 | ```
24 | $ serverless deploy
25 | ```
26 |
27 | After deploying, you should see output similar to:
28 |
29 | ```bash
30 | Deploying aws-python-http-api-project to stage dev (us-east-1)
31 |
32 | ✔ Service deployed to stack aws-python-http-api-project-dev (140s)
33 |
34 | endpoint: GET - https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/
35 | functions:
36 | hello: aws-python-http-api-project-dev-hello (2.3 kB)
37 | ```
38 |
39 | _Note_: In current form, after deployment, your API is public and can be invoked by anyone. For production deployments, you might want to configure an authorizer. For details on how to do that, refer to [http event docs](https://www.serverless.com/framework/docs/providers/aws/events/apigateway/).
40 |
41 | ### Invocation
42 |
43 | After successful deployment, you can call the created application via HTTP:
44 |
45 | ```bash
46 | curl https://xxxxxxx.execute-api.us-east-1.amazonaws.com/
47 | ```
48 |
49 | Which should result in response similar to the following (removed `input` content for brevity):
50 |
51 | ```json
52 | {
53 | "message": "Go Serverless v3.0! Your function executed successfully!",
54 | "input": {
55 | ...
56 | }
57 | }
58 | ```
59 |
60 | ### Local development
61 |
62 | You can invoke your function locally by using the following command:
63 |
64 | ```bash
65 | serverless invoke local --function hello
66 | ```
67 |
68 | Which should result in response similar to the following:
69 |
70 | ```
71 | {
72 | "statusCode": 200,
73 | "body": "{\n \"message\": \"Go Serverless v3.0! Your function executed successfully!\",\n \"input\": \"\"\n}"
74 | }
75 | ```
76 |
77 | Alternatively, it is also possible to emulate API Gateway and Lambda locally by using `serverless-offline` plugin. In order to do that, execute the following command:
78 |
79 | ```bash
80 | serverless plugin install -n serverless-offline
81 | ```
82 |
83 | It will add the `serverless-offline` plugin to `devDependencies` in `package.json` file as well as will add it to `plugins` in `serverless.yml`.
84 |
85 | After installation, you can start local emulation with:
86 |
87 | ```
88 | serverless offline
89 | ```
90 |
91 | To learn more about the capabilities of `serverless-offline`, please refer to its [GitHub repository](https://github.com/dherault/serverless-offline).
92 |
93 | ### Bundling dependencies
94 |
95 | In case you would like to include 3rd party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
96 |
97 | ```bash
98 | serverless plugin install -n serverless-python-requirements
99 | ```
100 |
101 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
102 |
--------------------------------------------------------------------------------
/Lab8/aws-python-http-api-project/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 | body = {
6 | "message": "Go Serverless v3.0! Your function executed successfully!",
7 | "input": event,
8 | }
9 |
10 | response = {"statusCode": 200, "body": json.dumps(body)}
11 |
12 | return response
13 |
--------------------------------------------------------------------------------
/Lab8/aws-python-http-api-project/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | service: aws-python-http-api-project
4 | frameworkVersion: '3'
5 |
6 | provider:
7 | name: aws
8 | runtime: python3.8
9 | apiGateway:
10 | apiKeys:
11 | - name: 'user1'
12 |         value: 'your-api-key-that-is-at-least-20-characters-long'
13 |
14 | functions:
15 | hello:
16 | handler: handler.hello
17 | events:
18 | - http:
19 | path: /hello
20 | method: post
21 | private: true
--------------------------------------------------------------------------------
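
Because the `hello` endpoint is marked `private: true`, API Gateway rejects calls that do not carry the key in an `x-api-key` header. A hedged example call (URL, stage and key are placeholders; the actual endpoint is printed by `serverless deploy`):

```bash
curl -X POST \
  -H "x-api-key: your-api-key-that-is-at-least-20-characters-long" \
  https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/dev/hello
```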
/Lab9/aws-python-http-api-project/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/Lab9/aws-python-http-api-project/README.md:
--------------------------------------------------------------------------------
1 |
12 |
13 | # Serverless Framework Python HTTP API on AWS
14 |
15 | This template demonstrates how to make a simple HTTP API with Python running on AWS Lambda and API Gateway using the Serverless Framework.
16 |
17 | This template does not include any kind of persistence (database). For more advanced examples, check out the [serverless/examples repository](https://github.com/serverless/examples/) which includes DynamoDB, Mongo, Fauna and other examples.
18 |
19 | ## Usage
20 |
21 | ### Deployment
22 |
23 | ```
24 | $ serverless deploy
25 | ```
26 |
27 | After deploying, you should see output similar to:
28 |
29 | ```bash
30 | Deploying aws-python-http-api-project to stage dev (us-east-1)
31 |
32 | ✔ Service deployed to stack aws-python-http-api-project-dev (140s)
33 |
34 | endpoint: GET - https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/
35 | functions:
36 | hello: aws-python-http-api-project-dev-hello (2.3 kB)
37 | ```
38 |
39 | _Note_: In current form, after deployment, your API is public and can be invoked by anyone. For production deployments, you might want to configure an authorizer. For details on how to do that, refer to [http event docs](https://www.serverless.com/framework/docs/providers/aws/events/apigateway/).
40 |
41 | ### Invocation
42 |
43 | After successful deployment, you can call the created application via HTTP:
44 |
45 | ```bash
46 | curl https://xxxxxxx.execute-api.us-east-1.amazonaws.com/
47 | ```
48 |
49 | Which should result in response similar to the following (removed `input` content for brevity):
50 |
51 | ```json
52 | {
53 | "message": "Go Serverless v3.0! Your function executed successfully!",
54 | "input": {
55 | ...
56 | }
57 | }
58 | ```
59 |
60 | ### Local development
61 |
62 | You can invoke your function locally by using the following command:
63 |
64 | ```bash
65 | serverless invoke local --function hello
66 | ```
67 |
68 | Which should result in response similar to the following:
69 |
70 | ```
71 | {
72 | "statusCode": 200,
73 | "body": "{\n \"message\": \"Go Serverless v3.0! Your function executed successfully!\",\n \"input\": \"\"\n}"
74 | }
75 | ```
76 |
77 | Alternatively, it is also possible to emulate API Gateway and Lambda locally by using `serverless-offline` plugin. In order to do that, execute the following command:
78 |
79 | ```bash
80 | serverless plugin install -n serverless-offline
81 | ```
82 |
83 | It will add the `serverless-offline` plugin to `devDependencies` in the `package.json` file, as well as adding it to `plugins` in `serverless.yml`.
84 |
85 | After installation, you can start local emulation with:
86 |
87 | ```
88 | serverless offline
89 | ```
90 |
91 | To learn more about the capabilities of `serverless-offline`, please refer to its [GitHub repository](https://github.com/dherault/serverless-offline).
92 |
93 | ### Bundling dependencies
94 |
95 | In case you would like to include 3rd party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
96 |
97 | ```bash
98 | serverless plugin install -n serverless-python-requirements
99 | ```
100 |
101 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
102 |
--------------------------------------------------------------------------------
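To make the bundling step above concrete, here is a hypothetical handler that relies on an external package: assuming `requests` has been added to `requirements.txt`, `serverless-python-requirements` packages it alongside the function, so the import below works once deployed. This is a sketch, not one of the lab's files:

```python
# handler_with_dependency.py -- illustrative sketch; assumes "requests" is listed in requirements.txt
import json

import requests  # bundled into the deployment package by serverless-python-requirements


def hello(event, context):
    # Example outbound call; the URL is only a placeholder for demonstration.
    resp = requests.get("https://api.github.com")
    body = {"message": "external package worked", "github_status": resp.status_code}
    return {"statusCode": 200, "body": json.dumps(body)}
```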
/Lab9/aws-python-http-api-project/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 | body = {
6 | "message": "Go Serverless v3.0! Your function executed successfully!",
7 | "input": event,
8 | }
9 |
10 | response = {"statusCode": 200, "body": json.dumps(body)}
11 |
12 | return response
13 |
--------------------------------------------------------------------------------
/Lab9/aws-python-http-api-project/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | service: aws-python-http-api-project
4 | frameworkVersion: '3'
5 |
6 | provider:
7 | name: aws
8 | runtime: python3.8
9 |
10 | plugins:
11 | - serverless-add-api-key
12 |
13 | custom:
14 | apiKeys:
15 | - name: dev-free-account
16 |       value: 'your-api-key-that-is-at-least-20-characters-long'
17 |       usagePlan:
18 |         name: "Free Plan"
19 |         description: "Free plan allows only limited requests"
20 |         quota:
21 |           limit: 5 # The target maximum number of requests that can be made in a given time period.
22 |           period: DAY # The time period for which the target maximum limit of requests applies, such as DAY or WEEK. For valid values, see the period property for the UsagePlan resource in the Amazon API Gateway REST API Reference.
23 |         throttle:
24 |           burstLimit: 100 # The API target request burst rate limit. This allows more requests through for a period of time than the target rate limit. For more information about request throttling, see Manage API Request Throttling in the API Gateway Developer Guide.
25 |           rateLimit: 10 # The API target request steady-state rate limit.
26 |
27 |
28 | functions:
29 | hello:
30 | handler: handler.hello
31 | events:
32 | - http:
33 | path: /hello
34 | method: post
35 | private: true
36 |
--------------------------------------------------------------------------------
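Since the usage plan above caps the key at 5 requests per day and throttles at roughly 10 requests per second, repeatedly calling the endpoint is an easy way to see it in action. A rough sketch with placeholder URL and key; once the daily quota is exhausted, API Gateway should start returning 429 Too Many Requests:

```python
# exercise_usage_plan.py -- illustrative only; URL and key are placeholders
import urllib.error
import urllib.request

URL = "https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/dev/hello"
API_KEY = "your-api-key-that-is-at-least-20-characters-long"

for i in range(7):  # two more than the daily quota of 5
    req = urllib.request.Request(URL, data=b"{}", headers={"x-api-key": API_KEY}, method="POST")
    try:
        with urllib.request.urlopen(req) as resp:
            print(f"request {i + 1}: {resp.status}")
    except urllib.error.HTTPError as err:
        # Expect 429 once the quota is used up (403 if the key is missing or wrong).
        print(f"request {i + 1}: {err.code} {err.reason}")
```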
/Lab9/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "requires": true,
3 | "lockfileVersion": 1,
4 | "dependencies": {
5 | "ansi-styles": {
6 | "version": "3.2.1",
7 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
8 | "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
9 | "dev": true,
10 | "requires": {
11 | "color-convert": "^1.9.0"
12 | }
13 | },
14 | "aws-sdk": {
15 | "version": "2.1120.0",
16 | "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1120.0.tgz",
17 | "integrity": "sha512-3cKXUFxC3CDBbJ/JlXEKmJZKFZhqGii7idGaLxvV5/OzqEDUstYkHGX3TCJdQRHrRwpFvRVOekXSwLxBltqXuQ==",
18 | "dev": true,
19 | "requires": {
20 | "buffer": "4.9.2",
21 | "events": "1.1.1",
22 | "ieee754": "1.1.13",
23 | "jmespath": "0.16.0",
24 | "querystring": "0.2.0",
25 | "sax": "1.2.1",
26 | "url": "0.10.3",
27 | "uuid": "3.3.2",
28 | "xml2js": "0.4.19"
29 | }
30 | },
31 | "base64-js": {
32 | "version": "1.5.1",
33 | "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
34 | "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
35 | "dev": true
36 | },
37 | "buffer": {
38 | "version": "4.9.2",
39 | "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz",
40 | "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==",
41 | "dev": true,
42 | "requires": {
43 | "base64-js": "^1.0.2",
44 | "ieee754": "^1.1.4",
45 | "isarray": "^1.0.0"
46 | }
47 | },
48 | "chalk": {
49 | "version": "2.4.2",
50 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
51 | "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
52 | "dev": true,
53 | "requires": {
54 | "ansi-styles": "^3.2.1",
55 | "escape-string-regexp": "^1.0.5",
56 | "supports-color": "^5.3.0"
57 | }
58 | },
59 | "color-convert": {
60 | "version": "1.9.3",
61 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
62 | "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
63 | "dev": true,
64 | "requires": {
65 | "color-name": "1.1.3"
66 | }
67 | },
68 | "color-name": {
69 | "version": "1.1.3",
70 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
71 | "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
72 | "dev": true
73 | },
74 | "escape-string-regexp": {
75 | "version": "1.0.5",
76 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
77 | "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
78 | "dev": true
79 | },
80 | "events": {
81 | "version": "1.1.1",
82 | "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz",
83 | "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=",
84 | "dev": true
85 | },
86 | "has-flag": {
87 | "version": "3.0.0",
88 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
89 | "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=",
90 | "dev": true
91 | },
92 | "ieee754": {
93 | "version": "1.1.13",
94 | "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz",
95 | "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==",
96 | "dev": true
97 | },
98 | "isarray": {
99 | "version": "1.0.0",
100 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
101 | "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=",
102 | "dev": true
103 | },
104 | "jmespath": {
105 | "version": "0.16.0",
106 | "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",
107 | "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==",
108 | "dev": true
109 | },
110 | "lodash.get": {
111 | "version": "4.4.2",
112 | "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
113 | "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk="
114 | },
115 | "lodash.isempty": {
116 | "version": "4.4.0",
117 | "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz",
118 | "integrity": "sha1-b4bL7di+TsmHvpqvM8loTbGzHn4="
119 | },
120 | "punycode": {
121 | "version": "1.3.2",
122 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
123 | "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=",
124 | "dev": true
125 | },
126 | "querystring": {
127 | "version": "0.2.0",
128 | "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
129 | "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=",
130 | "dev": true
131 | },
132 | "sax": {
133 | "version": "1.2.1",
134 | "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz",
135 | "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=",
136 | "dev": true
137 | },
138 | "serverless-add-api-key": {
139 | "version": "4.2.1",
140 | "resolved": "https://registry.npmjs.org/serverless-add-api-key/-/serverless-add-api-key-4.2.1.tgz",
141 | "integrity": "sha512-UOEWGo+8NgFzUDiZIDeeK8ssbWcUTc+3aDZIZqqXWAy4kZdYdEjK0itj30VyT2HN1LFSbD0dh9VLmBlhIyY+3A==",
142 | "dev": true,
143 | "requires": {
144 | "aws-sdk": "^2.421.0",
145 | "chalk": "^2.4.1"
146 | }
147 | },
148 | "serverless-api-gateway-throttling": {
149 | "version": "1.2.2",
150 | "resolved": "https://registry.npmjs.org/serverless-api-gateway-throttling/-/serverless-api-gateway-throttling-1.2.2.tgz",
151 | "integrity": "sha512-ZfQaWOOn3l7M2tyWfU/NqQ+jNSV1rhRGnPTerrRMOpgG4XJZlc7YHDj2156eaaLIxPKr8Sf96HsKgIBOQA6M4g==",
152 | "requires": {
153 | "lodash.get": "^4.4.2",
154 | "lodash.isempty": "^4.4.0"
155 | }
156 | },
157 | "serverless-reqvalidator-plugin": {
158 | "version": "2.0.0",
159 | "resolved": "https://registry.npmjs.org/serverless-reqvalidator-plugin/-/serverless-reqvalidator-plugin-2.0.0.tgz",
160 | "integrity": "sha512-FmzjI6wEoxX27tUg6cNGxDoLNy9o4qbZJiAfSOoayOppVAXarEQ4WAkOCh+Ede6YI1gKxzZSb1VStA381yvXvg=="
161 | },
162 | "supports-color": {
163 | "version": "5.5.0",
164 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
165 | "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
166 | "dev": true,
167 | "requires": {
168 | "has-flag": "^3.0.0"
169 | }
170 | },
171 | "url": {
172 | "version": "0.10.3",
173 | "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
174 | "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=",
175 | "dev": true,
176 | "requires": {
177 | "punycode": "1.3.2",
178 | "querystring": "0.2.0"
179 | }
180 | },
181 | "uuid": {
182 | "version": "3.3.2",
183 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
184 | "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==",
185 | "dev": true
186 | },
187 | "xml2js": {
188 | "version": "0.4.19",
189 | "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
190 | "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==",
191 | "dev": true,
192 | "requires": {
193 | "sax": ">=0.6.0",
194 | "xmlbuilder": "~9.0.1"
195 | }
196 | },
197 | "xmlbuilder": {
198 | "version": "9.0.7",
199 | "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
200 | "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=",
201 | "dev": true
202 | }
203 | }
204 | }
205 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Learn the Serverless Framework the Easy Way
2 | Install external Python packages on Serverless Framework projects.
3 |
4 | # Tutorials
5 |
6 | | Sr No | Title |Video Link | Lab Code |
7 | |-------|---------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------|
8 | | 1 | Serverless Framework Deploy Python lambdas in minutes |https://www.youtube.com/watch?v=Ke7DSpsszWY&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=1 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab1 |
9 | | 2 | Rapidly create deploy python library on AWS Layers in Minutes with serverless framework #3 |https://www.youtube.com/watch?v=lC489CpKg-s&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=2 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab2/lambda-layers |
10 | | 3 | How to install External Python library such as Pandas on AWS lambda using serverless framework #2 |https://www.youtube.com/watch?v=qVk1L7MHjGM&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=3 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab1 |
11 | | 4 | How to Fire your Lambda on CRON Schedule using serverless framework |https://www.youtube.com/watch?v=SEHEAVOMQfQ&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=4 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab3/cron-jobs |
12 | | 5     | Serverless Framework \| Fire Lambda when an event is published to Event Bus |https://www.youtube.com/watch?v=0Wb-a9Fa7pQ&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=5 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab4/aws-event-bus |
13 | | 6     | Rapidly Deploy SQS queue with Lambda Worker with Serverless Framework using the Lift plugin #5 |https://www.youtube.com/watch?v=cLK-mFLYzvY&feature=youtu.be | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab5/learn-sqs |
14 | | 7 | Learn about Lambda Destination with serverless Framework #6 |https://www.youtube.com/watch?v=UglcKQ3cnvc&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=7 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab6 |
15 | | 8     | Getting Started with Serverless Framework \| API Gateway and Lambda \| Lab 7 |https://www.youtube.com/watch?v=fPxos27jOFE&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=8 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/lab7/learn |
16 | | 9     | Getting Started with Serverless Framework \| API Gateway and Lambda \| Lab 8 \| API Keys |https://www.youtube.com/watch?v=j_Gz-ACum80&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=9 | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab8/aws-python-http-api-project |
17 | | 10    | Getting Started with Serverless Framework \| API Gateway and Lambda \| Lab 9 \| Usage Plan |https://www.youtube.com/watch?v=IzeC881uDKY&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=10| https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab9/aws-python-http-api-project |
18 | | 11    | Serverless Framework Lab 10: Creating DynamoDB tables |https://www.youtube.com/watch?v=BbcogaTFhXs&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=11| |
19 | | 12 | Lab 11: Learning about SNS and Lambda with Serverless Framework |https://www.youtube.com/watch?v=we2EJDT4oWA&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=12| https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab11 |
20 | | 13    | Ship Python Logs from AWS Lambda to Datadog \| Learn how to use Datadog on AWS Lambda \| Python |https://www.youtube.com/watch?v=1uCHqN3UidY&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=14| https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab13/lambda |
21 | | 14 | Learn how to write Infrastructure Code for Glue crawlers and databases using serverless Framework |https://www.youtube.com/watch?v=uk5fh3wA1rs&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=15| https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/LAB14 |
22 | | 15 | Learn How to Deploy Glue Job (Scripts) Through Serverless Framework with Code | Lab 15 |https://www.youtube.com/watch?v=l3OtAhOJ-CM&list=PLL2hlSFBmWwzA7ut0KKYM6F8LKfu84-5c&index=16| https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab15 |
23 | | 16    | Learn how to create a CI/CD Pipeline the easy way for Serverless Framework \| Lambda | | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab16 |
24 | | 17 | Learn how to integrate and deploy Kinesis Data Stream with Lambda with serverless Framework Lab 17 | https://www.youtube.com/watch?v=ggAGHTLivbs | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab17 |
25 | | 18    | Learn how to Develop Web Scrapers and scrape things at Scale Using AWS Lambdas Lab 18 | XXXX | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab18 |
26 | | 21 | Process CSV Files When Inserted on AWS S3 Via Events and Lambdas |https://youtu.be/fyyQeZVC3Nc | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab%2021 | |
27 | | 22 | Learn how to Build FIFO FAN Out Model (SNS SQS Lambda) | https://youtu.be/Hq_z4UQWjHs | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab22 | |
28 | | 23 | How to Move Data from DynamoDB to Data lake S3 Hands on Lab Glue Serverless Framework Lab 23 | https://youtu.be/V3O9O5plC4M | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab%2023 | |
29 | | 24    | Populate Data Lake (S3) in Real Time from DynamoDB with Streams Infrastructure Code Data Archive Project | https://youtu.be/ytpbxb_D6FY | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab24 | |
30 | | 25 | Powering DownStream Users from DynamoDB Streams FAN out Model Via Kinesis Lab 25 with Code | https://youtu.be/Qli_cEjGbbQ | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab25 | |
31 | | 26 | Athena Reporting Lambda | https://www.youtube.com/watch?v=-uhTElNP29U&feature=youtu.be | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab26 | |
32 | | 27 | Hands on Lab how we moved 8.5M Records in 60 Minutes from Mongo to S3 Using SQS and Lambdas | https://www.youtube.com/watch?v=-SOyrlNSfUA | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab27 | |
33 | | |
34 | | 29 | Serverless Data Engineering: How to Generate Parquet Files with AWS Lambda and Upload to S3 | https://www.youtube.com/watch?v=k2rpwOCuEsw&t=1s | https://github.com/soumilshah1995/install-external-python-packages-on-serverless/tree/main/Lab29 | |
35 |
36 |
37 | * Lab 18: Refer to https://github.com/soumilshah1995/Selenium-on-AWS-Lambda-Python3.7
38 |
39 | ## Step 1: Install Serverless
40 | ```
41 | npm install -g serverless
42 |
43 | serverless config credentials --provider aws --key XXXX --secret XXXXX -o
44 |
45 | ```
46 |
47 |
48 |
49 | ## Step 2: Create Project
50 | ```
51 | serverless create --template aws-python3 --name lambda-learn --path lambda-learn
52 |
53 | cd lambda-learn
54 | ```
55 |
56 |
57 | ## Step 3: Install plugins
58 | ```
59 | sls plugin install -n serverless-python-requirements
60 |
61 |
62 | ```
63 |
64 | ## Step 4: Deploy
65 | ```
66 | sls deploy
67 | ```
68 |
69 |
70 | ### References
71 |
72 | * https://www.serverless.com/blog/serverless-python-packaging/
73 |
74 | * https://www.serverless.com/plugins/serverless-python-requirements
75 |
76 | * https://towardsdatascience.com/deploying-aws-lamba-function-layer-with-serverless-framework-f3f4fab1b7e9
77 | * https://aws.plainenglish.io/serverless-framework-setting-up-a-custom-domain-to-api-gateway-91064a598f1d
78 |
79 |
80 | # Further reading
81 |
82 | * Build a Python REST API with Serverless, Lambda, and DynamoDB
83 | https://www.serverless.com/blog/flask-python-rest-api-serverless-lambda-dynamodb/
84 |
85 |
86 |
87 |
--------------------------------------------------------------------------------
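Once `sls deploy` finishes, the function can also be checked without going through API Gateway by invoking it directly. A minimal sketch using boto3; the function name below is an assumption built from the service name and stage, so substitute the name shown in your deploy output:

```python
# invoke_check.py -- illustrative sketch; requires boto3 and configured AWS credentials
import json

import boto3

client = boto3.client("lambda")

resp = client.invoke(
    FunctionName="lambda-learn-dev-hello",  # hypothetical; use the name from `sls deploy`
    Payload=json.dumps({"source": "smoke-test"}).encode(),
)

print(resp["StatusCode"])                  # 200 means the invocation succeeded
print(json.loads(resp["Payload"].read()))  # the handler's return value
```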
/lab7/learn/.gitignore:
--------------------------------------------------------------------------------
1 | # Distribution / packaging
2 | .Python
3 | env/
4 | build/
5 | develop-eggs/
6 | dist/
7 | downloads/
8 | eggs/
9 | .eggs/
10 | lib/
11 | lib64/
12 | parts/
13 | sdist/
14 | var/
15 | *.egg-info/
16 | .installed.cfg
17 | *.egg
18 |
19 | # Serverless directories
20 | .serverless
--------------------------------------------------------------------------------
/lab7/learn/README.md:
--------------------------------------------------------------------------------
1 |
12 |
13 | # Serverless Framework Python HTTP API on AWS
14 |
15 | This template demonstrates how to make a simple HTTP API with Python running on AWS Lambda and API Gateway using the Serverless Framework.
16 |
17 | This template does not include any kind of persistence (database). For more advanced examples, check out the [serverless/examples repository](https://github.com/serverless/examples/) which includes DynamoDB, Mongo, Fauna and other examples.
18 |
19 | ## Usage
20 |
21 | ### Deployment
22 |
23 | ```
24 | $ serverless deploy
25 | ```
26 |
27 | After deploying, you should see output similar to:
28 |
29 | ```bash
30 | Deploying aws-python-http-api-project to stage dev (us-east-1)
31 |
32 | ✔ Service deployed to stack aws-python-http-api-project-dev (140s)
33 |
34 | endpoint: GET - https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/
35 | functions:
36 | hello: aws-python-http-api-project-dev-hello (2.3 kB)
37 | ```
38 |
39 | _Note_: In current form, after deployment, your API is public and can be invoked by anyone. For production deployments, you might want to configure an authorizer. For details on how to do that, refer to [http event docs](https://www.serverless.com/framework/docs/providers/aws/events/apigateway/).
40 |
41 | ### Invocation
42 |
43 | After successful deployment, you can call the created application via HTTP:
44 |
45 | ```bash
46 | curl https://xxxxxxx.execute-api.us-east-1.amazonaws.com/
47 | ```
48 |
49 | Which should result in a response similar to the following (`input` content removed for brevity):
50 |
51 | ```json
52 | {
53 | "message": "Go Serverless v3.0! Your function executed successfully!",
54 | "input": {
55 | ...
56 | }
57 | }
58 | ```
59 |
60 | ### Local development
61 |
62 | You can invoke your function locally by using the following command:
63 |
64 | ```bash
65 | serverless invoke local --function hello
66 | ```
67 |
68 | Which should result in a response similar to the following:
69 |
70 | ```
71 | {
72 | "statusCode": 200,
73 | "body": "{\n \"message\": \"Go Serverless v3.0! Your function executed successfully!\",\n \"input\": \"\"\n}"
74 | }
75 | ```
76 |
77 | Alternatively, it is also possible to emulate API Gateway and Lambda locally by using `serverless-offline` plugin. In order to do that, execute the following command:
78 |
79 | ```bash
80 | serverless plugin install -n serverless-offline
81 | ```
82 |
83 | It will add the `serverless-offline` plugin to `devDependencies` in the `package.json` file, as well as adding it to `plugins` in `serverless.yml`.
84 |
85 | After installation, you can start local emulation with:
86 |
87 | ```
88 | serverless offline
89 | ```
90 |
91 | To learn more about the capabilities of `serverless-offline`, please refer to its [GitHub repository](https://github.com/dherault/serverless-offline).
92 |
93 | ### Bundling dependencies
94 |
95 | In case you would like to include 3rd party dependencies, you will need to use a plugin called `serverless-python-requirements`. You can set it up by running the following command:
96 |
97 | ```bash
98 | serverless plugin install -n serverless-python-requirements
99 | ```
100 |
101 | Running the above will automatically add `serverless-python-requirements` to the `plugins` section in your `serverless.yml` file and add it as a `devDependency` to the `package.json` file. The `package.json` file will be automatically created if it doesn't exist beforehand. Now you will be able to add your dependencies to the `requirements.txt` file (`Pipfile` and `pyproject.toml` are also supported but require additional configuration) and they will be automatically injected into the Lambda package during the build process. For more details about the plugin's configuration, please refer to the [official documentation](https://github.com/UnitedIncome/serverless-python-requirements).
102 |
--------------------------------------------------------------------------------
/lab7/learn/handler.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 |
4 | def hello(event, context):
5 | body = {
6 | "message": "Go Serverless v3.0! Your function executed successfully!",
7 | "input": event,
8 | }
9 |
10 | response = {"statusCode": 200, "body": json.dumps(body)}
11 |
12 | return response
13 |
--------------------------------------------------------------------------------
/lab7/learn/serverless.yml:
--------------------------------------------------------------------------------
1 | org: scientist1995
2 | app: demo
3 | service: aws-python-http-api-project
4 | frameworkVersion: '3'
5 |
6 | provider:
7 | name: aws
8 | runtime: python3.8
9 | memorySize: 2000
10 | timeout: 900
11 | architecture: x86_64
12 | stackTags:
13 | product: soumil-team
14 | env: qa
15 | created-date: 2022-04-05
16 | team: data
17 | customer-impact: false
18 | terraform: false
19 |
20 | functions:
21 | hello:
22 | handler: handler.hello
23 | events:
24 | - http:
25 | path: /hello
26 | method: post
--------------------------------------------------------------------------------
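Because the handler echoes the incoming event back under `input`, posting to the public `/hello` endpoint is a quick way to see what API Gateway actually delivers to Lambda. A short sketch with a placeholder URL; no API key is needed here, since the event is not marked `private`, and the fields printed at the end are the ones expected in a REST-API proxy payload:

```python
# inspect_event.py -- illustrative only; the endpoint URL is a placeholder from `serverless deploy`
import json
import urllib.request

URL = "https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/dev/hello"

req = urllib.request.Request(
    URL,
    data=json.dumps({"greeting": "hi"}).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(req) as resp:
    echoed = json.loads(resp.read())["input"]

# The echoed event exposes the raw API Gateway payload: body, headers, path, etc.
print(echoed["body"])
print(echoed["path"], echoed["httpMethod"])
```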