├── .gitignore ├── amazon-s3 ├── .DS_Store ├── fruit-images.zip └── index.html ├── aws-cloudformation ├── .DS_Store ├── 1-ec2-template.yml ├── 2-ec2-template.yml ├── 3-ec2-template.yml └── vpc-with-cloudformation.yml ├── amazon-ec2 ├── generate-load-on-alb.md └── user-data-web-server.sh ├── aws-iam └── sts-assume-role.json ├── amazon-vpc ├── user-data-simple-website.sh └── custom-vpc.md ├── serverless-app ├── ProcessOrderFunction.py ├── SubmitOrderFunction.py ├── index.html └── serverless-app-instructions.md ├── amazon-eventbridge └── stop-instance-not-t2-micro.py ├── amazon-ebs ├── user-data-custom-ami.sh └── amazon-ebs-volumes.md ├── aws-ml-and-ai └── process-analyze-images.md ├── amazon-efs └── working-with-efs.md ├── README.md ├── aws-lambda └── working-with-lambda.md └── amazon-dynamodb ├── create-table-add-data.md └── batch-write.json /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | -------------------------------------------------------------------------------- /amazon-s3/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ushev-s/aws_course/main/amazon-s3/.DS_Store -------------------------------------------------------------------------------- /amazon-s3/fruit-images.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ushev-s/aws_course/main/amazon-s3/fruit-images.zip -------------------------------------------------------------------------------- /aws-cloudformation/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ushev-s/aws_course/main/aws-cloudformation/.DS_Store -------------------------------------------------------------------------------- /amazon-ec2/generate-load-on-alb.md: -------------------------------------------------------------------------------- 1 | # Command to generate load on the ALB 2 | 3 | ***replace with your alb dns name*** 4 | ```for i in {1..200}; do curl http://your-alb-address.com & done; wait``` 5 | -------------------------------------------------------------------------------- /aws-iam/sts-assume-role.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": { 4 | "Effect": "Allow", 5 | "Action": "sts:AssumeRole", 6 | "Resource": "arn:aws:iam::975050181034:role/ec2-role" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /amazon-vpc/user-data-simple-website.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Update the system and install necessary packages 4 | yum update -y 5 | yum install -y httpd 6 | 7 | # Start the Apache server 8 | systemctl start httpd 9 | systemctl enable httpd -------------------------------------------------------------------------------- /aws-cloudformation/1-ec2-template.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Create an EC2 instance with a security group for SSH access 3 | Resources: 4 | InstanceSecurityGroup: 5 | Type: AWS::EC2::SecurityGroup 6 | Properties: 7 | GroupDescription: Enable SSH access 8 | SecurityGroupIngress: 9 | - IpProtocol: tcp 10 | FromPort: 22 11 | ToPort: 22 12 | CidrIp: 0.0.0.0/0 13 | MyInstance: 14 | Type: AWS::EC2::Instance 15 | 
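    # Note: AMI IDs are region-specific. The ImageId below is used elsewhere in
    # this course with us-east-1 placements; substitute a current Amazon Linux
    # AMI ID if you are deploying in a different region.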
Properties: 16 | ImageId: ami-0440d3b780d96b29d 17 | InstanceType: t2.micro 18 | SecurityGroups: 19 | - !Ref InstanceSecurityGroup -------------------------------------------------------------------------------- /serverless-app/ProcessOrderFunction.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | from datetime import datetime 4 | 5 | dynamodb = boto3.resource('dynamodb') 6 | table_name = 'YOUR_DYNAMODB_TABLE_NAME' 7 | table = dynamodb.Table(table_name) 8 | 9 | def lambda_handler(event, context): 10 | for record in event['Records']: 11 | message_body = json.loads(record['body']) 12 | item = {'orderId': record['messageId'], 'productName': message_body['productName'], 'quantity': message_body['quantity'], 'orderDate': datetime.now().isoformat()} 13 | table.put_item(Item=item) 14 | return {'statusCode': 200, 'body': json.dumps({'message': 'Orders processed successfully'})} 15 | -------------------------------------------------------------------------------- /amazon-vpc/custom-vpc.md: -------------------------------------------------------------------------------- 1 | # Create VPC 2 | Name: MyVPC 3 | IPv4 CIDR Block: 10.0.0.0/16 4 | 5 | # Create Public and Private Subnets 6 | 7 | Name: Public-1A 8 | Availability Zone: us-east-1a 9 | IPv4 CIDR Block: 10.0.1.0/24 10 | 11 | Name: Public-1B 12 | Availability Zone: us-east-1b 13 | IPv4 CIDR Block: 10.0.2.0/24 14 | 15 | Name: Private-1A 16 | Availability Zone: us-east-1a 17 | IPv4 CIDR Block: 10.0.3.0/24 18 | 19 | Name: Private-1B 20 | Availability Zone: us-east-1b 21 | IPv4 CIDR Block: 10.0.4.0/24 22 | 23 | # Create private route table 24 | 25 | Name: Private-RT 26 | VPC: MyVPC 27 | Subnet associations: Private-1A, Private-1B 28 | 29 | # Create Internet Gateway 30 | 31 | Name: MyIGW 32 | VPC: MyVPC -------------------------------------------------------------------------------- /serverless-app/SubmitOrderFunction.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | 4 | sqs = boto3.client('sqs') 5 | queue_url = 'YOUR_SQS_QUEUE_URL' 6 | 7 | def lambda_handler(event, context): 8 | try: 9 | order_details = json.loads(event['body']) 10 | response = sqs.send_message(QueueUrl=queue_url, MessageBody=json.dumps(order_details)) 11 | return {'statusCode': 200, 'headers': {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Headers': 'Content-Type', 'Access-Control-Allow-Methods': 'OPTIONS,POST'}, 'body': json.dumps({'message': 'Order submitted to queue successfully'})} 12 | except Exception as e: 13 | return {'statusCode': 400, 'body': json.dumps({'error': str(e)})} 14 | -------------------------------------------------------------------------------- /amazon-eventbridge/stop-instance-not-t2-micro.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | 4 | def lambda_handler(event, context): 5 | ec2 = boto3.client('ec2') 6 | sns = boto3.client('sns') 7 | 8 | # Extract the instance ID from the event 9 | instance_id = event['detail']['instance-id'] 10 | instance_type = ec2.describe_instances(InstanceIds=[instance_id])['Reservations'][0]['Instances'][0]['InstanceType'] 11 | 12 | if instance_type != 't2.micro': 13 | # Stop the instance 14 | ec2.stop_instances(InstanceIds=[instance_id]) 15 | print(f"Instance {instance_id} stopped because it is not a t2.micro.") 16 | 17 | else: 18 | print(f"Instance {instance_id} is a t2.micro. 
No action taken.") 19 | 20 | return { 21 | 'statusCode': 200, 22 | 'body': json.dumps('Lambda function execution completed.') 23 | } 24 | -------------------------------------------------------------------------------- /aws-cloudformation/2-ec2-template.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Attach an EBS volume to the EC2 instance, retaining original resources 3 | Resources: 4 | InstanceSecurityGroup: 5 | Type: AWS::EC2::SecurityGroup 6 | Properties: 7 | GroupDescription: Enable SSH access 8 | SecurityGroupIngress: 9 | - IpProtocol: tcp 10 | FromPort: 22 11 | ToPort: 22 12 | CidrIp: 0.0.0.0/0 13 | MyInstance: 14 | Type: AWS::EC2::Instance 15 | Properties: 16 | ImageId: ami-0440d3b780d96b29d 17 | InstanceType: t2.micro 18 | SecurityGroups: 19 | - !Ref InstanceSecurityGroup 20 | MyVolume: 21 | Type: AWS::EC2::Volume 22 | Properties: 23 | AvailabilityZone: !GetAtt MyInstance.AvailabilityZone 24 | Size: 10 25 | MyVolumeAttachment: 26 | Type: AWS::EC2::VolumeAttachment 27 | Properties: 28 | InstanceId: !Ref MyInstance 29 | VolumeId: !Ref MyVolume 30 | Device: /dev/sdf -------------------------------------------------------------------------------- /aws-cloudformation/3-ec2-template.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Description: Add an Amazon S3 bucket to the setup, retaining all previous resources 3 | 4 | Resources: 5 | InstanceSecurityGroup: 6 | Type: AWS::EC2::SecurityGroup 7 | Properties: 8 | GroupDescription: Enable SSH access 9 | SecurityGroupIngress: 10 | - IpProtocol: tcp 11 | FromPort: 22 12 | ToPort: 22 13 | CidrIp: 0.0.0.0/0 14 | 15 | MyInstance: 16 | Type: AWS::EC2::Instance 17 | Properties: 18 | ImageId: ami-0440d3b780d96b29d # Ensure this AMI ID is valid for your region 19 | InstanceType: t2.micro 20 | SecurityGroups: 21 | - !Ref InstanceSecurityGroup 22 | 23 | MyVolume: 24 | Type: AWS::EC2::Volume 25 | Properties: 26 | AvailabilityZone: !GetAtt MyInstance.AvailabilityZone 27 | Size: 10 28 | 29 | MyVolumeAttachment: 30 | Type: AWS::EC2::VolumeAttachment 31 | Properties: 32 | InstanceId: !Ref MyInstance 33 | VolumeId: !Ref MyVolume 34 | Device: /dev/sdf 35 | 36 | MyS3Bucket: 37 | Type: AWS::S3::Bucket 38 | Properties: 39 | BucketName: my-unique-bucket-name-4rw3dda34 # Ensure this is globally unique -------------------------------------------------------------------------------- /amazon-ebs/user-data-custom-ami.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Update the system and install Apache web server 4 | yum update -y 5 | yum install -y httpd 6 | 7 | # Start and enable Apache to run on boot 8 | systemctl start httpd 9 | systemctl enable httpd 10 | 11 | # Create an index.html file with CSS animations for background color changing 12 | cat > /var/www/html/index.html <<'EOF' 13 | 14 | 15 | 16 | 17 | Custom AMI Instance 18 | 36 | 37 | 38 |
This Instance Was Launched from a Custom AMI
39 | 40 | 41 | EOF 42 | 43 | # Ensure the httpd service is correctly set up to start on boot 44 | chkconfig httpd on 45 | -------------------------------------------------------------------------------- /amazon-ec2/user-data-web-server.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Update the system and install necessary packages 4 | yum update -y 5 | yum install -y httpd 6 | 7 | # Start the Apache server 8 | systemctl start httpd 9 | systemctl enable httpd 10 | 11 | # Fetch the Availability Zone information using IMDSv2 12 | TOKEN=`curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600"` 13 | AZ=`curl -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/placement/availability-zone` 14 | 15 | # Create the index.html file 16 | cat > /var/www/html/index.html < 18 | 19 | Instance Availability Zone 20 | 33 | 34 | 35 |
This instance is located in Availability Zone: $AZ
36 | 37 | 38 | EOF 39 | 40 | # Ensure the httpd service is correctly set up to start on boot 41 | chkconfig httpd on -------------------------------------------------------------------------------- /amazon-ebs/amazon-ebs-volumes.md: -------------------------------------------------------------------------------- 1 | # Amazon EBS Volume Lab 2 | 3 | ## Launch Instances in two AZs 4 | 5 | 1. Launch an instance using the Amazon Linux AMI in us-east-1a 6 | 2. Launch another instnace using the Amazon Linux AMI in us-east-1b 7 | 8 | ## Create and Attach an EBS Volume 9 | 1. Create a 10GB gp2 volume in us-east-1a with a name tag of 'data-volume' 10 | 2. List non-loopback block devices on instance 11 | sudo lsblk -e7 12 | 3. Attach the volume to the instance in us-east-1a 13 | 4. Rerun the command to view block devices 14 | 15 | ## Create a filesystem and mount the volume 16 | 1. Create a filesystem on the EBS volume 17 | sudo mkfs -t ext4 /dev/xvdf 18 | 2. Create a mount point for the EBS volume 19 | sudo mkdir /data 20 | 3. Mount the EBS volume to the mount point 21 | sudo mount /dev/xvdf /data 22 | 4. Make the volume mount persistent 23 | Run: 'sudo nano /etc/fstab' then add '/dev/xvdf /data ext4 defaults,nofail 0 2' and save the file 24 | 25 | ## Add some data to the volume 26 | 27 | 1. Change to the /data mount point directory 28 | 2. Create some files and folders 29 | 30 | ## Take a snapshot and move the volume to us-east-1b 31 | 32 | 1. Take a snapshot of the data volume 33 | 2. Create a new EBS volume from the snapshot in us-east-1b 34 | 3. Mount the new EBS volume to the instance in us-east-1b 35 | 4. Change to the /data mount point and view the data 36 | 37 | -------------------------------------------------------------------------------- /aws-ml-and-ai/process-analyze-images.md: -------------------------------------------------------------------------------- 1 | # Instructions 2 | 3 | 1. Create an S3 bucket 4 | 5 | 2. Create a DynamoDB table 6 | - Name: ImageAnalysisResults 7 | - Primary key: ImageName 8 | 9 | 3. Create a Lambda function 10 | - Name: RekognitionLab 11 | - Runtime: Python 3.9 12 | - Code: Add the following code 13 | 14 | ```python 15 | import boto3 16 | import json 17 | 18 | def lambda_handler(event, context): 19 | # Initialize clients 20 | s3_client = boto3.client('s3') 21 | rekognition_client = boto3.client('rekognition') 22 | dynamodb = boto3.resource('dynamodb') 23 | table = dynamodb.Table('YourDynamoDBTableName') # Replace with your table name 24 | 25 | # Get the S3 bucket name and object key from the event 26 | bucket_name = event['Records'][0]['s3']['bucket']['name'] 27 | object_key = event['Records'][0]['s3']['object']['key'] 28 | 29 | # Call Amazon Rekognition to detect labels in the image 30 | response = rekognition_client.detect_labels( 31 | Image={'S3Object': {'Bucket': bucket_name, 'Name': object_key}}, 32 | MaxLabels=10 33 | ) 34 | 35 | # Store the labels detected in DynamoDB 36 | labels = [{'Confidence': label['Confidence'], 'Name': label['Name']} for label in response['Labels']] 37 | table.put_item( 38 | Item={ 39 | 'ImageName': object_key, 40 | 'Labels': json.dumps(labels) 41 | } 42 | ) 43 | 44 | return { 45 | 'statusCode': 200, 46 | 'body': json.dumps('Image processed successfully!') 47 | } 48 | ``` 49 | 50 | 4. Edit the table name in the code and deploy 51 | 52 | 5. Add permissions to Lambda 53 | - AmazonRekognitionFullAccess 54 | - AmazonDynamoDBFullAccess 55 | - AmazonS3ReadOnlyAccess 56 | 57 | 6. 
--------------------------------------------------------------------------------
/aws-ml-and-ai/process-analyze-images.md:
--------------------------------------------------------------------------------
1 | # Instructions
2 | 
3 | 1. Create an S3 bucket
4 | 
5 | 2. Create a DynamoDB table
6 |    - Name: ImageAnalysisResults
7 |    - Primary key: ImageName
8 | 
9 | 3. Create a Lambda function
10 |    - Name: RekognitionLab
11 |    - Runtime: Python 3.9
12 |    - Code: Add the following code
13 | 
14 | ```python
15 | import boto3
16 | import json
17 | 
18 | def lambda_handler(event, context):
19 |     # Initialize clients
20 |     s3_client = boto3.client('s3')
21 |     rekognition_client = boto3.client('rekognition')
22 |     dynamodb = boto3.resource('dynamodb')
23 |     table = dynamodb.Table('YourDynamoDBTableName')  # Replace with your table name
24 | 
25 |     # Get the S3 bucket name and object key from the event
26 |     bucket_name = event['Records'][0]['s3']['bucket']['name']
27 |     object_key = event['Records'][0]['s3']['object']['key']
28 | 
29 |     # Call Amazon Rekognition to detect labels in the image
30 |     response = rekognition_client.detect_labels(
31 |         Image={'S3Object': {'Bucket': bucket_name, 'Name': object_key}},
32 |         MaxLabels=10
33 |     )
34 | 
35 |     # Store the labels detected in DynamoDB
36 |     labels = [{'Confidence': label['Confidence'], 'Name': label['Name']} for label in response['Labels']]
37 |     table.put_item(
38 |         Item={
39 |             'ImageName': object_key,
40 |             'Labels': json.dumps(labels)
41 |         }
42 |     )
43 | 
44 |     return {
45 |         'statusCode': 200,
46 |         'body': json.dumps('Image processed successfully!')
47 |     }
48 | ```
49 | 
50 | 4. Edit the table name in the code and deploy
51 | 
52 | 5. Add permissions to Lambda
53 |    - AmazonRekognitionFullAccess
54 |    - AmazonDynamoDBFullAccess
55 |    - AmazonS3ReadOnlyAccess
56 | 
57 | 6. In Lambda, create a trigger for object creation events in the S3 bucket
58 | 7. Upload images to the bucket and review the results in DynamoDB
--------------------------------------------------------------------------------
/amazon-s3/index.html:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | 
4 | 
5 | 
6 | Welcome to Our Website
7 | 
39 | 
40 | 
41 | 
42 |

Welcome to Our Website!

43 |

We're glad you're here. Check out our gallery below:

44 | 49 |
50 | 
51 | 
--------------------------------------------------------------------------------
/amazon-efs/working-with-efs.md:
--------------------------------------------------------------------------------
1 | 
2 | # Working with EFS
3 | 
4 | ## Launch instances in multiple AZs
5 | 1. Create a security group
6 |    aws ec2 create-security-group --group-name StorageLabs --description "Temporary SG for the Storage Service Labs"
7 | 2. Add a rule for SSH inbound to the security group
8 |    aws ec2 authorize-security-group-ingress --group-name StorageLabs --protocol tcp --port 22 --cidr 0.0.0.0/0
9 | 3. Launch an instance in US-EAST-1A
10 |    aws ec2 run-instances --image-id ami-0440d3b780d96b29d --instance-type t2.micro --placement AvailabilityZone=us-east-1a --security-group-ids <security-group-id>
11 | 4. Launch an instance in US-EAST-1B
12 |    aws ec2 run-instances --image-id ami-0440d3b780d96b29d --instance-type t2.micro --placement AvailabilityZone=us-east-1b --security-group-ids <security-group-id>
13 | 
14 | ## Create an EFS File System
15 | 1. Add a rule to the security group to allow the NFS protocol from group members
16 | 
17 | ```aws ec2 authorize-security-group-ingress --group-id <security-group-id> --protocol tcp --port 2049 --source-group <security-group-id>```
18 | 
19 | 2. Create an EFS file system through the console, and add the StorageLabs security group to the mount targets for each AZ
20 | 
21 | ## Mount using the NFS Client (perform steps on both instances)
22 | 1. Create an EFS mount point
23 |    mkdir ~/efs-mount-point
24 | 2. Install the NFS client
25 |    sudo yum -y install nfs-utils
26 | 3. Mount using the NFS client
27 |    sudo mount -t nfs4 -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport <efs-dns-name>:/ ~/efs-mount-point
28 | 4. Create a file on the file system
29 | 5. Add a file system policy to enforce encryption in transit
30 | 6. Unmount (make sure to change directory out of efs-mount-point first)
31 |    sudo umount ~/efs-mount-point
32 | 7. Mount again using the NFS client (what happens?)
33 | 
34 | ## Mount using the EFS utils (perform steps on both instances)
35 | 1. Install the EFS utils
36 |    sudo yum install -y amazon-efs-utils
37 | 2. Mount using the EFS mount helper
38 |    sudo mount -t efs -o tls <file-system-id>:/ ~/efs-mount-point
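After mounting (with either client), you can verify from each instance that the same file system is shared. A quick sanity check along these lines:

```bash
# Confirm the mount point is backed by NFS/EFS rather than the root volume
df -hT ~/efs-mount-point

# Write a file from one instance; it should be visible from the other
echo "hello from $(hostname)" > ~/efs-mount-point/hello.txt
cat ~/efs-mount-point/hello.txt
```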
--------------------------------------------------------------------------------
/serverless-app/index.html:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | 
4 | 
5 | 
6 | Product Order Form
7 | 
16 | 
17 | 
18 | 
19 | 

Submit a Product Order

20 |
21 | 22 | 23 | 24 | 25 | 26 |
27 |
28 | 
41 | 
42 | 
43 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Introduction to Cloud Computing on AWS for Beginners Course Code
2 | *By [Digital Cloud Training](https://digitalcloud.training/) - Course Author Neal Davis*
3 | 
4 | ## How to Use the Course Code
5 | 
6 | The code used throughout the course has been zipped up and is available for download from this repo. Please download the code to your computer and unzip the contents. When the course is updated, this file may change, so make sure you download the latest version.
7 | 
8 | ## Course Overview
9 | 
10 | - In this video course, you'll learn general cloud computing concepts and AWS, from the fundamentals right through to advanced topics
11 | - You'll also build hands-on skills using many of the core Amazon Web Services (AWS) services
12 | - We use a highly visual and effective method of teaching cloud computing and AWS concepts using diagrams and animations so you gain a much deeper understanding (no bullet-point slides)
13 | - There are lots of hands-on exercises using an AWS Free Tier account to give you practical experience
14 | 
15 | ***What you will learn:***
16 | 
17 | - Learn fundamental concepts of cloud computing, including storage, database, networking, virtualization, containers, and cloud architecture
18 | - Create an AWS Free Tier account and launch your first virtual servers (Amazon EC2 instances) on the AWS Cloud
19 | - Configure elasticity, high availability and fault tolerance using Amazon EC2 Auto Scaling and Amazon Elastic Load Balancing
20 | - Create and configure storage services and upload files and objects using Amazon EBS, Amazon EFS and Amazon S3
21 | - Launch a relational database on Amazon RDS and a NoSQL database using Amazon DynamoDB
22 | - Automatically deploy infrastructure as code using AWS CloudFormation
23 | - Create a Platform as a Service (PaaS) application on AWS Elastic Beanstalk
24 | - Learn how to use DevOps tools on AWS to automate a continuous integration and continuous delivery (CI/CD) pipeline
25 | - Implement serverless computing and Docker containers on AWS using AWS Lambda and Amazon ECS
26 | - Create event-driven serverless architectures with AWS Lambda
27 | - Create loosely coupled services with Amazon SQS and Amazon SNS
28 | 
29 | Learn more and [enroll in this course](https://digitalcloud.training/cloud-computing-on-aws-for-beginners/) now to get started learning Amazon Web Services
--------------------------------------------------------------------------------
/aws-lambda/working-with-lambda.md:
--------------------------------------------------------------------------------
1 | 
2 | # Create a Lambda that logs a message to CloudWatch Logs
3 | 
4 | 1. Create a Lambda function with the Python runtime and the following code
5 | 
6 | ```python
7 | import logging
8 | import json
9 | 
10 | # Configure the logging
11 | logger = logging.getLogger()
12 | logger.setLevel(logging.INFO)
13 | 
14 | def lambda_handler(event, context):
15 |     # Extract the message from the event. Assuming the input is a simple JSON object {"message": "your message here"}
16 |     message = event.get('message', 'No message provided')
17 | 
18 |     # Log the message
19 |     logger.info(message)
20 | 
21 |     return {
22 |         'statusCode': 200,
23 |         'body': json.dumps('Message logged successfully!')
24 |     }
25 | ```
26 | 
27 | 2. Create a test event in the console and add the following test data
28 | 
29 | ```json
30 | {
31 |   "message": "Hello, CloudWatch!"
32 | }
33 | ```
34 | 
35 | 3. Run the test and then view the message in CloudWatch Logs
36 | 4. Test using the CLI. Create a file in CloudShell named "payload.json" with the following code:
37 | 
38 | ```json
39 | {
40 |   "message": "Hello from CLI!"
41 | }
42 | ```
43 | 5. Run the following command in AWS CloudShell
44 | 
45 | ```aws lambda invoke --function-name <your-function-name> --payload fileb://payload.json response.json```
46 | 
47 | # Create an event notification for S3 uploads
48 | 
49 | In this exercise we'll modify the function to write the names of files uploaded to an S3 bucket to CloudWatch Logs
50 | 
51 | 1. Update the Lambda function code and deploy the update
52 | 
53 | ```python
54 | import json
55 | import logging
56 | import boto3
57 | 
58 | # Initialize logging
59 | logger = logging.getLogger()
60 | logger.setLevel(logging.INFO)
61 | 
62 | def lambda_handler(event, context):
63 |     # Log the raw event
64 |     logger.info("Event: " + json.dumps(event))
65 | 
66 |     # Process each record within the event
67 |     for record in event['Records']:
68 |         # Extract the bucket name and file key from the event
69 |         bucket_name = record['s3']['bucket']['name']
70 |         file_key = record['s3']['object']['key']
71 | 
72 |         # Log the bucket name and file key to CloudWatch
73 |         logger.info(f"New file uploaded: {file_key} in bucket {bucket_name}")
74 | 
75 |     return {
76 |         'statusCode': 200,
77 |         'body': json.dumps('Processed S3 upload event successfully!')
78 |     }
79 | ```
80 | 2. Edit the execution role to add permissions to Lambda to read from S3
81 | 3. Create an event notification for all S3 object create events by adding a trigger to AWS Lambda (a CLI alternative is sketched after this list)
82 | 4. Upload a file and check that a message including the file name is written to CloudWatch Logs
83 | 
84 | 
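Step 3 can also be done from the CLI rather than the console. A minimal sketch (the function name, bucket name, region, and account ID below are all placeholders):

```bash
# Allow S3 to invoke the function (the statement ID is arbitrary)
aws lambda add-permission \
  --function-name my-logging-function \
  --statement-id s3-invoke \
  --action lambda:InvokeFunction \
  --principal s3.amazonaws.com \
  --source-arn arn:aws:s3:::my-upload-bucket

# Register a bucket notification for all object-create events
aws s3api put-bucket-notification-configuration \
  --bucket my-upload-bucket \
  --notification-configuration '{
    "LambdaFunctionConfigurations": [{
      "LambdaFunctionArn": "arn:aws:lambda:us-east-1:123456789012:function:my-logging-function",
      "Events": ["s3:ObjectCreated:*"]
    }]
  }'
```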
--------------------------------------------------------------------------------
/amazon-dynamodb/create-table-add-data.md:
--------------------------------------------------------------------------------
1 | # 1. Create a DynamoDB table
2 | 
3 | 1. Create a DynamoDB table
4 | 2. Set the table name to `myorders`
5 | 3. Set the primary key to `clientid`
6 | 4. Set the sort key to `created`
7 | 
8 | # 2. Load items into the table
9 | 
10 | 1. Open AWS CloudShell
11 | 2. Upload the `batch-write.json` file
12 | 3. Run the following command to load the entries from the file
13 | 
14 | ```bash
15 | aws dynamodb batch-write-item --request-items file://batch-write.json
16 | ```
17 | 
18 | 4. Use the "Explore items" view under "Tables" to view the items in the table
19 | 
20 | # 3. Use Scan APIs to find data
21 | 
22 | 1. This example demonstrates how to scan the myorders table for items in a specific category, such as "Electronics"
23 | 
24 | ```bash
25 | aws dynamodb scan \
26 |     --table-name myorders \
27 |     --filter-expression "category = :cat" \
28 |     --expression-attribute-values '{":cat":{"S":"Electronics"}}'
29 | ```
30 | 
31 | ```--filter-expression "category = :cat"``` specifies the condition to filter items where the category attribute equals a value we define
32 | ```--expression-attribute-values '{":cat":{"S":"Electronics"}}'``` defines the value for :cat used in the filter expression, in this case, "Electronics"
33 | 
34 | 2. This example shows how to scan the myorders table for items where the quantity (qty) is greater than 2
35 | 
36 | ```bash
37 | aws dynamodb scan \
38 |     --table-name myorders \
39 |     --filter-expression "qty > :q" \
40 |     --expression-attribute-values '{":q":{"N":"2"}}'
41 | ```
42 | ```--filter-expression "qty > :q"``` specifies the condition to filter items where the qty attribute is greater than a value we define
43 | ```--expression-attribute-values '{":q":{"N":"2"}}'``` defines the value for :q used in the filter expression, in this case, a quantity of 2
44 | 
45 | # 4. Use Query APIs to find data
46 | 
47 | 1. This example demonstrates how to query the myorders table for all orders made by a specific client, identified by clientid
48 | 
49 | ```bash
50 | aws dynamodb query \
51 |     --table-name myorders \
52 |     --key-condition-expression "clientid = :clientid" \
53 |     --expression-attribute-values '{":clientid":{"S":"client01@example.com"}}'
54 | ```
55 | ```--key-condition-expression "clientid = :clientid"``` specifies the condition for the query to find items where the clientid matches the specified value
56 | ```--expression-attribute-values '{":clientid":{"S":"client01@example.com"}}'``` defines the value for :clientid used in the key condition expression
57 | 
58 | 2. If you want to find orders from a specific client within a certain date range, you can use the sort key (created) along with the partition key (clientid) in your query
59 | 
60 | ```bash
61 | aws dynamodb query \
62 |     --table-name myorders \
63 |     --key-condition-expression "clientid = :clientid AND created BETWEEN :date1 AND :date2" \
64 |     --expression-attribute-values '{":clientid":{"S":"client01@example.com"}, ":date1":{"S":"2023-01-01T00:00Z"}, ":date2":{"S":"2023-01-31T23:59Z"}}'
65 | ```
66 | 
67 | ```--key-condition-expression "clientid = :clientid AND created BETWEEN :date1 AND :date2"``` specifies the condition for the query to find items where the clientid matches the specified value and the created date falls within the specified range
68 | ```--expression-attribute-values '{":clientid":{"S":"client01@example.com"}, ":date1":{"S":"2023-01-01T00:00Z"}, ":date2":{"S":"2023-01-31T23:59Z"}}'``` defines the values for :clientid, :date1, and :date2 used in the key condition expression
--------------------------------------------------------------------------------
/serverless-app/serverless-app-instructions.md:
--------------------------------------------------------------------------------
1 | # Serverless Application with REST API – Part 1
2 | 
3 | ## 1. Create the first Lambda function
4 | 
5 | 1. Create a Lambda function with the following settings
6 |    - Name: SubmitOrderFunction
7 |    - Python 3.9 runtime
8 | 
9 | 2. Add the code from the `SubmitOrderFunction.py` file
10 | 3. Add the `AmazonSQSFullAccess` permissions policy to the execution role
11 | 
12 | ***you will need to come back and add the queue URL shortly***
13 | 
14 | ## 2. Create the SQS queue
15 | 
16 | 1. Create an SQS queue
17 | 2. Use the standard queue type
18 | 3. Name it: `ProductOrdersQueue`
19 | 4. Copy the queue URL and add it to line 5 of the SubmitOrderFunction code
20 | 5. Deploy the Lambda function
21 | 
22 | ## 3. Test order submissions
23 | 
24 | 1. In Lambda, create and submit a test event with the following data
25 | 
26 | ```json
27 | {
28 |   "body": "{\"productName\":\"Test Product\",\"quantity\":3}"
29 | }
30 | ```
31 | 
32 | 2. Go to SQS and poll for messages - you should see a message waiting in the queue (a CLI alternative is sketched below)
33 | 
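For step 2, you can also poll the queue from the CLI instead of the console. A minimal sketch (the queue URL is a placeholder; use the URL you copied when creating the queue):

```bash
aws sqs receive-message \
  --queue-url https://sqs.us-east-1.amazonaws.com/123456789012/ProductOrdersQueue \
  --max-number-of-messages 1 \
  --wait-time-seconds 10
```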
34 | ## 4. Create the processing function
35 | 
36 | 1. Create a Lambda function with the following settings
37 |    - Name: ProcessOrderFunction
38 |    - Python 3.9 runtime
39 | 
40 | 2. Add the code from the `ProcessOrderFunction.py` file
41 | 3. Add the `AmazonSQSFullAccess` and `AmazonDynamoDBFullAccess` permissions policies to the execution role
42 | 
43 | ## 5. Create the DynamoDB table
44 | 
45 | 1. Create a DynamoDB table with the following settings
46 |    - Name: ProductOrders
47 |    - Primary key: orderId
48 | 
49 | ## 6. Deploy and test the application
50 | 
51 | 1. Add the table name to line 6 of the ProcessOrderFunction function code
52 | 2. Go to SQS, configure a Lambda trigger, and specify the ProcessOrderFunction
53 | 3. Check the DynamoDB table to see if the first test event was processed
54 | 4. Test using the CLI. Using CloudShell, create a file named `input.json` with the following contents
55 | 
56 | ```json
57 | {
58 |   "body": "{\"productName\":\"Test Product 2\",\"quantity\":2}"
59 | }
60 | ```
61 | 
62 | 5. Invoke the function:
63 | 
64 | ```bash
65 | aws lambda invoke --function-name <your-function-name> --payload fileb://input.json output.json
66 | ```
67 | 
68 | # Serverless Application with REST API – Part 2
69 | 
70 | ## 1. Create the API
71 | 
72 | 1. Create a REST API in the API Gateway console named `ProductOrdersAPI`
73 | 2. Create a new resource `/orders` and enable CORS
74 | 3. Create a `POST` method for `/orders` integrated with the `SubmitOrderFunction`
75 | 4. Enable a Lambda proxy integration
76 | 5. Click back up to the `/orders` resource and click "Enable CORS"
77 | 6. Select all CORS options
78 | 7. Deploy your API to a new stage named `prod`
79 | 8. Update the invoke URL in the index.html file on line 32 where it says `YOUR_API_ENDPOINT`
80 | 
81 | ***note, the invoke URL on line 32 should include /prod/orders on the end and look like this example***
82 | 
83 | 'https://v1grynidwb.execute-api.us-east-1.amazonaws.com/prod/orders'
84 | 
85 | ## 2. Create the static website bucket and test the application
86 | 
87 | 1. In Amazon S3 create a bucket
88 | 2. Configure the bucket for static website hosting
89 | 3. Set the default document to `index.html`
90 | 4. Enable public access using a bucket policy (you will need to turn off "Block all public access" for the bucket first)
91 | 
92 | ```json
93 | {
94 |     "Version": "2012-10-17",
95 |     "Statement": [
96 |         {
97 |             "Sid": "PublicReadGetObject",
98 |             "Effect": "Allow",
99 |             "Principal": "*",
100 |             "Action": "s3:GetObject",
101 |             "Resource": "arn:aws:s3:::<your-bucket-name>/*"
102 |         }
103 |     ]
104 | }
105 | ```
106 | 
107 | 5. Upload the edited index.html that has the API endpoint URL configured
108 | 6. Navigate to the static website endpoint
109 | 7. Submit some order information and check it is added to the DynamoDB table
110 | 8. If you do not receive an "Order submitted successfully!" response, use your browser's Developer Tools to troubleshoot
--------------------------------------------------------------------------------
/aws-cloudformation/vpc-with-cloudformation.yml:
--------------------------------------------------------------------------------
1 | Description: This template deploys a VPC, with a pair of public and private subnets spread
2 |   across two Availability Zones. It deploys an internet gateway, with a default
3 |   route on the public subnets. 
4 | 5 | Parameters: 6 | EnvironmentName: 7 | Description: An environment name that is prefixed to resource names 8 | Type: String 9 | 10 | VpcCIDR: 11 | Description: Please enter the IP range (CIDR notation) for this VPC 12 | Type: String 13 | Default: 10.0.0.0/16 14 | 15 | PublicSubnet1CIDR: 16 | Description: Please enter the IP range (CIDR notation) for the public subnet in the first Availability Zone 17 | Type: String 18 | Default: 10.0.1.0/24 19 | 20 | PublicSubnet2CIDR: 21 | Description: Please enter the IP range (CIDR notation) for the public subnet in the second Availability Zone 22 | Type: String 23 | Default: 10.0.2.0/24 24 | 25 | PrivateSubnet1CIDR: 26 | Description: Please enter the IP range (CIDR notation) for the private subnet in the first Availability Zone 27 | Type: String 28 | Default: 10.0.3.0/24 29 | 30 | PrivateSubnet2CIDR: 31 | Description: Please enter the IP range (CIDR notation) for the private subnet in the second Availability Zone 32 | Type: String 33 | Default: 10.0.4.0/24 34 | 35 | Resources: 36 | VPC: 37 | Type: AWS::EC2::VPC 38 | Properties: 39 | CidrBlock: !Ref VpcCIDR 40 | EnableDnsSupport: true 41 | EnableDnsHostnames: true 42 | Tags: 43 | - Key: Name 44 | Value: !Ref EnvironmentName 45 | 46 | InternetGateway: 47 | Type: AWS::EC2::InternetGateway 48 | Properties: 49 | Tags: 50 | - Key: Name 51 | Value: !Ref EnvironmentName 52 | 53 | InternetGatewayAttachment: 54 | Type: AWS::EC2::VPCGatewayAttachment 55 | Properties: 56 | InternetGatewayId: !Ref InternetGateway 57 | VpcId: !Ref VPC 58 | 59 | PublicSubnet1: 60 | Type: AWS::EC2::Subnet 61 | Properties: 62 | VpcId: !Ref VPC 63 | AvailabilityZone: !Select [ 0, !GetAZs '' ] 64 | CidrBlock: !Ref PublicSubnet1CIDR 65 | MapPublicIpOnLaunch: true 66 | Tags: 67 | - Key: Name 68 | Value: !Sub ${EnvironmentName} Public Subnet (AZ1) 69 | 70 | PublicSubnet2: 71 | Type: AWS::EC2::Subnet 72 | Properties: 73 | VpcId: !Ref VPC 74 | AvailabilityZone: !Select [ 1, !GetAZs '' ] 75 | CidrBlock: !Ref PublicSubnet2CIDR 76 | MapPublicIpOnLaunch: true 77 | Tags: 78 | - Key: Name 79 | Value: !Sub ${EnvironmentName} Public Subnet (AZ2) 80 | 81 | PrivateSubnet1: 82 | Type: AWS::EC2::Subnet 83 | Properties: 84 | VpcId: !Ref VPC 85 | AvailabilityZone: !Select [ 0, !GetAZs '' ] 86 | CidrBlock: !Ref PrivateSubnet1CIDR 87 | MapPublicIpOnLaunch: false 88 | Tags: 89 | - Key: Name 90 | Value: !Sub ${EnvironmentName} Private Subnet (AZ1) 91 | 92 | PrivateSubnet2: 93 | Type: AWS::EC2::Subnet 94 | Properties: 95 | VpcId: !Ref VPC 96 | AvailabilityZone: !Select [ 1, !GetAZs '' ] 97 | CidrBlock: !Ref PrivateSubnet2CIDR 98 | MapPublicIpOnLaunch: false 99 | Tags: 100 | - Key: Name 101 | Value: !Sub ${EnvironmentName} Private Subnet (AZ2) 102 | 103 | PublicRouteTable: 104 | Type: AWS::EC2::RouteTable 105 | Properties: 106 | VpcId: !Ref VPC 107 | Tags: 108 | - Key: Name 109 | Value: !Sub ${EnvironmentName} Public Routes 110 | 111 | DefaultPublicRoute: 112 | Type: AWS::EC2::Route 113 | DependsOn: InternetGatewayAttachment 114 | Properties: 115 | RouteTableId: !Ref PublicRouteTable 116 | DestinationCidrBlock: 0.0.0.0/0 117 | GatewayId: !Ref InternetGateway 118 | 119 | PublicSubnet1RouteTableAssociation: 120 | Type: AWS::EC2::SubnetRouteTableAssociation 121 | Properties: 122 | RouteTableId: !Ref PublicRouteTable 123 | SubnetId: !Ref PublicSubnet1 124 | 125 | PublicSubnet2RouteTableAssociation: 126 | Type: AWS::EC2::SubnetRouteTableAssociation 127 | Properties: 128 | RouteTableId: !Ref PublicRouteTable 129 | SubnetId: !Ref PublicSubnet2 130 | 131 | 132 | 
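  # Design note: both private subnets share the single route table defined below.
  # It intentionally has no 0.0.0.0/0 route, so instances in the private subnets
  # have no internet path unless you later add a NAT gateway and a default route.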
PrivateRouteTable1: 133 | Type: AWS::EC2::RouteTable 134 | Properties: 135 | VpcId: !Ref VPC 136 | Tags: 137 | - Key: Name 138 | Value: !Sub ${EnvironmentName} Private Routes 139 | 140 | PrivateSubnet1RouteTableAssociation: 141 | Type: AWS::EC2::SubnetRouteTableAssociation 142 | Properties: 143 | RouteTableId: !Ref PrivateRouteTable1 144 | SubnetId: !Ref PrivateSubnet1 145 | 146 | PrivateSubnet2RouteTableAssociation: 147 | Type: AWS::EC2::SubnetRouteTableAssociation 148 | Properties: 149 | RouteTableId: !Ref PrivateRouteTable1 150 | SubnetId: !Ref PrivateSubnet2 151 | 152 | NoIngressSecurityGroup: 153 | Type: AWS::EC2::SecurityGroup 154 | Properties: 155 | GroupName: "no-ingress-sg" 156 | GroupDescription: "Security group with no ingress rule" 157 | VpcId: !Ref VPC 158 | 159 | Outputs: 160 | VPC: 161 | Description: A reference to the created VPC 162 | Value: !Ref VPC 163 | 164 | PublicSubnets: 165 | Description: A list of the public subnets 166 | Value: !Join [ ",", [ !Ref PublicSubnet1, !Ref PublicSubnet2 ]] 167 | 168 | PrivateSubnets: 169 | Description: A list of the private subnets 170 | Value: !Join [ ",", [ !Ref PrivateSubnet1, !Ref PrivateSubnet2 ]] 171 | 172 | PublicSubnet1: 173 | Description: A reference to the public subnet in the 1st Availability Zone 174 | Value: !Ref PublicSubnet1 175 | 176 | PublicSubnet2: 177 | Description: A reference to the public subnet in the 2nd Availability Zone 178 | Value: !Ref PublicSubnet2 179 | 180 | PrivateSubnet1: 181 | Description: A reference to the private subnet in the 1st Availability Zone 182 | Value: !Ref PrivateSubnet1 183 | 184 | PrivateSubnet2: 185 | Description: A reference to the private subnet in the 2nd Availability Zone 186 | Value: !Ref PrivateSubnet2 187 | 188 | NoIngressSecurityGroup: 189 | Description: Security group with no ingress rule 190 | Value: !Ref NoIngressSecurityGroup -------------------------------------------------------------------------------- /amazon-dynamodb/batch-write.json: -------------------------------------------------------------------------------- 1 | { 2 | "myorders": [ 3 | { 4 | "PutRequest": { 5 | "Item": { 6 | "clientid": {"S": "client01@example.com"}, 7 | "created": {"S": "2023-01-01T10:00Z"}, 8 | "sku": {"S": "SKU-001"}, 9 | "category": {"S": "Electronics"}, 10 | "size": {"S": "N/A"}, 11 | "colour": {"S": "Black"}, 12 | "qty": {"N": "1"}, 13 | "price": {"N": "100"}, 14 | "weight": {"S": "Medium"} 15 | } 16 | } 17 | }, 18 | { 19 | "PutRequest": { 20 | "Item": { 21 | "clientid": {"S": "client02@example.com"}, 22 | "created": {"S": "2023-01-02T11:00Z"}, 23 | "sku": {"S": "SKU-002"}, 24 | "category": {"S": "Books"}, 25 | "size": {"S": "N/A"}, 26 | "colour": {"S": "N/A"}, 27 | "qty": {"N": "2"}, 28 | "price": {"N": "50"}, 29 | "weight": {"S": "Light"} 30 | } 31 | } 32 | }, 33 | { 34 | "PutRequest": { 35 | "Item": { 36 | "clientid": {"S": "client03@example.com"}, 37 | "created": {"S": "2023-01-03T12:00Z"}, 38 | "sku": {"S": "SKU-003"}, 39 | "category": {"S": "Clothing"}, 40 | "size": {"S": "Medium"}, 41 | "colour": {"S": "Red"}, 42 | "qty": {"N": "1"}, 43 | "price": {"N": "35"}, 44 | "weight": {"S": "Light"} 45 | } 46 | } 47 | }, 48 | { 49 | "PutRequest": { 50 | "Item": { 51 | "clientid": {"S": "client04@example.com"}, 52 | "created": {"S": "2023-01-04T13:00Z"}, 53 | "sku": {"S": "SKU-004"}, 54 | "category": {"S": "Home & Kitchen"}, 55 | "size": {"S": "N/A"}, 56 | "colour": {"S": "Silver"}, 57 | "qty": {"N": "1"}, 58 | "price": {"N": "80"}, 59 | "weight": {"S": "Medium"} 60 | } 61 | } 62 | }, 63 | { 64 | 
"PutRequest": { 65 | "Item": { 66 | "clientid": {"S": "client05@example.com"}, 67 | "created": {"S": "2023-01-05T14:00Z"}, 68 | "sku": {"S": "SKU-005"}, 69 | "category": {"S": "Toys"}, 70 | "size": {"S": "N/A"}, 71 | "colour": {"S": "Multicolor"}, 72 | "qty": {"N": "3"}, 73 | "price": {"N": "25"}, 74 | "weight": {"S": "Light"} 75 | } 76 | } 77 | }, 78 | { 79 | "PutRequest": { 80 | "Item": { 81 | "clientid": {"S": "client06@example.com"}, 82 | "created": {"S": "2023-01-06T15:00Z"}, 83 | "sku": {"S": "SKU-006"}, 84 | "category": {"S": "Electronics"}, 85 | "size": {"S": "N/A"}, 86 | "colour": {"S": "White"}, 87 | "qty": {"N": "1"}, 88 | "price": {"N": "120"}, 89 | "weight": {"S": "Medium"} 90 | } 91 | } 92 | }, 93 | { 94 | "PutRequest": { 95 | "Item": { 96 | "clientid": {"S": "client07@example.com"}, 97 | "created": {"S": "2023-01-07T16:00Z"}, 98 | "sku": {"S": "SKU-007"}, 99 | "category": {"S": "Gardening"}, 100 | "size": {"S": "N/A"}, 101 | "colour": {"S": "Green"}, 102 | "qty": {"N": "2"}, 103 | "price": {"N": "45"}, 104 | "weight": {"S": "Medium"} 105 | } 106 | } 107 | }, 108 | { 109 | "PutRequest": { 110 | "Item": { 111 | "clientid": {"S": "client08@example.com"}, 112 | "created": {"S": "2023-01-08T17:00Z"}, 113 | "sku": {"S": "SKU-008"}, 114 | "category": {"S": "Sports"}, 115 | "size": {"S": "Large"}, 116 | "colour": {"S": "Blue"}, 117 | "qty": {"N": "1"}, 118 | "price": {"N": "75"}, 119 | "weight": {"S": "Heavy"} 120 | } 121 | } 122 | }, 123 | { 124 | "PutRequest": { 125 | "Item": { 126 | "clientid": {"S": "client09@example.com"}, 127 | "created": {"S": "2023-01-09T18:00Z"}, 128 | "sku": {"S": "SKU-009"}, 129 | "category": {"S": "Books"}, 130 | "size": {"S": "N/A"}, 131 | "colour": {"S": "N/A"}, 132 | "qty": {"N": "1"}, 133 | "price": {"N": "15"}, 134 | "weight": {"S": "Light"} 135 | } 136 | } 137 | }, 138 | { 139 | "PutRequest": { 140 | "Item": { 141 | "clientid": {"S": "client10@example.com"}, 142 | "created": {"S": "2023-01-10T19:00Z"}, 143 | "sku": {"S": "SKU-010"}, 144 | "category": {"S": "Clothing"}, 145 | "size": {"S": "Small"}, 146 | "colour": {"S": "Black"}, 147 | "qty": {"N": "2"}, 148 | "price": {"N": "60"}, 149 | "weight": {"S": "Light"} 150 | } 151 | } 152 | }, 153 | { 154 | "PutRequest": { 155 | "Item": { 156 | "clientid": {"S": "client11@example.com"}, 157 | "created": {"S": "2023-01-11T20:00Z"}, 158 | "sku": {"S": "SKU-011"}, 159 | "category": {"S": "Electronics"}, 160 | "size": {"S": "N/A"}, 161 | "colour": {"S": "Grey"}, 162 | "qty": {"N": "1"}, 163 | "price": {"N": "200"}, 164 | "weight": {"S": "Medium"} 165 | } 166 | } 167 | }, 168 | { 169 | "PutRequest": { 170 | "Item": { 171 | "clientid": {"S": "client12@example.com"}, 172 | "created": {"S": "2023-01-12T21:00Z"}, 173 | "sku": {"S": "SKU-012"}, 174 | "category": {"S": "Home & Kitchen"}, 175 | "size": {"S": "N/A"}, 176 | "colour": {"S": "Yellow"}, 177 | "qty": {"N": "1"}, 178 | "price": {"N": "40"}, 179 | "weight": {"S": "Light"} 180 | } 181 | } 182 | }, 183 | { 184 | "PutRequest": { 185 | "Item": { 186 | "clientid": {"S": "client13@example.com"}, 187 | "created": {"S": "2023-01-13T22:00Z"}, 188 | "sku": {"S": "SKU-013"}, 189 | "category": {"S": "Toys"}, 190 | "size": {"S": "N/A"}, 191 | "colour": {"S": "Red"}, 192 | "qty": {"N": "2"}, 193 | "price": {"N": "30"}, 194 | "weight": {"S": "Light"} 195 | } 196 | } 197 | }, 198 | { 199 | "PutRequest": { 200 | "Item": { 201 | "clientid": {"S": "client14@example.com"}, 202 | "created": {"S": "2023-01-14T23:00Z"}, 203 | "sku": {"S": "SKU-014"}, 204 | "category": {"S": 
"Gardening"}, 205 | "size": {"S": "N/A"}, 206 | "colour": {"S": "Brown"}, 207 | "qty": {"N": "3"}, 208 | "price": {"N": "22"}, 209 | "weight": {"S": "Medium"} 210 | } 211 | } 212 | }, 213 | { 214 | "PutRequest": { 215 | "Item": { 216 | "clientid": {"S": "client15@example.com"}, 217 | "created": {"S": "2023-01-15T24:00Z"}, 218 | "sku": {"S": "SKU-015"}, 219 | "category": {"S": "Sports"}, 220 | "size": {"S": "Medium"}, 221 | "colour": {"S": "Green"}, 222 | "qty": {"N": "1"}, 223 | "price": {"N": "85"}, 224 | "weight": {"S": "Heavy"} 225 | } 226 | } 227 | }, 228 | { 229 | "PutRequest": { 230 | "Item": { 231 | "clientid": {"S": "client16@example.com"}, 232 | "created": {"S": "2023-01-16T01:00Z"}, 233 | "sku": {"S": "SKU-016"}, 234 | "category": {"S": "Books"}, 235 | "size": {"S": "N/A"}, 236 | "colour": {"S": "N/A"}, 237 | "qty": {"N": "1"}, 238 | "price": {"N": "20"}, 239 | "weight": {"S": "Light"} 240 | } 241 | } 242 | }, 243 | { 244 | "PutRequest": { 245 | "Item": { 246 | "clientid": {"S": "client17@example.com"}, 247 | "created": {"S": "2023-01-17T02:00Z"}, 248 | "sku": {"S": "SKU-017"}, 249 | "category": {"S": "Clothing"}, 250 | "size": {"S": "Large"}, 251 | "colour": {"S": "Purple"}, 252 | "qty": {"N": "1"}, 253 | "price": {"N": "75"}, 254 | "weight": {"S": "Light"} 255 | } 256 | } 257 | }, 258 | { 259 | "PutRequest": { 260 | "Item": { 261 | "clientid": {"S": "client18@example.com"}, 262 | "created": {"S": "2023-01-18T03:00Z"}, 263 | "sku": {"S": "SKU-018"}, 264 | "category": {"S": "Electronics"}, 265 | "size": {"S": "N/A"}, 266 | "colour": {"S": "Black"}, 267 | "qty": {"N": "2"}, 268 | "price": {"N": "150"}, 269 | "weight": {"S": "Medium"} 270 | } 271 | } 272 | }, 273 | { 274 | "PutRequest": { 275 | "Item": { 276 | "clientid": {"S": "client19@example.com"}, 277 | "created": {"S": "2023-01-19T04:00Z"}, 278 | "sku": {"S": "SKU-019"}, 279 | "category": {"S": "Home & Kitchen"}, 280 | "size": {"S": "N/A"}, 281 | "colour": {"S": "White"}, 282 | "qty": {"N": "1"}, 283 | "price": {"N": "65"}, 284 | "weight": {"S": "Medium"} 285 | } 286 | } 287 | }, 288 | { 289 | "PutRequest": { 290 | "Item": { 291 | "clientid": {"S": "client20@example.com"}, 292 | "created": {"S": "2023-01-20T05:00Z"}, 293 | "sku": {"S": "SKU-020"}, 294 | "category": {"S": "Toys"}, 295 | "size": {"S": "N/A"}, 296 | "colour": {"S": "Blue"}, 297 | "qty": {"N": "3"}, 298 | "price": {"N": "45"}, 299 | "weight": {"S": "Light"} 300 | } 301 | } 302 | } 303 | ] 304 | } 305 | --------------------------------------------------------------------------------