├── AWS_Connect_Greeting.py ├── AWS_Connect_Salesforce.py ├── CheckForDeltas.py ├── Inspector_script_updated.py ├── NACL_Practice.py ├── Pulumi_Docker_Inspector.py ├── README.md ├── S3_Select_ApiGateway_query.py ├── SmallStackDeployment ├── Dockerfile ├── bootstrap.sh ├── deploy_first_lambda_stack.py ├── lambda_handler.py └── put_metric_data.py ├── UsingOkta_1.py ├── UsingOkta_2.js ├── VMIE_basic.py ├── VPC_API ├── api_base.py └── vpc_api.py ├── addStorageRDS.py ├── add_cloudfront_src_ip_resource_policy.py ├── add_instance_to_domain.py ├── add_sg_to_instances_cloudwatch.py ├── alb_lambda_trigger.py ├── alexa_echo_commands_lambda.py ├── apik ├── api_key_wrapper.py └── api_keys_api.py ├── assign_2_eip_eni.py ├── automount ├── attach_partition.py └── autoMount.py ├── aws_inspector.py ├── basic_serverless_api_deployment.yml ├── browser_automation_get_traffic.py ├── cdk_stack_asg_elb_webserver.py ├── checkSecurityGroup.py ├── components ├── App.js ├── Cal.js ├── Form.js ├── Navigation.js ├── SendMail.js ├── Video.js ├── react_dates_overrides.css └── video │ └── fallBack.mp4 ├── covid19_docker_redis_flask_webscrape ├── __init__.py ├── covid_19_docker_plot.py ├── covid_wiki_scrapper.py ├── heat_plot278.html ├── plot_data.py └── working_with_redis_container.py ├── covid19_wiki_redis_ws.py ├── covid19ws ├── cv19WS.py ├── redis_mysql.py └── unused_queries.py ├── createVPC_Link_CLI.sh ├── create_aws_workspace_newuser.py ├── custom_domain_api.py ├── custom_token_authorizer.py ├── dynamo_db_replication.py ├── elasticSearch_lambda_vpcLink.py ├── evalConfigChange.py ├── failure22Connect.py ├── fetch_error_info.py ├── fetch_vpc_logs.py ├── fix_bucket_lambda_config.py ├── flask_sns_cdk_webhook ├── Dockerfile ├── bootstrap.sh ├── cdk_webhook.py └── flask_hook.py ├── flaskify ├── Dockerfile ├── static │ ├── add.css │ ├── app.js │ ├── search.css │ └── style.css ├── templates │ ├── add.html │ └── index.html ├── userdata.sh └── webserver.py ├── fun_with_youtube.py ├── gen-api-friendlyname.py ├── generate_thumbnail.py ├── get_orphan_sg_from_trusted_advisor.py ├── google-chrome-repo.txt ├── iam-maintenance.py ├── iam_react_js_policies ├── App.js ├── Aux.js ├── Modal.js ├── SideBar.js ├── SideBarElements.js └── iam.py ├── insert__kinesis_data_aurora.py ├── labwork_ssh_ec2_practice.py ├── lambda.py ├── lambdaHasher.py ├── lambda_handler.py ├── lambda_handler_2.py ├── lambda_layer_wt ├── README.md ├── build │ ├── asset-manifest.json │ ├── favicon.ico │ ├── index.html │ ├── logo192.png │ ├── logo512.png │ ├── manifest.json │ ├── robots.txt │ └── static │ │ ├── css │ │ ├── 2.1a02f21c.chunk.css │ │ ├── 2.1a02f21c.chunk.css.map │ │ ├── main.c625180c.chunk.css │ │ └── main.c625180c.chunk.css.map │ │ └── js │ │ ├── 2.11b63539.chunk.js │ │ ├── 2.11b63539.chunk.js.LICENSE.txt │ │ ├── 2.11b63539.chunk.js.map │ │ ├── main.a6a939c5.chunk.js │ │ ├── main.a6a939c5.chunk.js.map │ │ ├── runtime-main.37e5f3d1.js │ │ └── runtime-main.37e5f3d1.js.map ├── clean.py ├── package-lock.json ├── package.json ├── public │ ├── favicon.ico │ ├── index.html │ ├── logo192.png │ ├── logo512.png │ ├── manifest.json │ └── robots.txt ├── server │ └── webserver.py ├── src │ ├── App.css │ └── index.js └── yarn.lock ├── learningGraphQL.js ├── learning_paramiko.py ├── linked_in_saved_items.py ├── manage_cron.py ├── miniWikiChaliceJS ├── add.html ├── app.js ├── app.py └── index.html ├── modify_instance_attribute ├── put_metric_data.py └── upgrade_instance.py ├── monitor_new_files.py ├── mount_efs.py ├── nestedStack_deployment_customDomain.yml ├── 
piechart_vanillaJS_python ├── index.html └── lambda_handler.py ├── preFlightChecker_cors.js ├── recognize.py ├── remote_control_github ├── app.py ├── index.html ├── index.js └── styles.css ├── report_for_duty.py ├── request_lambda_authorizer.py ├── rescue_me.py ├── resize_fs_1 ├── extend_fs.py ├── pmiko.py └── resize_root_partition.py ├── sample_incident_response.py ├── sec.py ├── selenium_aws.py ├── send_fake_data.py ├── serverless.yml ├── serverless_chat_app_basic ├── first_connection.py └── send_message.py ├── service_quota_react_js_github ├── App.js ├── Articles.js ├── Aux.js ├── Sections.js └── service_quotas.py ├── service_quotas.py ├── set_time_zone.py ├── slack_bolt_slash_cmd_add_workspace_user.py ├── snapshot_practice.py ├── ssm_raid0.py ├── stepFunctionSandbox ├── func1.py ├── func2.py ├── func3.py └── stateMachina.json ├── traffic_report_ec2.py ├── transcribe_file.py ├── userdata_kubernetes_bootsrap_to_deployment.sh ├── vpc_peering.py └── weather_forecast.py /AWS_Connect_Greeting.py: -------------------------------------------------------------------------------- 1 | import json, boto3 2 | 3 | TABLE_NAME = "customer-phone" 4 | 5 | dynamoClient = boto3.client('dynamodb',region_name='us-east-1') 6 | dynamoResource = boto3.resource('dynamodb', region_name='us-east-1') 7 | 8 | 9 | def getPhoneNumber(phoneNum): 10 | dynamoResultSet = dynamoClient.scan(TableName=TABLE_NAME) 11 | if phoneNum in [num['phoneNumber']['S'] for num in dynamoResultSet["Items"]]: 12 | return True 13 | 14 | def storePhoneNumber(phoneNumber): 15 | dynamoResource.Table(TABLE_NAME).put_item( Item={ "phoneNumber":phoneNumber } ) 16 | 17 | 18 | def lambda_handler(event, context): 19 | try: 20 | cust_phone = event['Details']['ContactData']['CustomerEndpoint']['Address'] 21 | if getPhoneNumber(cust_phone): 22 | return { 23 | 'Greeting': 'Welcome back!' 24 | } 25 | else: 26 | storePhoneNumber(cust_phone) 27 | return { 28 | 'Greeting': 'You are calling for the first time. We have saved your phone \ 29 | number for faster service next time.' 
30 | } 31 | except: 32 | pass 33 | 34 | 35 | #AWS Connect (Call Center) Lambda DynamoDb Python3 exercise 36 | #Use lambda to determine if customer has dialed into IVR after receiving customer input 37 | #Elliott Arnold DFW DMS 11-9-2020 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /AWS_Connect_Salesforce.py: -------------------------------------------------------------------------------- 1 | from simple_salesforce import Salesforce 2 | import os 3 | 4 | accessToken = os.getenv('TOKEN') 5 | password = os.getenv('PASSWORD') 6 | username = os.getenv('USERNAME') 7 | 8 | def forceBeWithYou(): 9 | return Salesforce(username=username, password=password,security_token=accessToken) 10 | 11 | def customerDialInNumber(event): 12 | cust_phone = event['Details']['ContactData']['CustomerEndpoint']['Address'] 13 | return cust_phone 14 | 15 | def create_new_contact(event): 16 | cust_desired_contact = event['Details']['Parameters']['StoredCustomerInput'] 17 | contact_id = event['Details']['Parameters']['ContactID'] 18 | sForce = forceBeWithYou() 19 | if cust_desired_contact != customerDialInNumber(event): 20 | res = sForce.Contact.create({'LastName':"JDoe-" + contact_id,'LeadSource':contact_id,'MobilePhone':cust_desired_contact,'OtherPhone': customerDialInNumber(event)}) 21 | else: 22 | res = sForce.Contact.create({'LastName':"JDoe-" + contact_id,'LeadSource':contact_id,'MobilePhone':cust_desired_contact}) 23 | 24 | return res['success'] 25 | 26 | 27 | def lambda_handler(event,context): 28 | try: 29 | if create_new_contact(event): 30 | return { 31 | "Message": "A new record was created in salesforce" 32 | } 33 | else: 34 | return { 35 | "Message": "There was an error. No data was created in salesforce" 36 | } 37 | except: 38 | pass 39 | 40 | 41 | #AWS Connect Salesforce Lambda IVR practice 42 | #Create Contact Flow Retrieve Customer infomation use to create salesforce user 43 | #Elliott Arnold DMS DFW Covid-19 11-13-20 44 | #lateNightToil BuringTheMidnightOil 45 | -------------------------------------------------------------------------------- /CheckForDeltas.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import hashlib 4 | 5 | dynamo = boto3.resource('dynamodb', region_name='us-east-1') 6 | dynamo_client = boto3.client('dynamodb', region_name='us-east-1') 7 | ec2 = boto3.client('ec2') 8 | sns = boto3.client('sns') 9 | arn = "arn:aws:sns:us-east-1:952151691101:Rule_Change_Detected" 10 | 11 | 12 | def main(event,context): 13 | '''check if table has been populated, if not populate table''' 14 | result = dynamo_client.scan(TableName='ConfigMonitor') 15 | if result['Count'] == 0: 16 | loadDDB() 17 | else: 18 | delta = compare(result) 19 | if len(delta) != 0: 20 | print(str(delta)) 21 | 22 | def get_current_config(): 23 | results = ec2.describe_security_groups() 24 | processed_dictionary = genHashedDictionary(results) 25 | return processed_dictionary 26 | 27 | def loadDDB(): 28 | processed_dictionary = get_current_config() 29 | for key,val in processed_dictionary.items(): 30 | putItem(dynamo,key,val) 31 | 32 | def compare(res): 33 | '''compare current security group hash with that previously listed in ddb table, 34 | if deviation is detected, publish to an sns topic''' 35 | modified = [] 36 | current_confg = get_current_config() 37 | scanned_dictionary = {result['groupID']['S']: result['hashedPolicies']['S'] for result in res['Items']} 38 | for i in scanned_dictionary: 39 
| if i in current_confg: 40 | if current_confg[i] != scanned_dictionary[i]: 41 | modified.append(i) 42 | res = sns.publish( 43 | TopicArn='arn:aws:sns:us-east-1:952151691101:SecG_changes', 44 | Message=f'Rule Change Detected for: {i}') 45 | return modified 46 | 47 | def genHashValue(data_dictionary): 48 | '''convert dictionary to string and encode into bytes - required for hexdigest method ''' 49 | bytes = json.dumps(data_dictionary, sort_keys=True).encode() 50 | hashed_string_from_bytes = hashlib.sha256(bytes).hexdigest() 51 | return hashed_string_from_bytes 52 | 53 | 54 | def genHashedDictionary(data_dictionary): 55 | '''use dictionary comprehension ''' 56 | hashed_dictionay = {element['GroupId']: genHashValue(element['IpPermissions']) for element in data_dictionary['SecurityGroups']} 57 | return hashed_dictionay 58 | 59 | 60 | def putItem(resource,group_id,hashed_policy,table='ConfigMonitor'): 61 | response = resource.Table(table).put_item( 62 | Item={ 63 | 'groupID':group_id, 64 | 'hashedPolicies':hashed_policy 65 | } 66 | ) 67 | return response 68 | 69 | 70 | #AWS Lambda practice: Security groups and dynamoDb: Evalaute changes in security group rules using 71 | #MD5 hash. Rules are compared against last recorded hash in dynamodDb, if hash changes: the script will publish to an sns topic 72 | #Elliott Arnold 8-14-19 73 | #si3mshady 74 | 75 | 76 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /NACL_Practice.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import time 3 | 4 | class Toggle_NACL: 5 | 6 | def __init__(self, network_acl_id='acl-02f93c4db1c5b9c5a'): 7 | self.ec2 = boto3.resource('ec2') 8 | self.network_acl = self.ec2.NetworkAcl(network_acl_id) 9 | 10 | '''prohibit all inbound/outbound traffic to subnet''' 11 | 12 | def subnet_offline(self): 13 | self.master_deny_ingress_rule() 14 | self.master_deny_egress_rule() 15 | 16 | '''permit all inbound/outbound traffic to subnet''' 17 | 18 | def subnet_online(self): 19 | self.master_allow_ingress_rule() 20 | self.master_allow_egress_rule() 21 | 22 | def twenty_sec_toggle_test(self): 23 | print(f'Locking down subnet {self.network_acl.associations[0]["SubnetId"]}.') 24 | 25 | self.subnet_offline() 26 | 27 | print(f'Subnet {self.network_acl.associations[0]["SubnetId"]} is unreachable.') 28 | 29 | time.sleep(20) 30 | 31 | print(f'Opening connections for subnet {self.network_acl.associations[0]["SubnetId"]}.') 32 | 33 | self.subnet_online() 34 | print(f'Subnet {self.network_acl.associations[0]["SubnetId"]} is now reachable.') 35 | 36 | def master_allow_egress_rule(self): 37 | response = self.network_acl.replace_entry( 38 | CidrBlock='0.0.0.0/0', 39 | DryRun=False, 40 | Egress=True, 41 | Protocol='-1', 42 | RuleAction='allow', 43 | RuleNumber=1 44 | ) 45 | return response 46 | 47 | def master_allow_ingress_rule(self): 48 | response = self.network_acl.replace_entry( 49 | CidrBlock='0.0.0.0/0', 50 | DryRun=False, 51 | Egress=False, 52 | Protocol='-1', 53 | RuleAction='allow', 54 | RuleNumber=1 55 | ) 56 | return response 57 | 58 | def master_deny_egress_rule(self): 59 | response = self.network_acl.replace_entry( 60 | CidrBlock='0.0.0.0/0', 61 | DryRun=False, 62 | Egress=True, 63 | Protocol='-1', 64 | RuleAction='deny', 65 | RuleNumber=1 66 | ) 67 | return response 68 | 69 | def master_deny_ingress_rule(self): 70 | response = self.network_acl.replace_entry( 71 | CidrBlock='0.0.0.0/0', 72 | DryRun=False, 73 | Egress=False, 74 | 
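#NACLs evaluate entries in ascending RuleNumber order, so replacing rule 1 lets this deny (or the matching allow variant above) win before any higher-numbered entry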
Protocol='-1', 75 | RuleAction='deny', 76 | RuleNumber=1 77 | ) 78 | return response 79 | 80 | # AWS VPC practice - learning to control ingress/egress traffic using NACL 81 | # Basic Rules for Denying / Allowing all traffic into subnet 82 | # Elliott Arnold 83 | # 11-23-19 84 | 85 | 86 | if __name__ == '__main__': 87 | t = Toggle_NACL() 88 | #t.twenty_sec_toggle_test() 89 | -------------------------------------------------------------------------------- /Pulumi_Docker_Inspector.py: -------------------------------------------------------------------------------- 1 | import string, random 2 | import pulumi 3 | import pulumi_aws as aws 4 | from pulumi_aws import lambda_ 5 | import pulumi_docker as docker 6 | 7 | docker_image_config = {"resource_name":"jenkins-root-user", \ 8 | "docker_image":"si3mshady/jenkins-iam-root:latest"} 9 | 10 | ports = [{"external":"8080", "internal": "8080"}] 11 | 12 | def get_random_string(length): 13 | letters = string.ascii_lowercase 14 | return ''.join(random.choice(letters) for i in range(length)) 15 | 16 | 17 | rule_packages = { 18 | "CVE": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-gEjTy7T7", 19 | "OSSecConfigBenchmarks": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-rExsr2X8", 20 | "NetworkReachability": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-PmNV0Tcd", 21 | "SecurityBestPractices": "arn:aws:inspector:us-east-1:316112463485:rulespackage/0-R01qwB5Q" 22 | } 23 | 24 | class LambdaConfig: 25 | def __init__(self, args=None, custom=None) -> None: 26 | self.args = args 27 | self.custom = custom 28 | 29 | def dispatch_multiple_functions(self): 30 | for key, value in rule_packages.items(): 31 | 32 | try: 33 | aws.lambda_.Function( 34 | f"Inspector-Function-{get_random_string(5)}", 35 | code=pulumi.AssetArchive({'.': pulumi.FileArchive('./app.zip')}), 36 | timeout= 30, 37 | handler="app.handler", 38 | runtime="python3.8", 39 | environment={ 40 | "variables": { 41 | key: value 42 | } 43 | } , 44 | role="arn:aws:iam::888:role/lambda-kratos-exec-role" 45 | 46 | 47 | ) 48 | except Exception as e: 49 | print(e) 50 | 51 | 52 | class DockerConfig: 53 | 54 | def __init__(self, ports:list=ports,config:dict=docker_image_config) -> None: 55 | self.config = config 56 | self.ports = ports 57 | 58 | def deploy_container(self): 59 | self.docker_resource = self.provision_docker_image(**self.config) 60 | self.launch_container = self.launch_container(self.config.get('resource_name'),\ 61 | self.docker_resource, self.ports 62 | ) 63 | 64 | def provision_docker_image(self, resource_name: str, docker_image: str) -> docker.RemoteImage: 65 | return docker.RemoteImage(resource_name=resource_name, name=docker_image) 66 | 67 | def launch_container(self, resource_name, docker_image_resource, ports)-> None: 68 | docker.Container(resource_name, \ 69 | image=docker_image_resource.latest, ports=ports) 70 | 71 | 72 | dg = DockerConfig(ports,docker_image_config) 73 | dg.deploy_container() 74 | 75 | 76 | lg = LambdaConfig() 77 | lg.dispatch_multiple_functions() 78 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## A collection of AWS Lambda Functions and scripts using BOTO3 2 | 3 | -------------------------------------------------------------------------------- /S3_Select_ApiGateway_query.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | import boto3, 
json 3 | s3 = boto3.client('s3') 4 | 5 | def lambda_validator(event): 6 | queryStringParams = event.get("queryStringParameters",None) 7 | if queryStringParams.get('query',None): 8 | return queryStringParams['query'] 9 | 10 | def make_ordered_dictionary(array): 11 | od = OrderedDict() 12 | for i in array: 13 | val = i.split(':') 14 | od[val[0]] = val[1] 15 | 16 | return od 17 | 18 | 19 | def extract_from_s3(query,data): 20 | body = data.get('body').split('&') 21 | clean_result = [val.replace('=',':') for val in body] 22 | od = make_ordered_dictionary(clean_result) 23 | 24 | bucket = od.get('Bucket') 25 | key = od.get('Key') 26 | 27 | params = {"Bucket": bucket, "Key": key, 28 | "ExpressionType":'SQL', "Expression": query, 29 | "InputSerialization": {'CSV': {"FileHeaderInfo": "Use"}}, 30 | "OutputSerialization":{'CSV': {}}} 31 | 32 | result = s3.select_object_content(**params) 33 | 34 | #exract s3 payload, access csv data of file 35 | 36 | v = [v for v in result['Payload']] #listcomprehension 37 | 38 | document_data = v[0].get('Records') 39 | return document_data 40 | 41 | 42 | def lambda_handler(event,context): 43 | query = lambda_validator(event) 44 | if query: 45 | data = extract_from_s3(query,event) 46 | resp = {'statusCode': 200,'headers': {'Content-Type': 'application/json', 47 | 'Access-Control-Allow-Origin': '*' },'body': str(data), 48 | "isBase64Encoded": False } 49 | 50 | return resp 51 | 52 | 53 | #AWS Python3 APIgateway Lambda SQL S3Select 54 | #Make requests to ApiGateway; proxy requests to S3 Select and fetch data 55 | #From CSV files in S3 56 | #Elliott Arnold - DMS DFW Covid19 Bee-Gees Night Fever 57 | -------------------------------------------------------------------------------- /SmallStackDeployment/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:alpine 2 | 3 | RUN mkdir cdk/ && apk add python3-dev && \ 4 | apk add py-pip && npm install -g aws-cdk 5 | 6 | WORKDIR cdk/ 7 | 8 | CMD ["sh"] 9 | -------------------------------------------------------------------------------- /SmallStackDeployment/bootstrap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | OUTPUT=~/put_metric_data.py 4 | wget --output-document=$OUTPUT https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/master/modify_instance_attribute/put_metric_data.py 5 | 6 | #make script executable 7 | chmod +x $OUTPUT 8 | 9 | #update instance, install python3, atop boto3, set cron job to push metrics to cloudwatch 10 | yum update -y 11 | yum install epel-release -y 12 | yum install python3 -y 13 | yum install python-pip -y 14 | pip3 install boto3 15 | yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm 16 | yum-config-manager --enable epel 17 | yum install atop -y 18 | 19 | crontab< None: 7 | super().__init__(scope, id, **kwargs) 8 | 9 | def get_userdata(): 10 | with open('bootstrap.sh','r') as userdata: 11 | return userdata.read() 12 | 13 | kratos_role = aws_iam.Role.from_role_arn(self,'KratosXL', 14 | role_arn="arn:aws:iam::88888888:role/KratosRole") 15 | 16 | lambda_role = aws_iam.Role.from_role_arn(self,'LambdaXL', 17 | role_arn="arn:aws:iam::999999999:role/Lambda_Kratos") 18 | 19 | sns_topic = aws_sns.Topic(self, "Topic", display_name="cdk-sns-trigger") 20 | 21 | lambda_function = aws_lambda.Function(self, "FetchAtopLogs", runtime=aws_lambda.Runtime.PYTHON_3_6,role=lambda_role, 22 | handler="lambda_handler.lambda_handler", 
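#handler is "<module>.<function>" inside the asset below; from_asset packages the local myfunc/ directory and uploads it as an S3 asset when the stack deploys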
code=aws_lambda.Code.from_asset('myfunc')) 23 | 24 | lambda_function.add_event_source(aws_lambda_event_sources.SnsEventSource(sns_topic)) 25 | sns_subscription = aws_sns_subscriptions.LambdaSubscription(lambda_function) 26 | 27 | 28 | def generate_instances(count=1): 29 | amazon_linux_2 = aws_ec2.GenericLinuxImage({"us-east-1": "ami-0fc61db8544a617ed"}) 30 | ec2_objects = [] 31 | for i in range(count): 32 | ec2_instnace = aws_ec2.Instance(self,f"CDK-Instance-{i + int(1)}", 33 | instance_type=aws_ec2.InstanceType('t2.micro'), 34 | role=kratos_role, machine_image=amazon_linux_2, 35 | security_group=aws_ec2.CfnSecurityGroup(self,id=f"SG{i + int(1)}", 36 | group_description=f"SG-CDK-{i}"), 37 | vpc=aws_ec2.Vpc.from_lookup(self,f'CDK-VPC-{i + int(1)}', 38 | vpc_id="vpc-eeeee3"), 39 | user_data=aws_ec2.UserData.custom(get_userdata()), 40 | key_name="covidQuarantine") 41 | ec2_objects.append(ec2_instnace) 42 | return ec2_objects 43 | 44 | generate_instances() 45 | 46 | 47 | #AWS EC2 SQS SSM S3 Cloudwatch Cloudformation CDK practice exercise - 48 | #Bootstrap EC2 instances with ATOP and custom script to push instance metrics to Cloudwatch. 49 | #Creates an SNS topic with a Lambda fuction subscription. The Lambda is trigged 50 | #bootsrapped python script publishes to the SNS topic when a metric threshold is breached. 51 | #Once breached the lambda uses Systems Manager (SSM) to execute a bash command to upload ATOP logs to S3 52 | #Elliott Arnold 3-29-20 53 | #Covid19Quarantine 54 | 55 | 56 | 57 | #Resources 58 | #https://docs.aws.amazon.com/cdk/latest/guide/how_to_set_cw_alarm.html 59 | #https://cdkworkshop.com/30-python/20-create-project/500-deploy.html 60 | #https://linuxhint.com/bash-heredoc-tutorial/ 61 | #add admin permissions to ec2 instances for testing 62 | #https://aws.amazon.com/premiumsupport/knowledge-center/ec2-enable-epel/ 63 | #https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_lambda_event_sources.html 64 | 65 | -------------------------------------------------------------------------------- /SmallStackDeployment/lambda_handler.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | #this lambda needs kratos role also 3 | #bucket _arn arn:aws:s3:::atop-logs-sandbox 4 | class ATPS3: 5 | 6 | def __init__(self,instance_id): 7 | self.instance_id = instance_id 8 | self.ssm = boto3.client('ssm') 9 | 10 | def atop_to_s3(self): 11 | cmd = "for i in $(ls /var/log/atop); do aws s3 cp /var/log/atop/$i s3://atop-logs-sandbox/$(curl http://169.254.169.254/latest/meta-data/instance-id)$i; done" 12 | self.ssm.send_command(InstanceIds=[self.instance_id],DocumentName='AWS-RunShellScript', Parameters={'commands': [cmd]}) 13 | 14 | def lambda_handler(event,context): 15 | instance_id = event['Records'][0]['Sns']['Message'] 16 | checker = ATPS3(instance_id) 17 | checker.atop_to_s3() -------------------------------------------------------------------------------- /SmallStackDeployment/put_metric_data.py: -------------------------------------------------------------------------------- 1 | #!/bin/python3 2 | import subprocess 3 | import boto3 4 | import time 5 | import re 6 | 7 | class PIM: 8 | cloudwatch = boto3.client('cloudwatch', region_name='us-east-1') 9 | sns = boto3.client('sns',region_name='us-east-1') 10 | ssm = boto3.client('ssm',region_name='us-east-1') 11 | 12 | percent_idle = "iostat | grep -A1 avg-cpu | column | awk '{print $6}' | grep '[0-9]'" 13 | 14 | @classmethod 15 | def put_idle_metric(cls): 16 | pi = 
float(subprocess.check_output(cls.percent_idle, shell=True).decode('utf-8')) 17 | cls.cloudwatch.put_metric_data( 18 | MetricData=[ 19 | { 20 | 'MetricName': 'Custom_Percent_Idle', 21 | 'Dimensions': [ 22 | { 23 | 'Name': 'Custom Data', 24 | 'Value': 'Percent_Idle' 25 | }, 26 | ], 27 | 'Unit': 'Percent', 28 | 'Value': pi 29 | }, 30 | ], 31 | Namespace='Idle/CPU' 32 | ) 33 | 34 | return pi 35 | 36 | @classmethod 37 | def publish_instance_id(cls): 38 | param = cls.ssm.get_parameter(Name='cdk-sns-arn') 39 | sns_arn = param['Parameter']['Value'] 40 | result = subprocess.Popen("curl http://169.254.169.254/latest/meta-data/instance-id",stdout=subprocess.PIPE,shell=True) 41 | res, _ = result.communicate() 42 | instance_id = re.findall(r'(i-[0-9aA-zZ]+)',res.decode())[0] 43 | cls.sns.publish(TargetArn=sns_arn, Message=instance_id) 44 | 45 | @classmethod 46 | def run_metric_for_minute(cls): 47 | count = 0 48 | for i in range(61): 49 | percent_idle = cls.put_idle_metric() 50 | if percent_idle > float(50): 51 | count +=1 52 | time.sleep(1) 53 | if count > 59: 54 | cls.publish_instance_id() 55 | 56 | 57 | 58 | PIM.run_metric_for_minute() 59 | 60 | #AWS EC2 SQS SSM S3 Cloudwatch Cloudformation CDK practice exercise - 61 | #Bootstrap EC2 instances with ATOP and custom script to push instance metrics to Cloudwatch. 62 | #Creates an SNS topic with a Lambda fuction subscription. The Lambda is trigged 63 | #bootsrapped python script publishes to the SNS topic when a metric threshold is breached. 64 | #Once breached the lambda uses Systems Manager (SSM) to execute a bash command to upload ATOP logs to S3 65 | 66 | -------------------------------------------------------------------------------- /UsingOkta_1.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | #Needed for OauthFlow Needed for Oauth2Flow 4 | def getAuthCodeFromCallBack(oktaDomain,clientID,redirectUri): 5 | #url needed to start Oauth2 flow in order to recieve 6 | #uthCode returned in url, authcode is needed to retrieve JWT 7 | return f'https://{oktaDomain}/oauth2/' \ 8 | + f'default/v1/authorize?client_id={clientID}'\ 9 | + f'&response_type=code&scope=openid&'\ 10 | + f'redirect_uri={redirectUri}&state=si3mshady&nonce=888' 11 | 12 | def getOktaJWT(oktaDomain,clientID,clientSecret,redirectUrl,AuthCode): 13 | #jwt identifies a user 14 | url = f'https://{oktaDomain}/oauth2/default/v1/token' 15 | payload = {"client_id":clientID,"client_secret":clientSecret,"grant_type":"authorization_code",\ 16 | "redirect_uri":redirectUrl,"code":AuthCode} 17 | r1 = requests.post(url, data=payload) 18 | if r1.status_code == 200: 19 | return r1.json()['access_token'] 20 | else: 21 | print(res) 22 | print(res.text) 23 | 24 | 25 | def loginGetSesionToken(oktaDomain,username,password): 26 | # https://realpython.com/python-requests/#the-message-body 27 | url = f"https://{oktaDomain}/api/v1/authn" 28 | headers = {"Accept": "application/json", "Content-Type": "application/json"} 29 | #if you use data parameter you will get 400 The request body was not well-formed 30 | res = requests.post(url,headers=headers,json={"username":username,"password": password}) 31 | if res.status_code == 200: 32 | print(res) 33 | return res.json()['sessionToken'] 34 | else: 35 | print(res) 36 | print(res.text) 37 | 38 | def getLogs(oktaDomain, application_api_token): 39 | url = f"https://{oktaDomain}/api/v1/logs" 40 | headers = {"Accept": "application/json","Content-Type": "application/json", "Authorization": f"SSWS 
{application_api_token}"} 41 | res = requests.get(url, headers=headers) 42 | if res.status_code == 200: 43 | print(res) 44 | print(res.text) 45 | postToSIEM(res.text) 46 | return res.json() 47 | else: 48 | print(res) 49 | print(res.text) 50 | 51 | 52 | def postToSIEM(data,siem_endpoint=''): 53 | siem_endpoint = "https://hvyjotibx6.execute-api.us-east-1.amazonaws.com/v1/siemdata" 54 | res = requests.post(url=siem_endpoint, data=data) 55 | if res.status_code == 200: 56 | print(f"Data posted successfully to {siem_endpoint}") 57 | else: 58 | print(res) 59 | print(res.text) 60 | print(f"Data post unsuccessfull") 61 | 62 | 63 | 64 | #AWS Okta IdentityProvider Requests 65 | #Okta Identity 66 | 67 | # https://developer.okta.com/docs/reference/ 68 | #https://developer.okta.com/docs/reference/api/system-log/#examples 69 | #/https://devforum.okta.com/t/how-to-login-to-okta-and-access-an-app-using-python-for-automation-purpose/11171 70 | # https://www.w3schools.com/python/ref_requests_response.asp 71 | -------------------------------------------------------------------------------- /UsingOkta_2.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | const bodyParser = require('body-parser') 3 | const app = express(); 4 | 5 | app.use(bodyParser.urlencoded({ 6 | extended: true 7 | })); 8 | 9 | 10 | app.get('/', (req,res) => { 11 | res.send('

<h1>Authcode and JWT fetcher</h1>

') 12 | }) 13 | 14 | 15 | app.get('/authorization-code/callback', (req,res) => { 16 | const authCode = res.req.query.code 17 | console.log(authCode) 18 | const data = { authcode: authCode } 19 | res.setHeader('Content-Type','application/json') 20 | res.json(data) 21 | 22 | }) 23 | 24 | app.listen(8080, () => { 25 | console.log("Server started on port 8080") 26 | }) 27 | 28 | //to serve callback http request needed for Oauth2 Authentication 29 | //1-17-21 Elliott Arnold AWS DMS 30 | -------------------------------------------------------------------------------- /VPC_API/api_base.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from botocore.exceptions import ClientError 3 | 4 | class CustomVPC: 5 | def __init__(self,vpc_cidr='10.0.0.0/16'): 6 | if VPC_UTIL.check_valid_cb(vpc_cidr): 7 | self.ec2 = boto3.client('ec2') 8 | self.vpcCidrBlock = vpc_cidr 9 | self.vpc_metadata = self.makeCustomVPC() 10 | self.vpc_cb = self.vpc_metadata['CidrBlock'] 11 | self.vpc_id = self.vpc_metadata['VpcId'] 12 | 13 | def makeCustomVPC(self): 14 | return self.ec2.create_vpc(CidrBlock=self.vpcCidrBlock)['Vpc'] 15 | 16 | class CustomSubnet: 17 | def __init__(self,vpc_id,cidrBlk): 18 | self.ec2 = boto3.client('ec2') 19 | self.vpc_id = vpc_id 20 | self.cidrBlk = cidrBlk 21 | if VPC_UTIL.check_valid_cb(cidrBlk): 22 | self.subnet_metadata = self.make_subnet() 23 | if self.subnet_metadata != None: 24 | self.az_id = self.subnet_metadata['AvailabilityZoneId'] 25 | self.az = self.subnet_metadata['AvailabilityZone'] 26 | self.subnet_id = self.subnet_metadata['SubnetId'] 27 | 28 | def make_subnet(self): 29 | try: 30 | return self.ec2.create_subnet(CidrBlock=self.cidrBlk, VpcId=self.vpc_id)['Subnet'] 31 | except ClientError as e: 32 | print(e) 33 | 34 | class CustomRT: 35 | def __init__(self,vpc_id): 36 | self.ec2 = boto3.client('ec2') 37 | self.vpc_id = vpc_id 38 | self.rt_metadata = self.create_route_table() 39 | self.route_table_id = self.rt_metadata['RouteTableId'] 40 | self.routes = self.rt_metadata['Routes'] 41 | 42 | def create_route_table(self): 43 | return self.ec2.create_route_table(VpcId=self.vpc_id)['RouteTable'] 44 | 45 | class MakeIGW: 46 | def __init__(self): 47 | self.ec2 = boto3.client('ec2') 48 | self.igw_metadata = self.makeigw() 49 | self.igw_id = self.igw_metadata['InternetGatewayId'] 50 | 51 | def makeigw(self): 52 | return self.ec2.create_internet_gateway()['InternetGateway'] 53 | 54 | 55 | class VPC_UTIL: 56 | ec2 = boto3.client('ec2') 57 | 58 | @classmethod 59 | def associate_rt(cls,route_table_id,subnet_id): 60 | return cls.ec2.associate_route_table(RouteTableId=route_table_id, SubnetId=subnet_id)['AssociationId'] 61 | 62 | @classmethod 63 | def check_valid_cb(cls, cidr): 64 | cidr = int(cidr.split('/')[-1]) 65 | if cidr < 16 or cidr > 24: 66 | print('VPC Cidr block cannot be larger than /16 or smaller than /24') 67 | return False 68 | else: 69 | return True 70 | 71 | @classmethod 72 | def attach_igw(cls,igw_id,vpc_id): 73 | return cls.ec2.attach_internet_gateway(InternetGatewayId=igw_id, VpcId=vpc_id)['ResponseMetadata'] 74 | 75 | @classmethod 76 | def create_public_route(cls,route_table_id,gateway_id): 77 | return cls.ec2.create_route(DestinationCidrBlock='0.0.0.0/0',RouteTableId=route_table_id,GatewayId=gateway_id)['Return'] 78 | 79 | 80 | 81 | #AWS VPC FLASK Practice 82 | #Creating Custom VPCS, Subnets, Route Tables, IGW and Associations & Routes 83 | #Use flask-restful to create a simple api to mak 84 | #Elliott Arnold 12-21-19 85 | 86 | 
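#A minimal wiring sketch for the classes above (assumes default AWS credentials and
#region; every ID comes from the objects created here, nothing is hard-coded):
# vpc = CustomVPC('10.0.0.0/16')
# subnet = CustomSubnet(vpc.vpc_id, '10.0.1.0/24')
# rt = CustomRT(vpc.vpc_id)
# igw = MakeIGW()
# VPC_UTIL.attach_igw(igw.igw_id, vpc.vpc_id)
# VPC_UTIL.associate_rt(rt.route_table_id, subnet.subnet_id)
# VPC_UTIL.create_public_route(rt.route_table_id, igw.igw_id)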
87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /addStorageRDS.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import boto3 3 | import json 4 | import os 5 | 6 | '''configure log level''' 7 | logger = logging.getLogger() 8 | logger.setLevel(logging.INFO) 9 | 10 | '''access environment variables''' 11 | db_instance = os.environ['DB'] 12 | arn = os.environ['TOPIC'] 13 | 14 | '''access resources''' 15 | client = boto3.client('rds') 16 | sns = boto3.client('sns') 17 | 18 | def addStorage(event,context): 19 | message_body = event['Records'][0]['Sns']['Message'] 20 | trigger = json.loads(message_body)['Trigger'] 21 | logging.info(trigger) 22 | modifyDbResponse = client.modify_db_instance(DBInstanceIdentifier=db_instance, 23 | AllocatedStorage=50, 24 | ApplyImmediately=True) 25 | response = sns.publish(TopicArn=arn, Message=str(trigger)) 26 | 27 | 28 | #AWS Lambda practice exercise - Using Cloudwatch - SNS and Lambda to monitor low storage condition on RDS. 29 | #Fuction allocates additional storage to database when triggered and publishes to a SNS topic 30 | #Elliott Arnold 7-20-19 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /add_cloudfront_src_ip_resource_policy.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | 4 | 5 | 6 | def get_cf_ip_ranges(range_type='GLOBAL'): 7 | ip = 'http://d7uri8nf7uskq.cloudfront.net/tools/list-cloudfront-ips' 8 | ranges = requests.get(ip).json() 9 | if range_type.upper() == 'GLOBAL': 10 | return ranges['CLOUDFRONT_GLOBAL_IP_LIST'] 11 | elif range_type.upper() == 'REGIONAL': 12 | return ranges['CLOUDFRONT_REGIONAL_EDGE_IP_LIST'] 13 | 14 | 15 | def allowListCFSourceIPs(resource,allowList): 16 | resource_policy = { 17 | "Version": "2012-10-17", 18 | "Statement": [{ 19 | "Effect": "Allow", 20 | "Principal": "*", 21 | "Action": "execute-api:Invoke", 22 | "Resource": resource 23 | }, 24 | { 25 | "Effect": "Deny", 26 | "Principal": "*", 27 | "Action": "execute-api:Invoke", 28 | "Resource": resource, 29 | "Condition": { 30 | "NotIpAddress": { 31 | "aws:SourceIp": allowList 32 | } 33 | } 34 | } 35 | ] 36 | } 37 | 38 | print(json.dumps(resource_policy)) 39 | 40 | 41 | 42 | 43 | if __name__ == "__main__": 44 | while True: 45 | ip_ranges = input("Type 'Regional' or 'Global' for Cloudfront IP Ranges> ") 46 | if ip_ranges.lower() == 'regional' or ip_ranges.lower() == 'global': 47 | break 48 | else: 49 | continue 50 | allowListCFSourceIPs(resource='execute-api:/*/*/*',allowList=get_cf_ip_ranges(ip_ranges)) 51 | 52 | 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /add_sg_to_instances_cloudwatch.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | ec2 = boto3.client('ec2') 3 | ec2Resource = boto3.resource('ec2') 4 | 5 | sg_list = ["sg-067fd407a5350c96e","sg-016934c304771db40"] 6 | 7 | 8 | def compare(list_a,list_b): 9 | #the use of 'set' allows for comparing the values and removing duplicates 10 | if len(list_a) > len(list_b): 11 | 12 | missing_sg = set(list_a) - set(list_b) 13 | 14 | return list(set(list_a)) + list(missing_sg))) 15 | 16 | elif len(list_b) > len(list_a): 17 | 18 | missing_sg = set(list_b) - set(list_a) 19 | 20 | return list(set(list_b)) + list(missing_sg) 21 | else: 22 | return (list(set(list_b)) - list(set(list_a))) 23 | 
24 | def check_attached_security_groups(instance_id,security_group_list): 25 | 26 | current_instance = ec2Resource.Instance(instance_id) 27 | 28 | attached_security_groups = [group['GroupId'] for group in current_instance.security_groups] 29 | 30 | required_groups = compare(security_group_list,attached_security_groups) 31 | 32 | complete_sg = list(set(required_groups + attached_security_groups)) 33 | 34 | if len(complete_sg) != 0: 35 | current_instance.modify_attribute(Groups=complete_sg) 36 | 37 | def lambda_handler(event, context): 38 | try: 39 | ec2_event = event 40 | instance_id = ec2_event['resources'][0].split('/')[-1] 41 | check_attached_security_groups(instance_id,sg_list) 42 | except KeyError: 43 | print(event) 44 | 45 | #AWS Lambda EC2 Security Groups Cloudwatch 46 | #Elliott Arnold 47 | #Add SG to Instance when launched in VPC => running state 48 | #If SG's are attached then no action is taken 49 | #quick and dirty 50 | #AWS DMS DFW 51 | #7/13/20 52 | -------------------------------------------------------------------------------- /alexa_echo_commands_lambda.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | 3 | ec2 = boto3.client('ec2') 4 | 5 | def lambda_handler(event,context): 6 | if event['request']['type'] == "LaunchRequest": 7 | return process_response("Would you like to start stop or count your instances") 8 | 9 | elif event['request']['type'] == "IntentRequest": 10 | return process_intent(event) 11 | 12 | def process_intent(event): 13 | intent = event['request']['intent']['name'] 14 | count = runningInstances() 15 | 16 | if intent == 'activateInstances': 17 | if count is not None: 18 | message = f"launching {count} instances" 19 | launchInstances() 20 | return process_response(message) 21 | 22 | elif intent == 'deactivateInstances': 23 | 24 | if count is not None: 25 | message = f"shutting down {count} instances" 26 | stopInstances() 27 | return process_response(message) 28 | 29 | elif intent == 'countInstances': 30 | 31 | if count is not None: 32 | message = f" you have {count}" 33 | return process_response(message) 34 | 35 | 36 | def process_response(message): 37 | speech_response = { 38 | "version": "1.0", 39 | "response": { 40 | "outputSpeech": { 41 | "type": "PlainText", 42 | "text": message 43 | }, 44 | "shouldEndSession": True 45 | } 46 | } 47 | return speech_response 48 | 49 | 50 | def launchInstances(): 51 | instance_ids = get_instance_ids() 52 | response = ec2.start_instances(InstanceIds=instance_ids,DryRun=False) 53 | return response 54 | 55 | def stopInstances(): 56 | instance_ids = get_instance_ids() 57 | response = ec2.stop_instances(InstanceIds=instance_ids,Force=True) 58 | return response 59 | 60 | 61 | def runningInstances(): 62 | running_instances = len(get_instance_ids()) 63 | return running_instances 64 | 65 | 66 | def get_instance_ids(): 67 | response = ec2.describe_instances() 68 | instance_id_list = [[ec2['InstanceId'] for ec2 in response['Reservations'][i]['Instances']] for i in range(len(response['Reservations']))] 69 | instance_id_strings = [i[0] for i in instance_id_list] 70 | return instance_id_strings 71 | 72 | 73 | 74 | #AlexaSkillsPractice - Amazon Echo - Created a basic Alexa skill with intents and utterances to start stop and count EC2 instances 75 | #Elliott Arnold 9-27-19 76 | 77 | #https://developer.amazon.com/docs/custom-skills/handle-requests-sent-by-alexa.html 78 | #https://developer.amazon.com/docs/custom-skills/request-and-response-json-reference.html 79 | 
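#A sketch of the two request shapes the handler above branches on (abridged; real
#Alexa requests also carry version, session, and context blocks):
# {"request": {"type": "LaunchRequest"}}
# {"request": {"type": "IntentRequest", "intent": {"name": "countInstances"}}}
#The intent name must be one of activateInstances, deactivateInstances, or
#countInstances, matching the skill's interaction model.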
-------------------------------------------------------------------------------- /apik/api_key_wrapper.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | import boto3 3 | 4 | #think of the params being applied to "new_event" as those 5 | #you will supply to the actual calling function in this case 'arg1','arg2' 6 | class KeyManagment: 7 | @classmethod 8 | def store_authentication_information(cls,fn): 9 | @wraps(fn) 10 | def new_event(arg1,arg2): 11 | ddbR = boto3.resource('dynamodb',region_name='us-east-1') 12 | dest_table = ddbR.Table('testing_api_keys') 13 | apigw = boto3.client('apigateway') 14 | user, email = fn(arg1,arg2) 15 | result = apigw.create_api_key(name=email,enabled=True) 16 | api_key = result['value'] 17 | api_key_id = result['id'] 18 | dest_table.put_item(Item={str('email'): str(email), \ 19 | str('user'): str(user), \ 20 | str('api_key') : str(api_key), \ 21 | str('api_key_id') : str(api_key_id) }) 22 | return new_event 23 | 24 | @classmethod 25 | def add_api_key_usage_plan(cls,fn): 26 | @wraps(fn) 27 | def new_event(arg): 28 | email = fn(arg) 29 | apigw = boto3.client('apigateway') 30 | ddb = boto3.client('dynamodb',region_name='us-east-1') 31 | ddb_result = ddb.scan(TableName='testing_api_keys') 32 | api_key_id = [key['api_key_id'] for key in ddb_result['Items'] \ 33 | if key['email']['S'] == email ][0]['S'] 34 | response = apigw.create_usage_plan_key(usagePlanId='tkfpit', 35 | keyId=api_key_id, keyType='API_KEY') 36 | return new_event 37 | 38 | 39 | 40 | @KeyManagment.store_authentication_information 41 | def get_username_email(arg1,arg2): 42 | return arg1,arg2 43 | 44 | @KeyManagment.add_api_key_usage_plan 45 | def update_usage_plan(arg): 46 | return arg 47 | 48 | #AWS API Gateway Dynamo DB Usage API Key practice 49 | #Create simple decorators to work with API 50 | #Decorators provide additional functionality to methods 51 | #When used, decorators generate api keys, 52 | #updates DynamoDb table and assign api key to usage plan for use with authentication 53 | #Elliott Arnold 6-11-20 54 | 55 | 56 | 57 | 58 | 59 | -------------------------------------------------------------------------------- /apik/api_keys_api.py: -------------------------------------------------------------------------------- 1 | from api_key_wrapper import (KeyManagment, get_username_email, update_usage_plan) 2 | from flask import Flask, request 3 | from flask_restful import Resource,Api 4 | import boto3 5 | 6 | 7 | app=application=Flask(__name__) 8 | api=Api(app) 9 | 10 | 11 | class NewApiKey(Resource): 12 | def post(self): 13 | data = request.get_json() 14 | username = data['username'] 15 | email = data['email'] 16 | get_username_email(username,email) 17 | update_usage_plan(email) 18 | json_response = { 19 | "status": 200, 20 | "message": f"Thank you for registering,{username}, " + \ 21 | "your API KEY will be sent to you in a seperate email." 
22 | } 23 | return json_response 24 | 25 | api.add_resource(NewApiKey,'/new_key') 26 | 27 | 28 | if __name__ == "__main__": 29 | app.run(host="0.0.0.0", debug=True) 30 | 31 | 32 | #AWS API Gateway Dynamo DB Usage API Key practice 33 | #Create simple decorators to work with flask API 34 | #Decorators provide additional functionality to methods 35 | #When used, decorators generate api keys, updates DynamoDb table and 36 | #assign api key to usage plan for use with authentication 37 | #Elliott Arnold 6-11-20 -------------------------------------------------------------------------------- /assign_2_eip_eni.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | 3 | class Assign2EIP: 4 | def __init__(self, instance): 5 | self.instance_id = instance 6 | self.ec2 = boto3.client('ec2') 7 | self.ec2R = boto3.resource('ec2') 8 | self.ssm = boto3.client('ssm') 9 | self.eni, self.attachment_id = self.get_eni() 10 | self.assign_private_ip() 11 | self.eip_dict = self.generate_2_eip() 12 | self.private_ips = self.fetch_private_ips() 13 | self.associate_eip_private_ip() 14 | 15 | def get_eni(self): 16 | eni = self.ec2R.Instance(self.instance_id) 17 | return (eni.network_interfaces_attribute[0]['NetworkInterfaceId'],eni.network_interfaces_attribute[0]['Attachment']['AttachmentId']) 18 | 19 | 20 | def assign_private_ip(self): 21 | result = self.ec2.assign_private_ip_addresses(NetworkInterfaceId=self.eni, SecondaryPrivateIpAddressCount=1) 22 | return result['AssignedPrivateIpAddresses'][0]['PrivateIpAddress'] 23 | 24 | def generate_2_eip(self): 25 | data = self.ec2.describe_addresses() 26 | counter = [] 27 | eip_dictionary = {} 28 | 29 | if len(data['Addresses']) == 0: 30 | for execute in range(2): 31 | data = self.ec2.allocate_address(Domain='vpc') 32 | public_ip = data['PublicIp'] 33 | allocation_id = data['AllocationId'] 34 | eip_dictionary[public_ip] = allocation_id 35 | return eip_dictionary 36 | 37 | if len(data['Addresses']) < 2: 38 | for i in data['Addresses']: 39 | if 'InstanceId' not in i.keys(): 40 | counter.append(i) 41 | if len(counter) < 2: 42 | for execute in range(1): 43 | data = self.ec2.allocate_address(Domain='vpc') 44 | updated_data = self.ec2.describe_addresses()['Addresses'] 45 | return {eip['PublicIp']: eip['AllocationId'] for eip in updated_data} 46 | 47 | 48 | else: 49 | data = self.ec2.describe_addresses()['Addresses'] 50 | return {eip['PublicIp']:eip['AllocationId'] for eip in data} 51 | 52 | def associate_eip_private_ip(self): 53 | allocation_ids = [ai for ai in self.eip_dict.values()] 54 | for index, pip in enumerate(self.private_ips): 55 | print(allocation_ids[index], pip) 56 | self.ec2.associate_address( 57 | AllocationId=allocation_ids[index], 58 | InstanceId=self.instance_id, 59 | PrivateIpAddress=pip) 60 | 61 | 62 | def fetch_private_ips(self): 63 | data = self.ec2.describe_network_interfaces(NetworkInterfaceIds=[self.eni]) 64 | return [pip['PrivateIpAddress'] for pip in data['NetworkInterfaces'][0]['PrivateIpAddresses']] 65 | 66 | 67 | def lambda_handler(event,context): 68 | instance_id = event['detail']['instance-id'] 69 | go = Assign2EIP(instance_id) 70 | -------------------------------------------------------------------------------- /automount/attach_partition.py: -------------------------------------------------------------------------------- 1 | import paramiko 2 | import boto3 3 | 4 | class MountFS: 5 | def __init__(self,instance_id): 6 | self.SSH_KEY_LOC = "/tmp/keyZ.pem" 7 | self.MOUNT_SCRIPT_LOC = 
"/tmp/autoMount.py" 8 | self.instance_id = instance_id 9 | self.ec2 = boto3.client('ec2') 10 | self.s3 = boto3.client('s3') 11 | 12 | def init_mount(self): 13 | self.download_script() 14 | self.download_ssh_key() 15 | self.ssh_put_script() 16 | self.ssh_run_command() 17 | 18 | def get_dns_name(self): 19 | response = self.ec2.describe_instances(InstanceIds=[self.instance_id]) 20 | return response['Reservations'][0]['Instances'][0]['PublicDnsName'] 21 | 22 | def download_ssh_key(self,bucket='ssh-keyz',key='keyZ.pem'): 23 | s3_resp = self.s3.get_object(Bucket=bucket, Key=key) 24 | ssh_key = s3_resp['Body'].read().decode() 25 | with open(self.SSH_KEY_LOC,'w') as ink: 26 | ink.write(ssh_key) 27 | 28 | def download_script(self, bucket='ssh-keyz', key='autoMount.py'): 29 | s3_resp = self.s3.get_object(Bucket=bucket, Key=key) 30 | mount_script = s3_resp['Body'].read().decode() 31 | with open(self.MOUNT_SCRIPT_LOC, 'w') as ink: 32 | ink.write(mount_script) 33 | print('Script downloaded') 34 | 35 | def ssh_put_script(self): 36 | key = paramiko.RSAKey.from_private_key_file(self.SSH_KEY_LOC) 37 | client = paramiko.SSHClient() 38 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 39 | client.connect(hostname=self.get_dns_name(), username="ubuntu", pkey=key) 40 | ftp = client.open_sftp() 41 | ftp.put(self.MOUNT_SCRIPT_LOC,'/home/ubuntu/autoMount.py') 42 | ftp.close() 43 | print('Script Uploaded') 44 | 45 | def ssh_run_command(self): 46 | key = paramiko.RSAKey.from_private_key_file(self.SSH_KEY_LOC) 47 | client = paramiko.SSHClient() 48 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 49 | client.connect(hostname=self.get_dns_name(), username="ubuntu", pkey=key) 50 | _, stdout, _ = client.exec_command('python3 /home/ubuntu/autoMount.py') 51 | print('Command Run Successfully') 52 | 53 | 54 | def lambda_handler(event,context): 55 | instance_id = event['detail']['responseElements']['instanceId'] 56 | mount = MountFS(instance_id) 57 | mount.init_mount() 58 | -------------------------------------------------------------------------------- /automount/autoMount.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | 4 | def mountFS(): 5 | result = subprocess.check_output("lsblk | awk '{print $1}'", shell=True).decode() 6 | 7 | result_list = result.split('\n') 8 | 9 | if '' in result_list: 10 | result_list.remove('') 11 | 12 | output = subprocess.check_output(f"sudo file -s /dev/{result_list[-1]}", shell=True).decode() 13 | 14 | if ": data" in output: 15 | subprocess.check_output(f"sudo mkfs -t xfs /dev/{result_list[-1]}", shell=True).decode() 16 | output = subprocess.check_output(f"sudo mkdir /{result_list[-1]}", shell=True).decode() 17 | subprocess.check_output(f"sudo mount /dev/{result_list[-1]} /{result_list[-1]}", shell=True).decode() 18 | 19 | 20 | mountFS() 21 | 22 | 23 | 24 | #AWS CloudWatch EBS Lambda Practice 25 | #Triggering Lambda with Cloudwatch event 'Attach Partition' 26 | #Once EC2 instance has a volume attached, lambda is triggered and the volume is mounted to the FileSystem 27 | #Elliott Arnold 28 | #11-26-2019 29 | -------------------------------------------------------------------------------- /basic_serverless_api_deployment.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: AWS::Serverless-2016-10-31 3 | 4 | Resources: 5 | ReplicateCustIssue: 6 | Type: AWS::Serverless::Api 7 | Properties: 8 | StageName: earlybird 9 
| 10 | Validate: 11 | Type: AWS::Serverless::Function 12 | Properties: 13 | FunctionName: ReplicateCustomerIssue 14 | Runtime: python3.8 15 | Handler: lambda_function.lambda_handler 16 | CodeUri: s3://deploy-bucket-s3-us-east-1/func.zip 17 | Events: 18 | ValidateResponse: 19 | Type: Api 20 | Properties: 21 | RestApiId: !Ref "ReplicateCustIssue" 22 | Path: /earlybird 23 | Method: GET 24 | 25 | InlineFunction: 26 | Type: AWS::Serverless::Function 27 | Properties: 28 | FunctionName: ReplicateCustomerIssuePart2 29 | Runtime: python3.8 30 | Handler: index.lambda_handler 31 | InlineCode: | 32 | import json 33 | 34 | def lambda_handler(event, context): 35 | # TODO implement 36 | return { 37 | 'statusCode': 200, 38 | 'body': json.dumps('Looks good to me, Si3mshady!') 39 | } 40 | Events: 41 | ValidateResponse: 42 | Type: Api 43 | Properties: 44 | RestApiId: !Ref "ReplicateCustIssue" 45 | Path: /earlybirdspecial 46 | Method: GET 47 | -------------------------------------------------------------------------------- /checkSecurityGroup.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from botocore.exceptions import ClientError 3 | 4 | class CheckSecurityGroups: 5 | def __init__(self): 6 | self.ec2 = boto3.client('ec2') 7 | self.data = self.ec2.describe_security_groups() 8 | 9 | def ingressQuadZeroGroups(self): 10 | '''produces a generator containing all sec groups where ingress rule permits all traffic 0.0.0.0/0 ''' 11 | for i, _ in enumerate(self.data['SecurityGroups']): 12 | try: 13 | if self.data['SecurityGroups'][i]['IpPermissions'][0]['IpRanges'][0]['CidrIp'] == '0.0.0.0/0': 14 | yield (self.data['SecurityGroups'][i]['GroupId']) 15 | except IndexError: 16 | pass 17 | 18 | def get_active_sg(self): 19 | '''retrieve security group of all ec2 instances''' 20 | self.ec2_resource = boto3.resource('ec2') 21 | response = self.ec2.describe_instances() 22 | instance_id_list = [[insta['InstanceId'] for insta in response['Reservations'][i]['Instances']] for i in 23 | range(len(response['Reservations']))] 24 | for i in instance_id_list: 25 | self.instance = self.ec2_resource.Instance(i[0]) 26 | sg = self.instance.security_groups 27 | active_groups = sg[0]['GroupId'] 28 | yield (active_groups) 29 | 30 | def removeUnattachedSG(self): 31 | '''Determine which security groups 32 | allow all inbound traffic to an ec2 instance but does not have 33 | a dependent object (instance) associated''' 34 | check = CheckSecurityGroups() 35 | quadZero = list(check.ingressQuadZeroGroups()) 36 | active = list(check.get_active_sg()) 37 | while len(active) < len(quadZero): 38 | for i in quadZero: 39 | if i not in active: 40 | try: 41 | quadZero.remove(i) 42 | response = self.ec2.delete_security_group(GroupId=i) 43 | except ClientError: 44 | pass 45 | 46 | #AWS Security Group practice - Determine which security groups 47 | #allow all inbound traffic to an instance which does not have a dependent object (instance) attached 48 | #Elliott Arnold 10-9-19 49 | 50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /components/App.js: -------------------------------------------------------------------------------- 1 | import React , {useState} from 'react'; 2 | import Video from './components/Video' 3 | import Form from './components/Form' 4 | import Calendar from './components/Cal' 5 | import Nav from './components/Navigation' 6 | 7 | export default function App(props) { 8 | const [submitClicked,setSubmitClicked] = useState(false) 
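// submitClicked starts false; showCal below flips it to true so the render can react to form submission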
9 | 10 | function showCal(e) { setSubmitClicked(true) } 11 | 12 | return ( 13 |
14 | <div> 15 | <Nav /> 16 | <Video /> 17 | <Form showCal={showCal} /> {submitClicked && <Calendar />} </div>
26 | 27 | ) 28 | } 29 | 30 | //Learning ReactJS - Basic Web form with AirBnB OpenSource Calendar (React-Dates ) AWS SES 31 | //Learning ReactJS at AWS 32 | //Elliott Arnold 11-5-20 WIP 33 | -------------------------------------------------------------------------------- /components/Navigation.js: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import {Nav} from 'react-bootstrap' 3 | 4 | export default function Navigation() { 5 | return ( 6 |
7 | 8 | 16 | 17 |
18 | ) 19 | } 20 | -------------------------------------------------------------------------------- /components/SendMail.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk') 2 | const secretKey = process.env.REACT_APP_AWS_SECRET_ACCESS_KEY 3 | const accessKey = process.env.REACT_APP_AWS_ACCESS_KEY_ID 4 | 5 | export const sendEmail = (fname,email,datetime) => { 6 | console.log('Inside Config') 7 | 8 | 9 | AWS.config.update({ 10 | accessKeyId: accessKey, 11 | secretAccessKey: secretKey, 12 | region: 'us-east-1' 13 | }); 14 | 15 | // const ses = new AWS.SES({ apiVersion: "2010-12-01" }); 16 | const params = { 17 | Destination: { 18 | ToAddresses: [`${email}`] // Email address/addresses that you want to send your email 19 | }, 20 | 21 | Message: { 22 | Body: { 23 | Html: { 24 | // HTML Format of the email 25 | Charset: "UTF-8", 26 | Data: 27 | `

<h1>Hello ${fname}</h1>

<p>Sample description</p>

<p>Scheduled service time ${datetime}</p>

` 28 | }, 29 | Text: { 30 | Charset: "UTF-8", 31 | Data: `Thank you for confirming your service time` 32 | } 33 | }, 34 | Subject: { 35 | Charset: "UTF-8", 36 | Data: "Test email" 37 | } 38 | }, 39 | Source: "alquimista2891@gmail.com" 40 | }; 41 | 42 | var sendEmail = new AWS.SES({apiVersion: '2010-12-01'}).sendEmail(params).promise(); 43 | 44 | // var sendEmail = ses.sendEmail(params).promise(); 45 | 46 | sendEmail 47 | .then(data => { 48 | console.log("email submitted to SES", data); 49 | }) 50 | .catch(error => { 51 | console.log(error); 52 | }) 53 | 54 | 55 | 56 | } 57 | 58 | //Learning ReactJS - Basic Web form with AirBnB OpenSource Calendar (React-Dates ) AWS SES 59 | //Learning ReactJS at AWS 60 | //Elliott Arnold 11-5-20 WIP -------------------------------------------------------------------------------- /components/Video.js: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import FallBack from '../components/video/fallBack.mp4' 3 | 4 | export default function video() { 5 | return ( 6 |
7 | <video controls autoPlay loop muted> 20 | <source src={FallBack} type="video/mp4" /> 21 | Your browser does not support HTML5 video. 22 | </video>
23 | ) 24 | } 25 | -------------------------------------------------------------------------------- /components/video/fallBack.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/components/video/fallBack.mp4 -------------------------------------------------------------------------------- /covid19_docker_redis_flask_webscrape/__init__.py: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------------------------------- /covid19_docker_redis_flask_webscrape/covid_19_docker_plot.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | import gmplot 3 | import xlrd 4 | import redis 5 | import re 6 | 7 | 8 | class ProcessCSV: 9 | def __init__(self): 10 | self._filename = 'uscities.xlsx' 11 | self._workbook = self.open_xlsx() 12 | 13 | def open_xlsx(self) -> xlrd: 14 | return xlrd.open_workbook(self._filename) 15 | 16 | def get_sheets(self) -> xlrd: 17 | return self._workbook.sheets() 18 | 19 | def get_cities_states_lats_longs(self) -> tuple: 20 | #preserve list order dont sort yet 21 | sheet = self.get_sheets()[0] 22 | cities = sheet.col_values(colx=0)[1:] 23 | states = sheet.col_values(colx=3)[1:] 24 | latitudes = sheet.col_values(colx=8)[1:] #N/S 25 | longitudes = sheet.col_values(colx=9)[1:] #E/W 26 | zip_code = sheet.col_values(colx=17)[1:] 27 | return (cities,states,latitudes,longitudes,zip_code) 28 | 29 | def map_cites_states_lats_longs(self) -> dict: 30 | container = OrderedDict() 31 | cities,states,lats,longs, zip_code = self.get_cities_states_lats_longs() 32 | for i,_ in enumerate(cities): 33 | container[cities[i] +','+ states[i]] = {'City':cities[i],\ 34 | 'State':states[i],'Latitude':lats[i], \ 35 | 'Longitude':longs[i],'Zipcode':zip_code[i]} 36 | return container 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | #https://xlrd.readthedocs.io/en/latest/api.html -------------------------------------------------------------------------------- /covid19_docker_redis_flask_webscrape/plot_data.py: -------------------------------------------------------------------------------- 1 | from covid_wiki_scrapper import CV19 2 | from covid_19_docker_plot import ProcessCSV 3 | import gmplot 4 | import glob 5 | 6 | class GPlot: 7 | def __init__(self): 8 | self.us_center = (39.587522, -101.040949) 9 | self.zoom = 3 10 | self.csv_parser = ProcessCSV() 11 | self.wiki_scrape = CV19() 12 | self.gmap = self.set_gmplot() 13 | self.file_count = self.local_file_count() 14 | 15 | def set_gmplot(self) -> gmplot: 16 | return gmplot.GoogleMapPlotter(self.us_center[0],\ 17 | self.us_center[1],self.zoom) 18 | 19 | def local_file_count(self) -> int: 20 | return len(glob.glob('*')) 21 | 22 | 23 | def complile_lats_longs_heat_map(self) -> None: 24 | data = list(self.wiki_csv_fusion_generator()) 25 | lats = [lat['Latitude'] for lat in data] 26 | longs = [lat['Longitude'] for lat in data] 27 | self.gmap.heatmap(lats,longs) 28 | self.gmap.draw(f'heat_plot_{self.file_count}.html') 29 | 30 | def wiki_csv_fusion_generator(self): 31 | #order data by states in order to populate the lat,long lists required by gmplot 32 | csv_lats_longs = self.csv_parser.map_cites_states_lats_longs() 33 | wiki_scrapped_data = self.wiki_scrape.make_data_documents() 34 | for wiki in wiki_scrapped_data: 35 | for entry in 
csv_lats_longs.values(): 36 | if wiki in entry['State']: 37 | yield entry 38 | 39 | 40 | if __name__ == "__main__": 41 | test = GPlot() 42 | test.complile_lats_longs_heat_map() 43 | 44 | 45 | #get the center of the country -> Kansas 39.587522, -101.040949 46 | #Google Map Plotter 1-3 zoom 47 | #use heatmap 48 | 49 | 50 | -------------------------------------------------------------------------------- /covid19ws/redis_mysql.py: -------------------------------------------------------------------------------- 1 | import mysql.connector 2 | import boto3 3 | import re 4 | 5 | class DBConnect: 6 | def __init__(self): 7 | self.ssm = boto3.client('ssm',region_name='us-east-1') 8 | self.create_db = "CREATE DATABASE IF NOT EXISTS elasticache;" 9 | self.use_db_query = "USE elasticache;" 10 | self.create_table = "CREATE TABLE IF NOT EXISTS ec (state VARCHAR(30),cases VARCHAR(30),fatal VARCHAR(30),recovered VARCHAR(30))" 11 | self.rds_pw,self.rds_ep = self.get_ssm_parameters() 12 | self.db_connection, self.db_cursor = self.set_db_connection_cursor() 13 | self.set_db_default_table() 14 | 15 | def get_ssm_parameters(self): 16 | #fetch credentials from SSM Parameter Store 17 | rds_pw = self.ssm.get_parameter(Name='rds-password')['Parameter']['Value'] 18 | rds_ep = self.ssm.get_parameter(Name='rds-endpoint')['Parameter']['Value'] 19 | return (rds_pw,rds_ep) 20 | 21 | def set_db_connection_cursor(self): 22 | #cursor is required to execute queries, connection processes commits 23 | connection = mysql.connector.connect(host=self.rds_ep,user='si3mshady',password=self.rds_pw) 24 | cursor = connection.cursor() 25 | return connection,cursor 26 | 27 | def set_db_default_table(self): 28 | try: 29 | self.db_cursor.execute(self.create_db) 30 | self.db_cursor.execute(self.use_db_query) 31 | self.db_cursor.execute(self.create_table) 32 | except Exception as e: 33 | print(e) 34 | 35 | def insert_all_db(self,state,case,rip,val): 36 | #called by cv19WS to dynamically insert values into the db 37 | try: 38 | query = "INSERT INTO ec (state,cases,fatal,recovered) Values(%s,%s,%s,%s)" 39 | self.db_cursor.execute(query,(state,case,rip,val,)) 40 | except IndexError as e: 41 | pass 42 | finally: 43 | self.db_connection.commit() 44 | 45 | 46 | #AWS ElastiCache #Redis #Webscrape 47 | #Learning Redis + RDS 48 | #Scrape Covid19 data from Wikipedia, process and insert into Elasticache (redis) 49 | #Elliott Arnold 4-12-20 => LateNightToil2 -------------------------------------------------------------------------------- /covid19ws/unused_queries.py: -------------------------------------------------------------------------------- 1 | def fill_table_states(self,state): 2 | try: 3 | query = "INSERT INTO ec (state) Values(%s)" 4 | self.db_cursor.execute(query,(state,)) 5 | self.db_connection.commit() 6 | except ValueError: 7 | pass 8 | 9 | 10 | def fill_table_cases(self,case): 11 | try: 12 | case = re.sub(r'[\n\[\],-]','',str(case)) 13 | query = "INSERT INTO ec (cases) Values(%s)" 14 | self.db_cursor.execute(query,(case,)) 15 | self.db_connection.commit() 16 | except Exception as e: 17 | print(e) 18 | 19 | 20 | def fill_table_fatals(self,rip): 21 | try: 22 | rip = re.sub(r'[\n\[\],-]','',str(rip)) 23 | query = "INSERT INTO ec (fatal) Values(%s)" 24 | self.db_cursor.execute(query,(rip,)) 25 | self.db_connection.commit() 26 | except Exception as e: 27 | print(e) 28 | 29 | 30 | def fill_table_recovered(self,val): 31 | try: 32 | val = re.sub(r'[\n\[\],-]','',str(val)) 33 | query = "INSERT INTO ec (recovered) Values(%s)" 34 | self.db_cursor.execute(query,(val,)) 35 |
self.db_connection.commit() 36 | except Exception as e: 37 | print(e) -------------------------------------------------------------------------------- /createVPC_Link_CLI.sh: -------------------------------------------------------------------------------- 1 | #Step 1 create vpc link - must have a network load balancer => 'target-arn' 2 | aws apigateway create-vpc-link --name tgif \ 3 | --target-arns arn:aws:elasticloadbalancing:us-east-:*:loadbalancer* 4 | 5 | #Step 2 get json response - id = connection-id of VPC LINK 6 | { 7 | "id": "888888", 8 | "name": "tgif", 9 | "targetArns": ["arn:aws:elasticloadbalancing:us-east-:*:loadbalancer*"], 10 | "status": "PENDING" 11 | } 12 | 13 | #Step 3 execute method integration command 14 | aws apigateway put-integration \ 15 | --region us-east-1 \ 16 | --rest-api-id 3pq5**** \ #located from API Dashboard - Name,Description,ID 17 | --resource-id lw**** \ #ID of Endpoint resource - 6 digit number located in navigation bar of API gateway console 18 | --http-method ANY \ 19 | --connection-type VPC_LINK \ 20 | --connection-id 888888 \ #Connection Id generated from step 2 21 | --integration-http-method ANY \ 22 | --type HTTP_PROXY \ 23 | --uri https://www.testing123.com 24 | 25 | #Step 4 check json response 26 | { 27 | "type": "HTTP_PROXY", 28 | "httpMethod": "ANY", 29 | "uri": "https://www.testing123.com", 30 | "connectionType": "VPC_LINK", 31 | "connectionId": "888888", 32 | "passthroughBehavior": "WHEN_NO_MATCH", 33 | "timeoutInMillis": 29000, 34 | "cacheNamespace": "lw****", #same as resource id 35 | "cacheKeyParameters": [] 36 | } 37 | -------------------------------------------------------------------------------- /custom_token_authorizer.py: -------------------------------------------------------------------------------- 1 | class CustomAuth: 2 | def __init__(self,event): 3 | self.event = event 4 | self.token = self.event['authorizationToken'] 5 | self.action = ['execute-api:Invoke'] 6 | 7 | def evaluate_authorization(self): 8 | #return 200 9 | if self.token.title() == 'Allow': 10 | return CreatePolicyDoc.gen_policy_document(action=self.action,effect=self.token) 11 | #return 403 12 | elif self.token.title() == 'Deny': 13 | return CreatePolicyDoc.gen_policy_document(action=self.action,effect=self.token) 14 | #return 401 15 | else: 16 | return CreatePolicyDoc.gen_policy_document() 17 | 18 | class CreatePolicyDoc: 19 | @classmethod 20 | def gen_policy_document(cls,effect='',action='',resource="*") -> dict: 21 | #policy response body: principalId and policyDocument are mandatory fields 22 | policy = { 23 | "principalId": "yyyyyyyy", 24 | "policyDocument": { 25 | "Version": "2012-10-17", 26 | "Statement": [ 27 | { 28 | "Action":cls.get_action_values(action), 29 | "Effect": effect.title(), 30 | "Resource": resource 31 | } 32 | ] 33 | }} 34 | return policy 35 | 36 | @classmethod 37 | def get_action_values(cls,action): 38 | if type(action) == list: 39 | return action 40 | else: 41 | return [action] #wrap a single action string so "Action" is always a list 42 | 43 | def lambda_handler(event,context): 44 | ''' 45 | request must be in the format 46 | {'type': 'TOKEN', 'authorizationToken': <'Allow'|'Deny'>, 47 | 'methodArn': 'arn of the target API Endpoint'} 48 | authorizer authenticates to invoke url 49 | ''' 50 | print(event) 51 | authorizer = CustomAuth(event) 52 | return authorizer.evaluate_authorization() 53 | 54 | #AWS Apigateway Lambda Token Authorizer 55 | #Serverless Practice 56 | #Elliott Arnold 5-22-20 57 | 58 | 59 | #https://console.aws.amazon.com/apigateway/home?region=us-east-1#/apis/nakuwetywj/authorizers 60 |
#https://stackoverflow.com/questions/41486130/aws-api-gateway-execution-failed-due-to-configuration-error-invalid-json-in-re 61 | -------------------------------------------------------------------------------- /dynamo_db_replication.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | 3 | class ReplicateDDb: 4 | def __init__(self,event): 5 | self.event = event 6 | self.ddbR = boto3.resource('dynamodb',region_name='us-east-2') 7 | self.src_table,self.dest_table = self.get_source_dest_tables() 8 | 9 | def get_source_dest_tables(self) -> tuple: 10 | source_arn = self.event['Records'][0]['eventSourceARN'] 11 | source_table = source_arn.split('/')[-3] 12 | destination_table = source_arn.split('/')[-3].replace('1','2') 13 | return (source_table,destination_table) 14 | 15 | def process_event(self): 16 | try: 17 | if self.event['Records']: 18 | if self.event['Records'][0]['eventName'] == 'INSERT': 19 | self.replicate_new_event_insert() 20 | elif self.event['Records'][0]['eventName'] == 'MODIFY': 21 | self.replicate_new_event_modify() 22 | except Exception as e: 23 | print(e) 24 | exit() 25 | 26 | def replicate_new_event_insert(self): 27 | new_data_dictionary = self.event['Records'][0]['dynamodb']['NewImage'] 28 | #record of insertion data into dynamodb table 29 | kvp_list = list(new_data_dictionary.items()) 30 | key1 = kvp_list[0][0] 31 | data1 = [data for data in kvp_list[0][1].values()][0] 32 | 33 | key2 = kvp_list[1][0] 34 | data2 = [data for data in kvp_list[1][1].values()][0] 35 | 36 | dest_table = self.ddbR.Table(self.dest_table) 37 | dest_table.put_item(Item={str(key1): str(data1),str(key2): str(data2)}) 38 | 39 | return 1 40 | 41 | def replicate_new_event_modify(self): 42 | 43 | new_data_dictionary = self.event['Records'][0]['dynamodb']['NewImage'] 44 | #creates a tuple of dictionaries - easier to work with 45 | kvp_list = list(new_data_dictionary.items()) 46 | #nested dictionaries 47 | primary_key = kvp_list[0][0] 48 | primary_key_value = [data for data in kvp_list[0][1].values()][0] 49 | update_column = kvp_list[1][0] 50 | new_value = [data for data in kvp_list[1][1].values()][0] 51 | 52 | dest_table = self.ddbR.Table(self.dest_table) 53 | dest_table.update_item(Key={f'{primary_key}': f'{primary_key_value}'},\ 54 | UpdateExpression=f"set {update_column} = :placeholder", \ 55 | ExpressionAttributeValues={":placeholder":f"{new_value}" }) 56 | 57 | return 1 58 | 59 | 60 | 61 | def lambda_handler(event,context): 62 | print(event) 63 | replicator = ReplicateDDb(event) 64 | replicator.process_event() 65 | 66 | 67 | #AWS #Lambda #DynamoDb Cross Region DDB Replication Practice 68 | #Use lambda to replicate ddb changes across regions - inserting new documents, modifiying existing documents 69 | #Primary key => string for simplicity 70 | #Elliott Arnold 71 | #5-19-2020 72 | 73 | 74 | -------------------------------------------------------------------------------- /evalConfigChange.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | 3 | ec2_client = boto3.client('ec2') 4 | ec2 = boto3.resource('ec2') 5 | sns = boto3.client('sns') 6 | response = ec2_client.describe_security_groups(GroupIds=['sg-011b88e8dd44b13c3']) 7 | check_egress = allowed_sg = '' 8 | 9 | def eval_securityGroup(event, context): 10 | try: 11 | check_egress = response['SecurityGroups'][0]['IpPermissionsEgress'][0]['IpRanges'][0]['CidrIp'] 12 | except: 13 | pass 14 | try: 15 | allowed_sg = 
response['SecurityGroups'][0]['IpPermissions'][0]['UserIdGroupPairs'][0]['GroupId'] 16 | except: 17 | pass 18 | 19 | def setEgress(): 20 | response = ec2_client.authorize_security_group_egress( 21 | GroupId='sg-011b88e8dd44b13c3', 22 | IpPermissions=[ 23 | { 24 | 'FromPort': 80, 25 | 'IpProtocol': 'tcp', 26 | 'IpRanges': [ 27 | { 28 | 'CidrIp': '10.0.0.0/24', 29 | 'Description': 'For testing purposes' 30 | }, 31 | ], 32 | 'ToPort': 5000 33 | }, 34 | ]) 35 | res = sns.publish( 36 | TopicArn='arn:aws:sns:us-east-1:952151691101:SecG_changes', 37 | Message='Reset Egress Rule') 38 | 39 | def setIngress(): 40 | response = ec2_client.authorize_security_group_ingress( 41 | GroupId='sg-011b88e8dd44b13c3', 42 | SourceSecurityGroupName='DummyGroup') 43 | res = sns.publish( 44 | TopicArn='arn:aws:sns:us-east-1:952151691101:SecG_changes', 45 | Message='Reset Ingress Rule') 46 | 47 | 48 | if check_egress != '10.0.0.0/24': 49 | setEgress() 50 | 51 | if allowed_sg != 'sg-07642620dd0e8cb28': 52 | setIngress() 53 | 54 | #AWS Lambda practice - basic lambda function that tests for configuration changes in a security group 55 | #if detected function resets the rule and message is published to an sns topic 56 | #Elliott Arnold 8-12-19 57 | 58 | -------------------------------------------------------------------------------- /fetch_error_info.py: -------------------------------------------------------------------------------- 1 | import wget, time 2 | from selenium.webdriver import Firefox 3 | from selenium.webdriver.common.by import By 4 | from selenium.common.exceptions import NoSuchElementException, ElementNotInteractableException 5 | 6 | 7 | class TroubleShoot_Exceptions: 8 | @classmethod 9 | def init_driver(cls): 10 | url = "https://www.google.com" 11 | driver = Firefox(executable_path="/Users/si3mshady/geckodriver") 12 | driver.get(url) 13 | driver.implicitly_wait(4) 14 | return driver 15 | 16 | @classmethod 17 | def simulate_exception(cls): 18 | exception = cls.throw_exception() 19 | cls.clarify_exception(exception) 20 | 21 | @classmethod 22 | def throw_exception(cls): 23 | value='x' 24 | try: 25 | int(value) + int(value) 26 | except Exception as e: 27 | return str(e) 28 | 29 | @classmethod 30 | def clarify_exception(cls,exception: str): 31 | driver = cls.init_driver() 32 | try: 33 | driver.find_element(By.XPATH, "//input[@type='text']").send_keys(exception) 34 | driver.find_element(By.XPATH, "//ul//li[1]").click() 35 | target = driver.find_element(By.XPATH, "(//h3[@class='LC20lb']/parent::a[@href])[1]") 36 | driver.execute_script("arguments[0].click();",target) 37 | time.sleep(5) 38 | print(driver.current_url) 39 | wget.download(url=driver.current_url, out='exception-troubleshooting-' + str(round(time.time() * 1000))) 40 | driver.close() 41 | except (NoSuchElementException, ElementNotInteractableException): 42 | driver.find_element(By.XPATH, "//input[@type='text']").click() 43 | driver.find_element(By.XPATH, "(//h3[@class='LC20lb']/parent::a[@href])[1]").click() 44 | time.sleep(5) 45 | print(driver.current_url) 46 | wget.download(url=driver.current_url, out='exception-troubleshooting-' + str(round(time.time() * 1000))) 47 | driver.close() 48 | finally: 49 | driver.quit() 50 | 51 | #Selenium Practice - learning to traverse the DOM with xpath 52 | #Utility Class to be used alongside troubleshooting exceptions 53 | #Once configured the script will preform a search and download of the most relevant article to resolve the issue. 
54 | #Elliott Arnold 11-2-19 55 | #si3mshady 56 | 57 | 58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /fetch_vpc_logs.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from botocore.exceptions import ClientError 3 | import boto3 4 | import subprocess 5 | import os 6 | 7 | 8 | class VPC_LF: 9 | 10 | @classmethod 11 | def check_inventory_file_exist(cls): 12 | if 'lf_inventory.txt' not in os.listdir('/home/ubuntu/even_flow'): 13 | subprocess.check_output([f'touch /home/ubuntu/even_flow/lf_inventory.txt'], shell=True) 14 | return 15 | 16 | @classmethod 17 | def begin(cls): 18 | cls.check_inventory_file_exist() 19 | if len(cls.check_for_new_log_files()) > 0: 20 | print('Downloading ' + str(len(cls.check_for_new_log_files())) + ' log files') 21 | for lf in cls.check_for_new_log_files(): 22 | cls.inventory_log_files_from_s3(lf) 23 | cls.fetch_vpc_flow_log(lf) 24 | cls.gunzip() 25 | else: 26 | print('No new log files.') 27 | 28 | @classmethod 29 | def sort_by_date(cls,data): 30 | '''sort using lambda func - last modified key''' 31 | return sorted(data, key=lambda x: x['LastModified']) 32 | 33 | @classmethod 34 | def list_log_files_from_s3(cls): 35 | bucket_name = 'evenflows' 36 | s3 = boto3.client('s3') 37 | return cls.sort_by_date(s3.list_objects(Bucket=bucket_name, 38 | Marker='AWSLogs/952151691101/vpcflowlogs/us-east-1')['Contents']) 39 | 40 | @classmethod 41 | def extract_log_file_key_name(cls): 42 | '''works in tandem with inventory log files from s3''' 43 | return set([keyname['Key'].split('/')[-1] for keyname in cls.list_log_files_from_s3()]) 44 | 45 | @classmethod 46 | def inventory_log_files_from_s3(cls,logfile): 47 | '''works in tandem with extract_log_file_key_name''' 48 | with open('lf_inventory.txt', 'a') as ink: 49 | ink.write(logfile + '\n') 50 | 51 | @classmethod 52 | def get_lf_inventory_from_file(cls): 53 | '''read log file inventory and return set of strings ''' 54 | return set([file.strip() for file in open('lf_inventory.txt').readlines()]) 55 | 56 | @classmethod 57 | def check_for_new_log_files(cls): 58 | '''check difference in s3 bucket list and local inventory ''' 59 | return cls.extract_log_file_key_name() - cls.get_lf_inventory_from_file() 60 | 61 | @classmethod 62 | def fetch_vpc_flow_log(cls,lf): 63 | s3 = boto3.resource('s3') 64 | formatted_timestring = (datetime.now().strftime('%Y/%m/%d')) 65 | base_key_path = f'AWSLogs/952151691101/vpcflowlogs/us-east-1/{formatted_timestring}/' 66 | try: 67 | s3.meta.client.download_file('evenflows', base_key_path + lf, f'/home/ubuntu/even_flow/lf/{lf}') 68 | except ClientError: 69 | pass 70 | 71 | @classmethod 72 | def gunzip(cls,directory='/home/ubuntu/even_flow/lf'): 73 | '''directory format .../dir../dir no trailing forward slash for directory ''' 74 | subprocess.check_output([f'gunzip {directory}/*.gz'], shell=True) 75 | 76 | 77 | VPC_LF.begin() 78 | 79 | #AWS EC2 VPC LINUX practice 80 | #Fetching VPC flow logs from s3 to ec2 for future processing 81 | #Elliott Arnold 82 | #elAlquimista 83 | #11-17-19 84 | 85 | -------------------------------------------------------------------------------- /flask_sns_cdk_webhook/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:alpine 2 | 3 | RUN mkdir cdk/ && apk add python3-dev && \ 4 | apk add py-pip && npm install -g aws-cdk && \ 5 | pip3 install aws-cdk.aws_ec2 && pip3 install aws_cdk.aws_iam &&\ 6 | pip3 
install boto3 7 | 8 | WORKDIR cdk/ 9 | 10 | COPY ./requirements.txt . 11 | 12 | RUN pip3 install -r requirements.txt 13 | 14 | CMD ["sh"] -------------------------------------------------------------------------------- /flask_sns_cdk_webhook/bootstrap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | yum update -y; 3 | #install java8 4 | yum install java-1.8.0-openjdk-devel -y 5 | yum install git -y 6 | git clone https://github.com/srimukh9/cleanjobhistory /home/ubuntu/cleanjobhistory 7 | cd /home/ubuntu/cleanjobhistory 8 | bash /home/ubuntu/cleanjobhistory/mvnw -------------------------------------------------------------------------------- /flask_sns_cdk_webhook/cdk_webhook.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import (core, aws_ec2, aws_iam) 2 | 3 | class WebhookProjectStack(core.Stack): 4 | 5 | def __init__(self, scope: core.Construct, id: str, **kwargs) -> None: 6 | super().__init__(scope, id, **kwargs) 7 | 8 | def get_userdata(): 9 | with open('bootstrap.sh', 'r') as userdata: 10 | return userdata.read() 11 | 12 | vmie_ami = "ami-00bf35d2ab0bdb452" 13 | default_vpc = "vpc-e94d1f93" 14 | ec2_role = "arn:aws:iam::88888888888:role/KratosRole" 15 | account_id = "8888888888" 16 | vm_import_image = aws_ec2.GenericLinuxImage({"us-east-1": vmie_ami}) 17 | core.Environment(account=account_id) 18 | kratos_role = aws_iam.Role.from_role_arn(self, 'KratosXL', role_arn=ec2_role) 19 | 20 | aws_ec2.Instance(self, f"VMIE-{vmie_ami}", instance_type=aws_ec2.InstanceType('t2.micro'), 21 | role=kratos_role, machine_image=vm_import_image, security_group=aws_ec2.CfnSecurityGroup(self, id=f"SG-{vmie_ami}", 22 | group_description=f"SG-CDK-{vmie_ami}"), vpc=aws_ec2.Vpc.from_lookup(self, f'CDK-VPC--{vmie_ami}', vpc_id=default_vpc), 23 | user_data=aws_ec2.UserData.custom(get_userdata()), key_name="covidQuarantine") 24 | 25 | -------------------------------------------------------------------------------- /flask_sns_cdk_webhook/flask_hook.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, request 2 | from flask_restful import Resource, Api 3 | import subprocess 4 | 5 | app=application=Flask(__name__) 6 | api=Api(app) 7 | 8 | class CaptainHook(Resource): 9 | def post(self): 10 | data = request.data 11 | cmd = 'sudo docker run -itv $(pwd):/cdk si3mshady/cdk-ec2-iam cdk deploy' 12 | subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) 13 | 14 | api.add_resource(CaptainHook,'/hook') 15 | 16 | if __name__ == "__main__": 17 | app.run(debug=True, host='0.0.0.0') 18 | 19 | -------------------------------------------------------------------------------- /flaskify/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.12 2 | WORKDIR /app 3 | COPY . . 
4 | RUN apk update && \ 5 | apk upgrade 6 | 7 | RUN apk add py-pip 8 | 9 | RUN pip install flask 10 | CMD ["python3","webserver.py"] -------------------------------------------------------------------------------- /flaskify/static/add.css: -------------------------------------------------------------------------------- 1 | * { 2 | margin: 0; 3 | padding: 0; 4 | background: #204051; 5 | } 6 | 7 | a { 8 | font-size: 34px; 9 | font-family: serif; 10 | 11 | color: #204051; 12 | text-decoration: none; 13 | 14 | 15 | 16 | 17 | } 18 | 19 | .center { 20 | position: absolute; 21 | left: 25%; 22 | top: 50% 23 | } 24 | 25 | #submit-btn { 26 | position: relative; 27 | top: 0px; 28 | left: 19px; 29 | 30 | } 31 | 32 | 33 | .navigation { 34 | height: 20%; 35 | width: 100%; 36 | display: block; 37 | position: fixed; 38 | z-index: 1; 39 | top: 0; 40 | left: 0; 41 | background-color:#cae8d5; 42 | 43 | } 44 | 45 | .pad-right { 46 | margin-right: 10px; 47 | } 48 | 49 | 50 | .btn-info { 51 | 52 | 53 | background-color: #204051 !important; 54 | } 55 | 56 | .btn-info a{ 57 | 58 | text-decoration: none; 59 | color: whitesmoke !important; 60 | } 61 | 62 | .btn-info a:link { 63 | 64 | text-decoration: none; 65 | color: whitesmoke !important; 66 | } 67 | 68 | .saturate { filter: saturate(3); } 69 | .grayscale { filter: grayscale(100%); } 70 | .contrast { filter: contrast(160%); } 71 | .brightness { filter: brightness(0.25); } 72 | .blur { filter: blur(3px); } 73 | .invert { filter: invert(100%); } 74 | .sepia { filter: sepia(100%); } 75 | .huerotate { filter: hue-rotate(180deg); } 76 | .rss.opacity { filter: opacity(50%); } 77 | 78 | .btn { 79 | 80 | color: wheat; 81 | position: relative; 82 | top: 4px !important; 83 | } 84 | -------------------------------------------------------------------------------- /flaskify/static/app.js: -------------------------------------------------------------------------------- 1 | //index.html 2 | const makeUrlString = (basePath) => { 3 | 4 | return `https://6m295ad1if.execute-api.us-east-1.amazonaws.com/api/${basePath}/${Date.now()}` 5 | } 6 | 7 | $('#submit-button').click( () => { 8 | var keyword = $('#search-text').val() 9 | axios.post(makeUrlString('searchQuery'), {key: keyword}).then((response) => { 10 | var myData = response.data.success 11 | myData.forEach(element => { 12 | $('#bundle').append(`
${element}`) 13 | }); 14 | console.log(myData); 15 | }, (error) => {console.log(error); }); 16 | }) 17 | 18 | // add.html 19 | $('#submit-btn').click( () => { 20 | //take inputs 21 | var keyword = $('#keyword').val() 22 | var url = $('#url').val() 23 | //clear inputs 24 | $('#keyword').val('') 25 | $('#url').val('') 26 | 27 | axios.post(makeUrlString('addToDatabse'), { 28 | key: keyword, link: url 29 | 30 | }).then((response) => {console.log(response); 31 | 32 | },(error) => { console.log(error); }); 33 | }) 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /flaskify/static/search.css: -------------------------------------------------------------------------------- 1 | 2 | body { 3 | font-family: serif; 4 | background: #cae8d5; 5 | } 6 | 7 | * { 8 | box-sizing: border-box; 9 | } 10 | 11 | 12 | a { 13 | font-size: 34px; 14 | font-family: serif; 15 | 16 | color: #204051; 17 | text-decoration: none; 18 | 19 | } 20 | 21 | 22 | .overlay { 23 | height: 50%; 24 | width: 100%; 25 | display: block; 26 | position: fixed; 27 | z-index: 1; 28 | top: 20%; 29 | left: 0; 30 | background-color: #204051 31 | 32 | } 33 | 34 | 35 | .navigation { 36 | height: 50%; 37 | width: 100%; 38 | display: block; 39 | position: fixed; 40 | z-index: 1; 41 | top: 0; 42 | left: 0; 43 | background: #cae8d5; 44 | 45 | } 46 | 47 | 48 | 49 | .overlay-content { 50 | position: relative; 51 | top: 46%; 52 | width: 80%; 53 | text-align: center; 54 | margin-top: 30px; 55 | margin: auto; 56 | } 57 | 58 | .overlay .closebtn:hover { 59 | color: #ccc; 60 | } 61 | 62 | .overlay input[type=text] { 63 | font-family: serif; 64 | padding: 15px; 65 | font-size: 17px; 66 | border: none; 67 | float: left; 68 | width: 80%; 69 | background: white; 70 | } 71 | 72 | .overlay input[type=text]:hover { 73 | background: #f1f1f1; 74 | } 75 | 76 | .overlay button { 77 | float: left; 78 | width: 20%; 79 | padding: 15px; 80 | background: #ddd; 81 | font-size: 17px; 82 | border: none; 83 | cursor: pointer; 84 | } 85 | 86 | .overlay button:hover { 87 | background: #bbb; 88 | } 89 | -------------------------------------------------------------------------------- /flaskify/templates/add.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 21 | 22 |
23 | ... 40 | [add.html markup stripped by the HTML rendering; only blank numbered lines survive] -------------------------------------------------------------------------------- /flaskify/templates/index.html: -------------------------------------------------------------------------------- 1 | ... 56 | [index.html markup stripped by the HTML rendering; surviving visible text: "Results", "Begin search", "Mini Wiki Results"]
57 | 58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /flaskify/userdata.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #update EC2 & prepare system to run flask on EC2 instance 4 | sudo yum update -y 5 | sudo yum install -y gcc-c++ make 6 | sudo yum install httpd -y 7 | sudo chkconfig httpd on 8 | sudo service httpd start 9 | sudo amazon-linux-extras install docker 10 | sudo service docker start 11 | sudo usermod -a -G docker ec2-user 12 | sudo cp /etc/httpd/conf/httpd.conf /etc/httpd/conf/httpd.conf.bak 13 | sudo sed -i 's/AllowOverride None/AllowOverride ALL/g' /etc/httpd/conf/httpd.conf #critical 14 | docker pull si3mshady/miniwiki 15 | docker run --publish 888:888 si3mshady/miniwiki 16 | #needed to override default EC2 Apache settings establish flask 17 | #AllowOverride directive is used to allow the use of .htaccess within the web server to allow 18 | #overriding of the Apache config on a per directory basis. 19 | #port forwarding syntax -> ssh -L [LOCAL_IP:]LOCAL_PORT:DESTINATION:DESTINATION_PORT [USER@]SSH_SERVER 20 | #Docker Bash Userdata Port Forwarding Practice 21 | #Elliott Arnold 9-27-20 22 | -------------------------------------------------------------------------------- /flaskify/webserver.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, render_template, url_for 2 | 3 | app=application=Flask(__name__) 4 | 5 | 6 | @app.route('/') 7 | def index(): 8 | return render_template('index.html') 9 | 10 | 11 | @app.route('/add') 12 | def add(): 13 | return render_template('add.html') 14 | 15 | 16 | if __name__ == "__main__": 17 | app.run(debug=True, port=888, host='0.0.0.0') -------------------------------------------------------------------------------- /gen-api-friendlyname.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import boto3 3 | 4 | apigw = boto3.client('apigateway') 5 | 6 | #oneliner 7 | res = pd.DataFrame({'ids': [i.get('id') for i in apigw.get_rest_apis().get('items')],'name': \ 8 | [i.get('name') for i in apigw.get_rest_apis().get('items')]}) 9 | 10 | # >>> res 11 | # ids name 12 | # 0 xxjl5v9lsg second api 13 | # 1 ad5nzq58oj fourth api 14 | # 2 an5iwk2s98 third api 15 | # 3 cs9o4b5hkg first api 16 | 17 | # >>> res.to_csv() 18 | 19 | # ',ids,name\n0,55jl5v9lsg,second api\n1,ad5nzq58oj,fourth api\n2,an5iwk2s98,third api\n3,cs9o4b5hkg,first api\n' 20 | 21 | #Elliott Arnold - prod notes from the field - 7-11 22 | #DevOps/Developer 1-9-21 23 | -------------------------------------------------------------------------------- /generate_thumbnail.py: -------------------------------------------------------------------------------- 1 | from PIL import Image 2 | import boto3, cStringIO 3 | 4 | client = boto3.client('s3') 5 | 6 | def thumbnail(event,context): 7 | #print structure of event to log 8 | print(event) 9 | bucket = event['Records'][0]['s3']['bucket']['name'] 10 | key = event['Records'][0]['s3']['object']['key'] 11 | image = fetch_image(bucket,key) 12 | thumbnail_fileObject = pymParticles(image) 13 | thumbnail_key = generate_thumbnail_key(key) 14 | thumbnail_to_s3(bucket,thumbnail_key,thumbnail_fileObject) 15 | print('Thumbnail sucessfully added to s3 bucket: ' + bucket) 16 | return "GoT" 17 | 18 | def pymParticles(image): 19 | size = 128, 128 20 | fileObj = cStringIO.StringIO() #return a StringIO-like stream for reading or writing 
21 | copied_image = image.copy() 22 | copied_image.thumbnail(size) 23 | copied_image.save(fileObj, 'PNG') 24 | fileObj.seek(0) 25 | return fileObj 26 | 27 | def fetch_image(bucket,key): 28 | s3_response = client.get_object(Bucket=bucket, Key=key) 29 | image_binary_data = s3_response['Body'].read() 30 | #cStringIO.StringIO requires a string that is encoded as a bytes string we can impersonate string or bytes data like a file. 31 | fileObj = cStringIO.StringIO(image_binary_data) 32 | open_image_file_object = Image.open(fileObj) 33 | return open_image_file_object 34 | 35 | def thumbnail_to_s3(bucket,key,image_file_object): 36 | response = client.put_object( 37 | ACL='public-read', 38 | Body=image_file_object, 39 | Bucket=bucket, 40 | ContentType='image/png', 41 | Key=key ) 42 | 43 | 44 | def generate_thumbnail_key(string): 45 | split_string = string.rsplit('.', 1) 46 | return split_string[0] + "antman_.thumbnail.PNG" 47 | 48 | 49 | 50 | #Thumbnail Generator - AWS_Lambda - 5-19-19 Elliott Arnold got 51 | #Using buffer modules(StringIO, BytesIO, cStringIO) 52 | #These buffer modules help us to mimic our data like a normal file which we can further use for processing. 53 | #cStringIO.StringIO requires a string that is encoded as a bytes string 54 | #https://docs.python.org/2/library/stringio.html 55 | #https://pillow.readthedocs.io/en/3.0.x/reference/Image.html 56 | #https://pillow.readthedocs.io/en/3.0.x/reference/ImageOps.html 57 | #https://webkul.com/blog/using-io-for-creating-file-object/ 58 | #https://stackoverflow.com/questions/45473501/getting-pil-pillow-4-2-1-to-upload-properly-to-aws-lambda-py3-6 59 | # docker run -v `pwd`:/working -it --rm ubuntu 60 | -------------------------------------------------------------------------------- /get_orphan_sg_from_trusted_advisor.py: -------------------------------------------------------------------------------- 1 | import boto3, xlrd, re, os 2 | 3 | class GetOrphanSG: 4 | def __init__(self,filepath: str): 5 | self.ec2 = boto3.client('ec2') 6 | self.filepath = filepath if (os.path.isfile(filepath) == True) else exit() 7 | self.get_orphan_sg() 8 | 9 | def load_excel_file(self): 10 | try: 11 | workbook = xlrd.open_workbook(self.filepath) 12 | sheet = workbook.sheet_by_index(0) 13 | if "security groups" in sheet.cell(0, 0).value.lower(): 14 | sg = list((val for val in sheet.col_values(2) if val != '' and 'sg' in val)) 15 | parsed_sg = [re.sub(r"\(vpc-[\S]+",'',val) for val in sg] 16 | return set(parsed_sg) 17 | except FileNotFoundError: 18 | print('File ' + self.filepath + ' not found.') 19 | 20 | def check_sg_instance_association(self): 21 | return set([i['Instances'][0]['SecurityGroups'][0]['GroupId']for i in self.ec2.describe_instances()['Reservations']]) 22 | 23 | def get_orphan_sg(self): 24 | with open('orphaned_sg.txt', 'w') as ink: 25 | trusted_advisor_flagged_sg = self.load_excel_file() 26 | instances_with_sg = self.check_sg_instance_association() 27 | ink.write(str(trusted_advisor_flagged_sg.difference(instances_with_sg))) 28 | 29 | if __name__ == "__main__": 30 | while True: 31 | path = input("Please enter full path to spreadsheet> ") 32 | if not path.endswith('.xls'): 33 | print("Please ensure spreadsheet extension ends with .xls") 34 | continue 35 | else: 36 | break 37 | checkSg = GetOrphanSG(path) 38 | 39 | #AWS Trusted Advisor practice 40 | #Parse list of vulnerable security groups determine which groups are not associated with EC2 instance 41 | #Elliott Arnold 3-26-20 42 | 43 | 44 | 
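A note on the file above: check_sg_instance_association reads only the first instance and the first security group of each reservation, so a group attached to any other instance could still be reported as orphaned. A minimal sketch of a fuller flatten, under the same assumptions as the original (default boto3 credentials, us-east-1); the helper name all_attached_sg_ids is hypothetical, not part of the original class:

import boto3

def all_attached_sg_ids() -> set:
    #hypothetical helper: collect every security-group id attached to any instance,
    #walking all reservations, all instances, and all groups per instance
    ec2 = boto3.client('ec2')
    attached = set()
    for reservation in ec2.describe_instances()['Reservations']:
        for instance in reservation['Instances']:
            for sg in instance.get('SecurityGroups', []):
                attached.add(sg['GroupId'])
    #describe_instances returns one page here; large fleets would use the paginator
    return attached

Swapping this in for check_sg_instance_association would leave the rest of the class unchanged.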
-------------------------------------------------------------------------------- /google-chrome-repo.txt: -------------------------------------------------------------------------------- 1 | [google-chrome] 2 | name=google-chrome 3 | baseurl=http://dl.google.com/linux/chrome/rpm/stable/$basearch 4 | enabled=1 5 | gpgcheck=1 6 | gpgkey=https://dl-ssl.google.com/linux/linux_signing_key.pub 7 | -------------------------------------------------------------------------------- /iam-maintenance.py: -------------------------------------------------------------------------------- 1 | from dateutil.tz import tzutc 2 | from datetime import datetime 3 | import boto3 4 | 5 | class IAM_Maintenance(): 6 | 7 | def __init__(self,numDaysFromCreatedDate=90,lastAuthenticationThreshold=90,agedAccessKeyThreshold=90): 8 | self.iam = boto3.client('iam') 9 | self.inactiveDays = numDaysFromCreatedDate 10 | self.lastAuthThreshold = lastAuthenticationThreshold 11 | self.agedAccessKey = agedAccessKeyThreshold 12 | 13 | def fetchIAMUsers(self): 14 | return [username['UserName'] for username in self.iam.list_users()['Users']] 15 | 16 | def checkAgedAccessKeys(self): 17 | '''creates generator of all users with aged (x) access keys''' 18 | for _ ,user in enumerate(self.fetchIAMUsers()): 19 | createDate = self.iam.list_access_keys(UserName=user)['AccessKeyMetadata'][0]['CreateDate'] 20 | delta = datetime.now(tzutc()) - createDate 21 | if delta.days > self.agedAccessKey: 22 | yield (user) 23 | 24 | def fetchAgedRoles(self): 25 | '''if tzutc is not set, method will throw TypeError exception: 26 | can't subtract offset-naive and offset-aware datetimes''' 27 | self.roles = self.iam.list_roles() 28 | for index, record in enumerate(self.roles['Roles']): 29 | createdDate = self.roles['Roles'][index]['CreateDate'] 30 | deltaTime = datetime.now(tzutc()) - createdDate 31 | if deltaTime.days > self.inactiveDays: 32 | try: 33 | if self.checkLastAuthenticationDate(record['RoleName'],record['Arn']): 34 | yield(record['RoleName']) 35 | except KeyError: 36 | pass 37 | 38 | def checkLastAuthenticationDate(self,roleName,arn): 39 | '''check when an IAM resource (user, group, role, or policy) 40 | was last used in an attempt to access AWS services ''' 41 | job_id = self.iam.generate_service_last_accessed_details(Arn=arn)['JobId'] 42 | last_authenticated_date = self.iam.get_service_last_accessed_details(JobId=job_id)['ServicesLastAccessed'][0]['LastAuthenticated'] 43 | delta = datetime.now(tzutc()) - last_authenticated_date 44 | if delta.days > self.lastAuthThreshold: 45 | return True 46 | 47 | #AWS IAM Administration exercise: created basic class to help determine aging IAM resources (roles and user access) 48 | #Elliott Arnold 10-13-19 49 | #late night toil burning the midnight oil 50 | 51 | 52 | -------------------------------------------------------------------------------- /iam_react_js_policies/App.js: -------------------------------------------------------------------------------- 1 | import React, {Component} from 'react'; 2 | import '/***/***/iam_viewer_modal_sidebar/iam-viewer/src/App.css' 3 | import Sidebar from './component/SideBar/SideBar' 4 | import Modal from './component/Modal/Modal' 5 | import Aux from './Aux/Aux' 6 | 7 | class App extends Component { 8 | 9 | state = { 10 | policyArn: 11 | ["arn:aws:iam::aws:policy/AdministratorAccess", 12 | "arn:aws:iam::aws:policy/AmazonCodeGuruProfilerFullAccess", 13 | "arn:aws:iam::aws:policy/AmazonDetectiveFullAccess"], 14 | url: 
"https://p3a7rd5ztb.execute-api.us-east-1.amazonaws.com/sandbox/iam-policy" 15 | } 16 | 17 | sideBarHandler = (event) => { 18 | let element = document.querySelector('#aside') 19 | console.log(event.target) 20 | element.remove() 21 | 22 | 23 | } 24 | render () { 25 | let element = this.state.policyArn.map((_,index) => { 26 | return 28 | }) 29 | 30 | return ( 31 | 32 |
33 | ... 36 | [JSX markup stripped by the HTML rendering; surviving expression: {element}]
37 | 38 | ) 39 | } 40 | 41 | } 42 | 43 | export default App; 44 | 45 | //AWS IAM ApiGateway Lambda Python React Compnents 46 | // Make request to ApiGateway return IAM policy document to browser 47 | // Practice with Modals and Sidebars 48 | // Elliott Arnold 7-19-20 49 | // Learning React JS - Need to learn CSS for real 50 | -------------------------------------------------------------------------------- /iam_react_js_policies/Aux.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | const aux = (props) => ( 4 | 5 |
{props.children}
6 | ); 7 | export default aux; 8 | 9 | //AWS IAM ApiGateway Lambda Python React Compnents 10 | // Make request to ApiGateway return IAM policy document to browser 11 | // Practice with Modals and Sidebars 12 | // Elliott Arnold 7-19-20 13 | // Learning React JS - Need to learn CSS for real 14 | -------------------------------------------------------------------------------- /iam_react_js_policies/Modal.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import '/***/***/iam_viewer_modal_sidebar/iam-viewer/src/App.css' 4 | 5 | const modal = (props) => ( 6 | 7 |
8 | ... 18 | [JSX markup stripped by the HTML rendering; surviving expression: {props.policyDoc}]
19 | 20 | ); 21 | 22 | export default modal 23 | 24 | 25 | //https://stackoverflow.com/questions/130404/javascript-data-formatting-pretty-printer -------------------------------------------------------------------------------- /iam_react_js_policies/SideBar.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import '/***/***/iam_viewer_modal_sidebar/iam-viewer/src/App.css' 3 | import SBE from './SideBarElements' 4 | import Aux from '/Users/ellarnol/iam_viewer_modal_sidebar/iam-viewer/src/Aux/Aux.js' 5 | 6 | 7 | const sidebar = (props) => ( 8 | 9 |
10 | ... 20 | [JSX markup stripped by the HTML rendering]
21 | ); 22 | 23 | export default sidebar 24 | 25 | //AWS IAM ApiGateway Lambda Python React Compnents 26 | // Make request to ApiGateway return IAM policy document to browser 27 | // Practice with Modals and Sidebars 28 | // Elliott Arnold 7-19-20 29 | // Learning React JS - Need to learn CSS for real 30 | -------------------------------------------------------------------------------- /iam_react_js_policies/SideBarElements.js: -------------------------------------------------------------------------------- 1 | import React, {Component} from 'react'; 2 | import Axios from 'axios' 3 | import Aux from '../../Aux/Aux' 4 | import '/***/***/iam_viewer_modal_sidebar/iam-viewer/src/App.css' 5 | import Modal from '/Users/ellarnol/iam_viewer_modal_sidebar/iam-viewer/src/component/Modal/Modal.js' 6 | 7 | 8 | class SidebarElements extends Component { 9 | 10 | state = { 11 | url: "https://p3a7rd5ztb.execute-api.us-east-1.amazonaws.com/sandbox/iam-policy", 12 | policy: null 13 | 14 | } 15 | 16 | ajaxRequest = (url,policyArn) => { 17 | const params = {arn: policyArn, version:"v1"} 18 | return Axios.post(url, params) 19 | 20 | } 21 | // Handle click event -> pass arn to axios -> api gateway -> display in modal 22 | apiGatewayHandler = (event,url) => { 23 | const iamPolicy = event.target 24 | 25 | console.log(event.target) 26 | this.ajaxRequest(url,iamPolicy.innerText).then(data => { 27 | const policy = data.data 28 | // policy must be stringified to display in browser 29 | this.setState({ 30 | policy: JSON.stringify(policy, null, 4) 31 | }) 32 | 33 | }).catch(err => { 34 | console.log(err,'WTF') 35 | }) 36 | 37 | } 38 | 39 | render () { 40 | return ( 41 | 42 | 43 |
44 | ... 58 | [JSX markup stripped by the HTML rendering; two bullet list items are all that survive]
59 | ) 60 | 61 | } 62 | }; 63 | 64 | 65 | 66 | export default SidebarElements; 67 | 68 | //AWS IAM ApiGateway Lambda Python React Compnents 69 | // Make request to ApiGateway return IAM policy document to browser 70 | // Practice with Modals and Sidebars 71 | // Elliott Arnold 7-19-20 72 | // Learning React JS - Need to learn CSS for real 73 | -------------------------------------------------------------------------------- /iam_react_js_policies/iam.py: -------------------------------------------------------------------------------- 1 | import json,boto3,pprint 2 | 3 | iam = boto3.client('iam') 4 | 5 | def get_json_iam(arn,version): 6 | p = iam.get_policy_version( PolicyArn=arn,VersionId=version) 7 | return p['PolicyVersion']['Document'] 8 | 9 | def format_json_string(string): 10 | return string.replace("\'", "\"") 11 | 12 | 13 | def lambda_handler(event,context): 14 | print(event) 15 | try: 16 | arn = json.loads(event['body'])['arn'] 17 | version = json.loads(event['body'])['version'] 18 | data = pprint.pformat(get_json_iam(arn,version)) 19 | return { 20 | 'statusCode': 200, 21 | 'headers': {'Content-Type': 'application/json', 22 | 'Access-Control-Allow-Origin': '*' }, 23 | 'body': format_json_string(str(data)), 24 | "isBase64Encoded": False 25 | 26 | } 27 | 28 | except Exception as e: 29 | print(e) 30 | 31 | 32 | #AWS IAM ApiGateway Lambda Python React Compnents 33 | #Make request to ApiGateway return IAM policy document to browser 34 | #Practice with Modals and Sidebars 35 | #Elliott Arnold 7-19-20 36 | #Learning React JS - Need to learn CSS for real 37 | -------------------------------------------------------------------------------- /insert__kinesis_data_aurora.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import base64 3 | import json 4 | import os 5 | 6 | class Serverless_RDS_Connect: 7 | def __init__(self,event): 8 | '''connect to aurora data api for querying rds db''' 9 | self.rds_data_client = boto3.client('rds-data') 10 | self.db_secret_arn = self.db_credentials_secrets_store_arn() 11 | self.cluster_arn, self.database_name = self.get_environment_variables() 12 | self.event = event 13 | 14 | '''kinesis event is encoded as base64 string ''' 15 | def decode_base64_event(self) -> dict: 16 | decoded = base64.b64decode(self.event['Records'][0]['kinesis']['data']).decode() 17 | return json.loads(decoded) 18 | 19 | def get_environment_variables(self) -> tuple: 20 | cluster_arn = os.getenv('cluster_arn') 21 | database_name = os.getenv('database_name') 22 | return cluster_arn,database_name 23 | 24 | def get_sql_params(self) -> list: 25 | insert_values = self.decode_base64_event() 26 | 27 | sql_parameters = [ {'name':'name', 'value':{'stringValue': str(insert_values['Name'][0])}}, 28 | {'name':'phone_number', 'value':{'stringValue': str(insert_values["Phone Number"][0])}}, 29 | {'name':'ssn', 'value':{'stringValue': str(insert_values["SSN"])}}] 30 | return sql_parameters 31 | 32 | def get_insert_statement(self) -> str: 33 | sql = 'insert into Users (name, phone_number, ssn ) values (:name,:phone_number,:ssn)' 34 | return sql 35 | 36 | def db_credentials_secrets_store_arn(self) -> str: 37 | '''secrets manager holds db username and pw ''' 38 | secret_client = boto3.client(service_name='secretsmanager') 39 | return secret_client.get_secret_value(SecretId='serverless')['ARN'] 40 | 41 | 42 | def execute_sql_query(self): 43 | self.rds_data_client.execute_statement(secretArn=self.db_secret_arn, 44 | 
database=self.database_name,resourceArn=self.cluster_arn, 45 | sql=self.get_insert_statement(),parameters=self.get_sql_params()) 46 | 47 | def lambda_handler(event,context): 48 | try: 49 | if 'kinesis' in event['Records'][0]: 50 | serverless = Serverless_RDS_Connect(event) 51 | serverless.execute_sql_query() 52 | except IndexError: 53 | pass 54 | 55 | #AWS Kinesis Lambda Aurora practice 56 | #Elliott Arnold BLM 57 | #6-5-20 58 | 59 | -------------------------------------------------------------------------------- /lambda.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import boto3 3 | FFMPEG_STATIC_TMP = "/tmp/ffmpeg" 4 | 5 | client = boto3.client('s3') 6 | 7 | def init(event,context): 8 | bucket = event['Records'][0]['s3']['bucket']['name'] 9 | key = event['Records'][0]['s3']['object']['key'] 10 | make_wavs(bucket,key) 11 | 12 | def make_wavs(bucket,key): 13 | s3_response = client.get_object(Bucket=bucket, Key=key) 14 | s3_response_ffmpeg = client.get_object(Bucket='lambda-code-aws', Key='ffmpeg') 15 | audio_data = s3_response['Body'].read() 16 | ffmpegExe = s3_response_ffmpeg['Body'].read() 17 | pathToMp3 = '/tmp/' + key.split('.')[0] + '.mp3' 18 | pathToWav = '/tmp/' + key.split('.')[0] + '.wav' 19 | with open(pathToMp3, 'wb') as ad: 20 | ad.write(audio_data) 21 | '''Download ffmpeg binary from s3 bucket, save it in /tmp/ good technique''' 22 | with open(FFMPEG_STATIC_TMP, 'wb') as ffmpeg: 23 | ffmpeg.write(ffmpegExe) 24 | cmd = f"chmod 755 {FFMPEG_STATIC_TMP}" 25 | data = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) 26 | cmd2 = FFMPEG_STATIC_TMP + " -i " + pathToMp3 + " " + pathToWav 27 | data = subprocess.Popen(cmd2, stdout=subprocess.PIPE, shell=True) 28 | wavBinary = open(pathToWav,'rb') 29 | waveToS3(bucket,pathToWav,wavBinary) 30 | 31 | def waveToS3(bucket,key,fileObj): 32 | response = client.put_object( 33 | ACL='public-read', 34 | Body=fileObj, 35 | Bucket=bucket, 36 | ContentType='audio/wav', 37 | Key=key.split('/')[-1] ) 38 | 39 | #AWS Lambda practice: convert MP3 audiofile to WAV file using lambda function 40 | #function is triggered by the creation of a new mp3 file in s3 bucket 41 | #Elliott Arnold 7-10-19 42 | # si3mshady -------------------------------------------------------------------------------- /lambdaHasher.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from hashlib import sha256 3 | from datetime import datetime 4 | import logging 5 | 6 | logger = logging.getLogger() 7 | logger.setLevel(logging.INFO) 8 | s3_cli = boto3.client('s3') 9 | ddb_res = boto3.resource('dynamodb', region_name='us-east-1') 10 | 11 | def init(event,context): 12 | logger.info(event) 13 | bucket = event['Records'][0]['s3']['bucket']['name'] 14 | key = event['Records'][0]['s3']['object']['key'] 15 | hashedVal = hashfile(key,bucket,s3_cli) 16 | response = putItem(ddb_res,hashedVal,key) 17 | logger.info(response) 18 | 19 | def hashfile(key,bucket,clientS3): 20 | response = clientS3.get_object(Bucket=bucket, Key=key) 21 | binary_data = response['Body'].read() 22 | hashed_value = sha256(binary_data).hexdigest() 23 | return hashed_value 24 | 25 | def genDateString(): 26 | now = datetime.now() 27 | dateString = now.strftime("%b-%d-%Y %H:%M:%S") 28 | return dateString 29 | 30 | def putItem(resourceDDB,hashVal,filename,table='Metadata'): 31 | 32 | response = resourceDDB.Table(table).put_item( 33 | Item={ 34 | 'md5':hashVal, 35 | 'filename':filename, 36 | 'timestamp': 
genDateString() 37 | } 38 | ) 39 | return response 40 | 41 | #AWS Lambda Practice Exercises: S3 & DynamoDB - lambda function for hashing uploaded files and writing data to DDB 42 | #Elliott Arnold 7-13-19 43 | #si3mshady 44 | 45 | 46 | -------------------------------------------------------------------------------- /lambda_handler.py: -------------------------------------------------------------------------------- 1 | import boto3, logging, psycopg2, openpyxl 2 | from configparser import ConfigParser 3 | from io import BytesIO 4 | 5 | logger = logging.getLogger() 6 | logger.setLevel(logging.INFO) 7 | s3_cli = boto3.client('s3') 8 | 9 | DB_CONFIG = '/tmp/config.ini' 10 | 11 | def begin(event,context): 12 | logger.info(event) 13 | bucket = event['Records'][0]['s3']['bucket']['name'] 14 | key = event['Records'][0]['s3']['object']['key'] 15 | getDBConfig(bucket) 16 | insert_data = openExcel(bucket,key) 17 | insertRDS(insert_data) 18 | 19 | 20 | def getDBConfig(bucket): 21 | responseObj = s3_cli.get_object(Bucket=bucket, Key=DB_CONFIG.rsplit('/')[-1]) 22 | cfg = responseObj['Body'].read() 23 | with open(DB_CONFIG,'wb') as config: 24 | config.write(cfg) 25 | 26 | def openExcel(bucket,key): 27 | responseObj = s3_cli.get_object(Bucket=bucket, Key=key) 28 | excelBinaryData = responseObj['Body'].read() 29 | '''use BytesIO to load binary data, instead of writing to disk''' 30 | wb = openpyxl.load_workbook(BytesIO(excelBinaryData)) 31 | sheet = wb.sheetnames[0] 32 | active_sheet = wb[sheet] 33 | insert_list = [x[0] for x in active_sheet.values] 34 | formatted_list = create_tuple_list(insert_list) 35 | return formatted_list 36 | 37 | def create_tuple_list(array): 38 | '''returns properly formatted list of tuples for working with SQL syntax''' 39 | formatted = [tuple([val]) for val in array] 40 | return formatted 41 | 42 | def config(filename=DB_CONFIG, section='postgresql'): 43 | '''parse conf file ''' 44 | parser = ConfigParser() 45 | '''read the config''' 46 | parser.read(filename) 47 | '''get the section wanted''' 48 | db = {} 49 | if parser.has_section(section): 50 | print('Reading db config') 51 | params = parser.items(section) 52 | for param in params: 53 | db[param[0]] = param[1] #key value structure from file 54 | else: 55 | raise Exception('Section {0} is not found in {1} file'.format(section,filename)) 56 | return db 57 | 58 | def insertRDS(data_list): 59 | # %s denotes string value 60 | sql = "Insert INTO artists(name) VALUES (%s)" 61 | connection = None 62 | try: 63 | params = config() 64 | connection = psycopg2.connect(**params) 65 | cursor = connection.cursor() 66 | cursor.executemany(sql, create_tuple_list(data_list)) 67 | connection.commit() 68 | cursor.close() 69 | except (Exception, psycopg2.DatabaseError) as error: 70 | print(error) 71 | finally: 72 | if connection is not None: 73 | connection.close() 74 | 75 | 76 | #AWS Lambda practice: Triggering Lambda functions 77 | #Function is triggered by the upload of a spreadsheet file (.xlsx) to s3 78 | #The data is parsed and then written to (POSTGRE) RDS table in AWS 79 | #Elliott Arnold 7-15-19 80 | #si3mshady 81 | 82 | #https://pandas.pydata.org/pandas-docs/version/0.20/io.html 83 | #https://github.com/jkehler/awslambda-psycopg2/issues/3 84 | #https://stackoverflow.com/questions/11618898/pg-config-executable-not-found 85 | #https://stackoverflow.com/questions/28526935/pg-ctl-error-while-loading-shared-libraries-libpq-so-5?lq=1 86 | #https://github.com/psycopg/psycopg2/issues/892 
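For reference, the config() parser above expects an INI file in the bucket shaped like the sketch below: a [postgresql] section whose key/value pairs are copied verbatim into a dict and passed straight through as keyword arguments to psycopg2.connect(). The values here are illustrative placeholders, not real endpoints or credentials:

[postgresql]
host=mydb-instance.xxxxxxxx.us-east-1.rds.amazonaws.com
port=5432
database=artists
user=postgres
password=********

Because every key in the section becomes another connection argument, adding e.g. connect_timeout=5 to the file needs no code change.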
-------------------------------------------------------------------------------- /lambda_handler_2.py: -------------------------------------------------------------------------------- 1 | import boto3, logging, psycopg2 2 | from openpyxl import load_workbook 3 | from configparser import ConfigParser 4 | from io import BytesIO 5 | 6 | logger = logging.getLogger() 7 | logger.setLevel(logging.INFO) 8 | s3_cli = boto3.client('s3') 9 | 10 | DB_CONFIG = '/tmp/config.ini' 11 | 12 | def begin(event,context): 13 | logger.info(event) 14 | bucket = event['Records'][0]['s3']['bucket']['name'] 15 | key = event['Records'][0]['s3']['object']['key'] 16 | getDBConfig(bucket) 17 | insert_data = openExcel(bucket,key) 18 | insertRDS(insert_data) 19 | 20 | def getDBConfig(bucket): 21 | responseObj = s3_cli.get_object(Bucket=bucket, Key=DB_CONFIG.rsplit('/')[-1]) 22 | cfg = responseObj['Body'].read() 23 | with open(DB_CONFIG,'wb') as config: 24 | config.write(cfg) 25 | 26 | def openExcel(bucket,key): 27 | responseObj = s3_cli.get_object(Bucket=bucket, Key=key) 28 | excelBinaryData = responseObj['Body'].read() 29 | '''use BytesIO to load binary data, instead of writing to disk''' 30 | wb = load_workbook(BytesIO(excelBinaryData)) 31 | print('Opened Excel Spreadsheet') 32 | sheet = wb.sheetnames[0] 33 | active_sheet = wb[sheet] 34 | main = [] 35 | '''create a list of tuple lists by extracting all rows and columns from spreadsheet''' 36 | for row in active_sheet.values: 37 | save_list = [] 38 | for column in row: 39 | record = column 40 | save_list.append(tuple([record])) 41 | main.append(save_list) 42 | print(main) 43 | return main 44 | 45 | def config(filename=DB_CONFIG, section='rds'): 46 | '''parse conf file ''' 47 | parser = ConfigParser() 48 | '''read the config''' 49 | parser.read(filename) 50 | '''get the section wanted''' 51 | db = {} 52 | if parser.has_section(section): 53 | print('Reading db config') 54 | params = parser.items(section) 55 | for param in params: 56 | db[param[0]] = param[1] #key value structure from file 57 | else: 58 | raise Exception('Section {0} is not found in {1} file'.format(section,filename)) 59 | return db 60 | 61 | def insertRDS(data_list): 62 | # %s denotes string value 63 | sql = "Insert INTO tx(City,State,County,Established,Alias,Initials,Email1,Email2) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)" 64 | connection = None 65 | try: 66 | params = config() 67 | logger.info(params) 68 | connection = psycopg2.connect(**params) 69 | cursor = connection.cursor() 70 | for i in range(len(data_list)): 71 | cursor.execute(sql, data_list[i]) 72 | connection.commit() 73 | cursor.close() 74 | except (Exception, psycopg2.DatabaseError) as error: 75 | print(error) 76 | finally: 77 | if connection is not None: 78 | connection.close() 79 | 80 | 81 | #AWS Lambda practice: Triggering Lambda functions - part 2 82 | #Function is enhanced to read multi column/row spreadsheet 83 | #Triggered by the upload of a spreadsheet file (.xlsx) to s3 84 | #The data is parsed and then written to (POSTGRE) RDS table in AWS 85 | #Elliott Arnold 7-23-19 86 | #si3mshady 87 | 88 | #https://en.wikipedia.org/wiki/List_of_counties_in_Texas -------------------------------------------------------------------------------- /lambda_layer_wt/build/asset-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": { 3 | "main.css": "/static/css/main.c625180c.chunk.css", 4 | "main.js": "/static/js/main.a6a939c5.chunk.js", 5 | "main.js.map": "/static/js/main.a6a939c5.chunk.js.map", 6 | 
"runtime-main.js": "/static/js/runtime-main.37e5f3d1.js", 7 | "runtime-main.js.map": "/static/js/runtime-main.37e5f3d1.js.map", 8 | "static/css/2.1a02f21c.chunk.css": "/static/css/2.1a02f21c.chunk.css", 9 | "static/js/2.11b63539.chunk.js": "/static/js/2.11b63539.chunk.js", 10 | "static/js/2.11b63539.chunk.js.map": "/static/js/2.11b63539.chunk.js.map", 11 | "index.html": "/index.html", 12 | "static/css/2.1a02f21c.chunk.css.map": "/static/css/2.1a02f21c.chunk.css.map", 13 | "static/css/main.c625180c.chunk.css.map": "/static/css/main.c625180c.chunk.css.map", 14 | "static/js/2.11b63539.chunk.js.LICENSE.txt": "/static/js/2.11b63539.chunk.js.LICENSE.txt" 15 | }, 16 | "entrypoints": [ 17 | "static/js/runtime-main.37e5f3d1.js", 18 | "static/css/2.1a02f21c.chunk.css", 19 | "static/js/2.11b63539.chunk.js", 20 | "static/css/main.c625180c.chunk.css", 21 | "static/js/main.a6a939c5.chunk.js" 22 | ] 23 | } -------------------------------------------------------------------------------- /lambda_layer_wt/build/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/lambda_layer_wt/build/favicon.ico -------------------------------------------------------------------------------- /lambda_layer_wt/build/index.html: -------------------------------------------------------------------------------- 1 | React App
-------------------------------------------------------------------------------- /lambda_layer_wt/build/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/lambda_layer_wt/build/logo192.png -------------------------------------------------------------------------------- /lambda_layer_wt/build/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/lambda_layer_wt/build/logo512.png -------------------------------------------------------------------------------- /lambda_layer_wt/build/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /lambda_layer_wt/build/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /lambda_layer_wt/build/static/css/main.c625180c.chunk.css: -------------------------------------------------------------------------------- 1 | .container{margin-top:10%;display:flex;justify-content:center}.innerContainer{border:1px solid #000}label{display:flex;justify-content:left}select{position:relative;right:61px}input{width:100%} 2 | /*# sourceMappingURL=main.c625180c.chunk.css.map */ -------------------------------------------------------------------------------- /lambda_layer_wt/build/static/css/main.c625180c.chunk.css.map: -------------------------------------------------------------------------------- 1 | {"version":3,"sources":["webpack://src/App.css"],"names":[],"mappings":"AAGA,WACE,cAAe,CACf,YAAa,CACb,sBAEF,CAEA,gBAIE,qBACF,CAMA,MAEA,YAAa,CACb,oBAEA,CAEA,OACE,iBAAkB,CAClB,UACF,CAEA,MACE,UACF","file":"main.c625180c.chunk.css","sourcesContent":["\n\n\n.container { \n margin-top: 10%;\n display: flex;\n justify-content: center;\n\n}\n\n.innerContainer {\n\n\n\n border: solid black 1px;\n}\n\n/* select {\n display: inline-block;\n} */\n\nlabel {\n\ndisplay: flex;\njustify-content: left;\n\n}\n\nselect {\n position: relative; \n right: 61px;\n}\n\ninput {\n width: 100%;\n}"]} -------------------------------------------------------------------------------- /lambda_layer_wt/build/static/js/2.11b63539.chunk.js.LICENSE.txt: -------------------------------------------------------------------------------- 1 | /* 2 | object-assign 3 | (c) Sindre Sorhus 4 | @license MIT 5 | */ 6 | 7 | /** @license React v0.20.1 8 | * scheduler.production.min.js 9 | * 10 | * Copyright (c) Facebook, Inc. and its affiliates. 11 | * 12 | * This source code is licensed under the MIT license found in the 13 | * LICENSE file in the root directory of this source tree. 
14 | */ 15 | 16 | /** @license React v17.0.1 17 | * react-dom.production.min.js 18 | * 19 | * Copyright (c) Facebook, Inc. and its affiliates. 20 | * 21 | * This source code is licensed under the MIT license found in the 22 | * LICENSE file in the root directory of this source tree. 23 | */ 24 | 25 | /** @license React v17.0.1 26 | * react-jsx-runtime.production.min.js 27 | * 28 | * Copyright (c) Facebook, Inc. and its affiliates. 29 | * 30 | * This source code is licensed under the MIT license found in the 31 | * LICENSE file in the root directory of this source tree. 32 | */ 33 | 34 | /** @license React v17.0.1 35 | * react.production.min.js 36 | * 37 | * Copyright (c) Facebook, Inc. and its affiliates. 38 | * 39 | * This source code is licensed under the MIT license found in the 40 | * LICENSE file in the root directory of this source tree. 41 | */ 42 | -------------------------------------------------------------------------------- /lambda_layer_wt/build/static/js/main.a6a939c5.chunk.js: -------------------------------------------------------------------------------- 1 | (this.webpackJsonplambda_layer_gen=this.webpackJsonplambda_layer_gen||[]).push([[0],{14:function(e,t,n){"use strict";n.r(t);var c=n(0),a=n(1),o=n.n(a),i=n(6),r=n.n(i),l=(n(13),n(5),n(2)),j=n(4),s=n(7),b={runtime:"",packageNameZip:"",s3Bucket:"",module:""};function d(){var e=Object(a.useState)(b),t=Object(s.a)(e,2),n=t[0],o=t[1];function i(e){var t=e.target,n=t.name,c=t.value;o((function(e){return console.log("Previous entries look like:",e),Object(j.a)(Object(j.a)({},e),{},Object(l.a)({},n,c))}))}return Object(c.jsx)("div",{className:"container",children:Object(c.jsx)("section",{id:"container-base",children:Object(c.jsx)("div",{align:"center",id:"deployment",children:Object(c.jsxs)("form",{children:[Object(c.jsx)("label",{for:"runtime",children:"Choose a runtime:"}),Object(c.jsxs)("select",{required:"true",onChange:i,id:"runtime",name:"runtime",children:[Object(c.jsx)("option",{value:""}),Object(c.jsx)("option",{name:"python3.8",children:"Python3.8"}),Object(c.jsx)("option",{name:"python3.7",children:"Python3.7"}),Object(c.jsx)("option",{name:"python3.6",children:"Python3.6"}),Object(c.jsx)("option",{name:"node",children:"Node"})]}),Object(c.jsx)("br",{}),Object(c.jsx)("br",{}),Object(c.jsxs)("p",{children:[Object(c.jsx)("label",{for:"packageNameZip",children:"Save Deployment As:"}),Object(c.jsx)("input",{onChange:i,id:"packageNameZip",type:"text",name:"packageNameZip",value:n.packageNameZip,placeholder:"Enter zipfile name"})]}),Object(c.jsxs)("p",{children:[Object(c.jsx)("label",{for:"S3Bucket",children:"Store in S3 bucket:"}),Object(c.jsx)("input",{onChange:i,id:"deploymentPackage",type:"text",name:"s3Bucket",value:n.s3Bucket,placeholder:"Enter S3 Bucket name"})]}),Object(c.jsxs)("p",{children:[Object(c.jsx)("label",{for:"Modules",children:"Package:"}),Object(c.jsx)("input",{onChange:i,id:"Module",type:"text",name:"module",value:n.module,placeholder:"Enter Module to install "})]}),Object(c.jsx)("br",{}),Object(c.jsx)("button",{type:"submit",action:"http://0.0.0.0:8080/",method:"post",onSubmit:function(e){o(b),e.preventDefault()},children:" Create Deployment Package "})]})})})})}var u=function(){return Object(c.jsx)(d,{})};r.a.render(Object(c.jsx)(o.a.StrictMode,{children:Object(c.jsx)(u,{})}),document.getElementById("root"))},5:function(e,t,n){}},[[14,1,2]]]); 2 | //# sourceMappingURL=main.a6a939c5.chunk.js.map -------------------------------------------------------------------------------- 
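The minified main chunk above is the lambda-layer generator form; it POSTs {runtime, packageNameZip, s3Bucket, module} to http://0.0.0.0:8080/ when "Create Deployment Package" is submitted. A minimal sketch of the kind of endpoint the form expects (hypothetical; the repo's actual handler lives in lambda_layer_wt/server/webserver.py, which is not shown in this section):

# hypothetical sketch, not the repo's server/webserver.py
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route("/", methods=["POST"])
def create_layer():
    body = request.get_json(force=True)
    runtime = body.get("runtime")          # e.g. "python3.8"
    zip_name = body.get("packageNameZip")  # name for the layer zip
    bucket = body.get("s3Bucket")          # destination S3 bucket
    module = body.get("module")            # pip package to bundle
    # ... pip install the module into python/, zip it as zip_name, upload to bucket ...
    return jsonify(status="accepted", layer=zip_name)

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8080)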
/lambda_layer_wt/build/static/js/runtime-main.37e5f3d1.js: -------------------------------------------------------------------------------- 1 | !function(e){function r(r){for(var n,l,a=r[0],f=r[1],i=r[2],c=0,s=[];c /* truncated: a bare '<' in this minified webpack runtime opened a bogus HTML tag during extraction, swallowing the rest of the chunk, the clean.py file header, and clean.py lines 1-52 */ -------------------------------------------------------------------------------- /lambda_layer_wt/clean.py: -------------------------------------------------------------------------------- 53 | patterns = { "(App.js JSX key, stripped during extraction)": "(replacement markup, stripped; visible text:) Clean Slate
", \ 54 | "import logo from './logo.svg';":'' } 55 | 56 | for pattern, repl in patterns.items(): 57 | cleanUpAppJS(pattern, repl) 58 | 59 | cleanIndexJS() 60 | removeFiles() 61 | createComponentsDirectory() 62 | 63 | 64 | if __name__ == "__main__": 65 | begin() -------------------------------------------------------------------------------- /lambda_layer_wt/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "lambda_layer_gen", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@testing-library/jest-dom": "^5.11.4", 7 | "@testing-library/react": "^11.1.0", 8 | "@testing-library/user-event": "^12.1.10", 9 | "bootstrap": "^4.5.3", 10 | "react": "^17.0.1", 11 | "react-bootstrap": "^1.4.0", 12 | "react-dom": "^17.0.1", 13 | "react-scripts": "4.0.1", 14 | "web-vitals": "^0.2.4" 15 | }, 16 | "scripts": { 17 | "start": "react-scripts start", 18 | "build": "react-scripts build", 19 | "test": "react-scripts test", 20 | "eject": "react-scripts eject" 21 | }, 22 | "eslintConfig": { 23 | "extends": [ 24 | "react-app", 25 | "react-app/jest" 26 | ] 27 | }, 28 | "browserslist": { 29 | "production": [ 30 | ">0.2%", 31 | "not dead", 32 | "not op_mini all" 33 | ], 34 | "development": [ 35 | "last 1 chrome version", 36 | "last 1 firefox version", 37 | "last 1 safari version" 38 | ] 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /lambda_layer_wt/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/lambda_layer_wt/public/favicon.ico -------------------------------------------------------------------------------- /lambda_layer_wt/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 14 | 15 | 19 | 20 | 29 | React App 30 | 31 | 32 | 33 |
34 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /lambda_layer_wt/public/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/lambda_layer_wt/public/logo192.png -------------------------------------------------------------------------------- /lambda_layer_wt/public/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/si3mshady/aws_lambda_functions-/e4942354f59deb4f404056661d5834ffd33ae7cb/lambda_layer_wt/public/logo512.png -------------------------------------------------------------------------------- /lambda_layer_wt/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /lambda_layer_wt/public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /lambda_layer_wt/src/App.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | .container { 5 | margin-top: 10%; 6 | display: flex; 7 | justify-content: center; 8 | 9 | } 10 | 11 | .innerContainer { 12 | 13 | 14 | 15 | border: solid black 1px; 16 | } 17 | 18 | /* select { 19 | display: inline-block; 20 | } */ 21 | 22 | label { 23 | 24 | display: flex; 25 | justify-content: left; 26 | 27 | } 28 | 29 | select { 30 | position: relative; 31 | right: 61px; 32 | } 33 | 34 | input { 35 | width: 100%; 36 | } -------------------------------------------------------------------------------- /lambda_layer_wt/src/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import 'bootstrap/dist/css/bootstrap.min.css'; 4 | 5 | 6 | import App from './App'; 7 | 8 | 9 | ReactDOM.render( 10 | 11 | 12 | , 13 | document.getElementById('root') 14 | ); 15 | 16 | // If you want to start measuring performance in your app, pass a function 17 | // to log results (for example: reportWebVitals(console.log)) 18 | // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals 19 | 20 | -------------------------------------------------------------------------------- /learningGraphQL.js: -------------------------------------------------------------------------------- 1 | const {ApolloServer} = require('apollo-server'); 2 | const { promisify } = require('util'); 3 | const exec = promisify(require('child_process').exec) 4 | const { gql } = require('apollo-server'); 5 | 6 | const typeDefs = gql` 7 | # Query/Mutations/ must have a matching resolvers 8 | type SystemData { 9 | username: String! 
10 | loadAverage: String! 11 | diskUtil: String! 12 | } 13 | 14 | type Query { 15 | querySystemData: SystemData! 16 | } 17 | 18 | ` 19 | async function getSystemData() { 20 | 21 | const user = await exec("w | awk '{print $1}' | sort -u | grep -v USER | grep -v ':'") 22 | const loadAverage = await exec('uptime | awk \'{print $10 " " $11 " " $12 }\'') 23 | const diskUtil = await exec("df -h | grep 'xvda' | awk '{print $5}' | head -n 1") 24 | return { username:user.stdout.trim(), 25 | loadAverage:loadAverage.stdout.trim(), 26 | diskUtil: diskUtil.stdout.trim()} 27 | 28 | } 29 | 30 | const resolvers = { 31 | Query:{ querySystemData:getSystemData } } 32 | 33 | 34 | const apolloServer = new ApolloServer({ 35 | typeDefs, 36 | resolvers 37 | 38 | }); 39 | 40 | apolloServer.listen({ port: 5000}).then((res) => { 41 | console.log(`Server running at ${res.url}`) 42 | }); 43 | 44 | 45 | // AWS EC2 GraphQL practice - Queries and Resolvers 46 | // Running Apollo Server from EC2 - using basic query and resolver to fetch basic system data 47 | // Elliott Arnold 12-13-20 48 | -------------------------------------------------------------------------------- /learning_paramiko.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import paramiko 3 | import re 4 | import time 5 | 6 | ec2 = boto3.client('ec2') 7 | s3 = boto3.client('s3') 8 | 9 | BUCKET_NAME = 'si3mshady-ssh' 10 | LOG_BUCKET = 'ec2-logs-public' 11 | KEY_NAME = "keyZ.pem" 12 | LOCAL_SCRATCH_SPACE = "/tmp/" 13 | EC2_HOSTNAME = "ec2-52-23-252-87.compute-1.amazonaws.com" 14 | TARGET_DIR_EC2 = "/home/ubuntu/" 15 | cmd = ['for i in $(sudo find /var/log/ -iname "*log*"); do sudo tar zcf ~/logfiles.$(date +"%Y-%m-%d") $i; done', 'date +"%Y-%m-%d"'] 16 | 17 | def get_ssh_key_from_s3(): 18 | data = s3.get_object(Bucket=BUCKET_NAME, Key=KEY_NAME) 19 | ssh_key = data['Body'].read().decode() 20 | with open(LOCAL_SCRATCH_SPACE + KEY_NAME, 'w') as ink: 21 | ink.write(ssh_key) 22 | compress_logs_send_to_s3() 23 | 24 | def compress_logs_send_to_s3(): 25 | key = paramiko.RSAKey.from_private_key_file(LOCAL_SCRATCH_SPACE + KEY_NAME) 26 | client = paramiko.SSHClient() 27 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 28 | client.connect(hostname=EC2_HOSTNAME, username="ubuntu", pkey=key) 29 | client.exec_command(cmd[0]) 30 | time.sleep(1) 31 | _, stdout, _ = client.exec_command(cmd[1]) 32 | date_extension = re.sub('\s', "", stdout.read().decode()) 33 | get_compressed_logs_from_ec2(date_extension) 34 | 35 | def get_compressed_logs_from_ec2(extension): 36 | key = paramiko.RSAKey.from_private_key_file(LOCAL_SCRATCH_SPACE + KEY_NAME) 37 | client = paramiko.SSHClient() 38 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 39 | client.connect(hostname=EC2_HOSTNAME, username="ubuntu", pkey=key) 40 | ftp = client.open_sftp() 41 | ftp.get(TARGET_DIR_EC2 + f'logfiles.{extension}', LOCAL_SCRATCH_SPACE + f'/logfiles.{extension}') 42 | logfile_archive = open(LOCAL_SCRATCH_SPACE + f'/logfiles.{extension}', 'rb') 43 | send_to_s3(logfile_archive, extension) 44 | 45 | def send_to_s3(file,extension): 46 | response = s3.put_object(ACL='public-read', Body=file, Bucket=LOG_BUCKET, ContentType='application/octet-stream', 47 | Key=f'logfiles.{extension}') 48 | 49 | 50 | get_ssh_key_from_s3() 51 | 52 | #EC2 practice with BOTO3; learning PARAMIKO library 53 | #SSH on ec2 instance, create tar compressed archive of all log files, retrieve the files and send to s3 54 | #Elliott Arnold 10-15-19 WIP 55 | 56 | 
#https://aws.amazon.com/blogs/compute/scheduling-ssh-jobs-using-aws-lambda/ 57 | #https://medium.com/@keagileageek/paramiko-how-to-ssh-and-file-transfers-with-python-75766179de73 -------------------------------------------------------------------------------- /manage_cron.py: -------------------------------------------------------------------------------- 1 | import boto3, json 2 | 3 | client = boto3.client('ec2') 4 | 5 | def get_instance_ids(): 6 | response = client.describe_instances() 7 | instance_id_list = [[insta['InstanceId'] for insta in response['Reservations'][i]['Instances']] for i in range(len(response['Reservations']))] 8 | instance_id_strings = [i[0] for i in instance_id_list] 9 | return instance_id_strings 10 | 11 | def start_instances(event,context): 12 | print(event) 13 | instance_ids = get_instance_ids() 14 | response = client.start_instances( 15 | InstanceIds=instance_ids, 16 | DryRun=False) 17 | return response 18 | 19 | def stop_instances(event,context): 20 | instance_ids = get_instance_ids() 21 | response = client.stop_instances( 22 | InstanceIds=instance_ids, 23 | Force=True) 24 | return response 25 | 26 | def test_function(): 27 | response = get_instance_ids() 28 | http_response = { 29 | "statusCode": 200, 30 | "body": json.dumps(response) 31 | } 32 | return http_response 33 | 34 | 35 | #learning hands-on with Udemy - Start / Stop EC2 instances using AWS Lambda and Serverless Framework 36 | #Elliott Arnold 5-23-19 37 | #https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.start_instances 38 | #https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.stop_instances 39 | #https://serverless.com/framework/docs/providers/aws/guide/quick-start/ 40 | 41 | 42 | -------------------------------------------------------------------------------- /miniWikiChaliceJS/add.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 22 | 23 |
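All of add.html's markup was stripped during extraction; only bare line numbers survive above and below. A plausible reconstruction, inferred from the ids that app.js binds (#keyword, #url, #submit-btn) - hypothetical, not the original file:

<!-- hypothetical reconstruction of add.html; ids taken from app.js, CDN script tags are assumptions -->
<body>
  <label for="keyword">Keyword</label>
  <input id="keyword" type="text" />
  <label for="url">URL</label>
  <input id="url" type="text" />
  <button id="submit-btn">Add to wiki</button>
  <script src="https://code.jquery.com/jquery-3.5.1.min.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/axios/dist/axios.min.js"></script>
  <script src="app.js"></script>
</body>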
<!-- lines 24-41 likewise stripped --> -------------------------------------------------------------------------------- /miniWikiChaliceJS/app.js: -------------------------------------------------------------------------------- 1 | //index.html 2 | const makeUrlString = (basePath) => { 3 | 4 | return `https://6m295ad1if.execute-api.us-east-1.amazonaws.com/api/${basePath}/${Date.now()}` 5 | } 6 | 7 | $('#submit-button').click( () => { 8 | var keyword = $('#search-text').val() 9 | axios.post(makeUrlString('searchQuery'), {key: keyword}).then((response) => { 10 | var myData = response.data.success 11 | myData.forEach(element => { 12 | $('#bundle').append(`
${element}`) 13 | }); 14 | console.log(myData); 15 | }, (error) => {console.log(error); }); 16 | }) 17 | 18 | // add.html 19 | $('#submit-btn').click( () => { 20 | //take inputs 21 | var keyword = $('#keyword').val() 22 | var url = $('#url').val() 23 | //clear inputs 24 | $('#keyword').val('') 25 | $('#url').val('') 26 | 27 | axios.post(makeUrlString('addToDatabse'), { 28 | key: keyword, link: url 29 | 30 | }).then((response) => {console.log(response); 31 | 32 | },(error) => { console.log(error); }); 33 | }) 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /miniWikiChaliceJS/app.py: -------------------------------------------------------------------------------- 1 | from chalice import Chalice, CORSConfig 2 | import boto3, json 3 | 4 | app = Chalice(app_name='miniwiki-project-api') 5 | cors_config = CORSConfig(allow_origin='*') 6 | 7 | def filterTable(keyword,dynamoResultSet): 8 | filtered = [item for item in dynamoResultSet['Items'] if item['keyword']['S'] == keyword] 9 | return list({data['url']['S']:data['keyword']['S'] for data in filtered}.keys()) 10 | #TypeError: Object of type dict_keys is not JSON serializable unless list() is used 11 | #the key value of dict comprehension needs to be url because it's unique 12 | #otherwise the key would be overwritten (key values should have high-cardinality) 13 | 14 | def scanAndFilterTable(key): 15 | dynamoClient = boto3.client('dynamodb',region_name='us-east-1') 16 | dynamoResultSet = dynamoClient.scan(TableName='Miniwiki') 17 | return filterTable(key,dynamoResultSet) 18 | 19 | 20 | #databses usese composite primary key because multiple values could be stored under the same keyword 21 | def addDocument(keyword,url): 22 | dynamoResource = boto3.resource('dynamodb',region_name='us-east-1') 23 | miniWikiTable = dynamoResource.Table('Miniwiki') 24 | miniWikiTable.put_item(Item={str('keyword'): str(keyword), str('url'): str(url)}) 25 | 26 | 27 | @app.route('/addToDatabse/{data}', methods=['POST'],cors=cors_config, content_types=['application/json']) 28 | def addToDatabase(data): 29 | fullBody = app.current_request.json_body 30 | searchKey = fullBody['key'] 31 | searchUrl = str(fullBody['link']) 32 | addDocument(searchKey,searchUrl) 33 | print(fullBody) 34 | return {"searchKey-added": searchKey, 35 | "searchUrl-added": searchUrl } 36 | 37 | 38 | @app.route('/searchQuery/{data}', methods=['POST'],cors=cors_config, content_types=['application/json']) 39 | def searchQuery(data): 40 | requestData = app.current_request.json_body 41 | tableScanData = scanAndFilterTable(requestData['key']) 42 | print(tableScanData) 43 | return {"success": tableScanData} 44 | 45 | #AWS Chalice Python3 VanillaJS Jquery 46 | #Create a small wiki-website to help keep track of usefull links I come across 47 | #Elliott Arnold 9-26-20 DMS DFW Covid-19 48 | # curl -H "Content-Type: application/json" -X POST -d '{"hello": "world"}' 49 | #https://www.youtube.com/watch?v=M5QY2_8704o&list=PLrVQaveCtQdZavMBJ5lhSr88l-TZs904s&index=27 -------------------------------------------------------------------------------- /miniWikiChaliceJS/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Results 13 | 14 | 15 | 16 | 17 | 24 | 25 |
<!-- lines 26-56: markup stripped during extraction; the only visible text that survives is a "Begin search" button label and a "Mini Wiki Results" heading; stray line numbers from the stripped closing tags remain below -->
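A plausible reconstruction of the stripped search page, inferred from the selectors app.js uses (#search-text, #submit-button, #bundle) and the surviving labels - hypothetical, not the original file:

<!-- hypothetical reconstruction of index.html; ids taken from app.js, CDN script tags are assumptions -->
<body>
  <input id="search-text" type="text" placeholder="Enter a keyword" />
  <button id="submit-button">Begin search</button>
  <h3>Mini Wiki Results</h3>
  <div id="bundle"></div>
  <script src="https://code.jquery.com/jquery-3.5.1.min.js"></script>
  <script src="https://cdn.jsdelivr.net/npm/axios/dist/axios.min.js"></script>
  <script src="app.js"></script>
</body>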
57 | 58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /modify_instance_attribute/put_metric_data.py: -------------------------------------------------------------------------------- 1 | #!/bin/python3 2 | import subprocess 3 | import boto3 4 | import time 5 | import re 6 | 7 | class PIM: 8 | cloudwatch = boto3.client('cloudwatch', region_name='us-east-1') 9 | sns = boto3.client('sns',region_name='us-east-1') 10 | ssm = boto3.client('ssm',region_name='us-east-1') 11 | 12 | 13 | percent_idle = "iostat | grep -A1 avg-cpu | column | awk '{print $6}' | grep '[0-9]'" 14 | 15 | @classmethod 16 | def put_idle_metric(cls): 17 | pi = float(subprocess.check_output(cls.percent_idle, shell=True).decode('utf-8')) 18 | cls.cloudwatch.put_metric_data( 19 | MetricData=[ 20 | { 21 | 'MetricName': 'Custom_Percent_Idle', 22 | 'Dimensions': [ 23 | { 24 | 'Name': 'Custom Data', 25 | 'Value': 'Percent_Idle' 26 | }, 27 | ], 28 | 'Unit': 'Percent', 29 | 'Value': pi 30 | }, 31 | ], 32 | Namespace='Idle/CPU' 33 | ) 34 | 35 | return pi 36 | 37 | @classmethod 38 | def publish_instance_id(cls): 39 | param = cls.ssm.get_parameter(Name='cdk-sns-arn') 40 | sns_arn = param['Parameter']['Value'] 41 | result = subprocess.Popen("curl http://169.254.169.254/latest/meta-data/instance-id",stdout=subprocess.PIPE,shell=True) 42 | res,err = result.communicate() 43 | instance_id = re.findall(r'(i-[0-9aA-zZ]+)',res.decode())[0] 44 | cls.sns.publish(TargetArn=sns_arn, Message=instance_id) 45 | 46 | 47 | 48 | @classmethod 49 | def run_metric_for_minute(cls): 50 | count = 0 51 | for i in range(61): 52 | percent_idle = cls.put_idle_metric() 53 | if percent_idle > float(50): 54 | count +=1 55 | time.sleep(1) 56 | if count > 59: 57 | cls.publish_instance_id() 58 | 59 | 60 | 61 | PIM.run_metric_for_minute() 62 | 63 | #AWS EC2 SQS Cloudwatch practice exercise - Sending custom EC2 metrics to cloudwatch 64 | #Script runs from cron job checking the 'Idle/CPU' Percentage Metric at regular intervals (isostat) 65 | #Quick and Dirty 66 | #Elliott Arnold 12-14-2019 -> (edited 3-28-20) 67 | -------------------------------------------------------------------------------- /modify_instance_attribute/upgrade_instance.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from botocore.exceptions import ClientError 3 | 4 | 5 | def lambda_handler(event, context): 6 | print(event) 7 | modify = ModAttribute() 8 | modify.change_instance_type() 9 | 10 | class ModAttribute: 11 | 12 | def __init__(self): 13 | self.ec2 = boto3.client('ec2', region_name='us-east-1') 14 | self.sqs = boto3.client('sqs', region_name='us-east-1') 15 | 16 | 17 | def get_instance_id_type_mapping(self): 18 | return {data['Instances'][0]['InstanceId']: data['Instances'][0]['InstanceType'] for data in 19 | self.ec2.describe_instances()['Reservations']} 20 | 21 | 22 | def get_instance_type(self,instance_id): 23 | return self.get_instance_id_type_mapping()[instance_id] 24 | 25 | 26 | def change_instance_type(self): 27 | instance_id = self.filter_instances()[0] 28 | if instance_id != None: 29 | instance_class = ['t2.nano', 't2.micro', 't2.small', 't2.medium', 't2.large', 't2.xlarge'] 30 | #get current instance type 31 | instance_type = self.get_instance_type(instance_id) 32 | try: 33 | index = instance_class.index(instance_type) 34 | 35 | if index + 1 <= len(instance_class): 36 | try: 37 | self.mod_instance(instance_id, instance_class[index + 1]) 38 | except ClientError: 39 | pass 40 | 
except ValueError: 41 | pass 42 | 43 | 44 | def get_messages(self): 45 | # get the instance_id from the sqs queue 46 | queue = "https://sqs.us-east-1.amazonaws.com/705166095368/MON_IDLE_CPU" 47 | response = self.sqs.receive_message(QueueUrl=queue, MaxNumberOfMessages=10, VisibilityTimeout=10, WaitTimeSeconds=10 ) 48 | return response 49 | 50 | 51 | def filter_instances(self): 52 | return list(set([instance['Body'] for instance in self.get_messages()['Messages']])) 53 | 54 | 55 | def mod_instance(self,instance_id, instance_type): 56 | self.ec2.modify_instance_attribute(InstanceId=instance_id, InstanceType={'Value': instance_type }) 57 | 58 | 59 | #AWS EC2 practice exercise - Modifying EC2 instance attributes with Lambda 60 | #Lambda Polls the SQS queue for instances that have been flagged by script as using High CPU over X period of time 61 | #Once found the instance class is modified for better performance 62 | #Quick and Dirty 63 | #Elliott Arnold 12-14-2019 64 | 65 | -------------------------------------------------------------------------------- /monitor_new_files.py: -------------------------------------------------------------------------------- 1 | import subprocess, glob, time, paramiko 2 | from pathlib import Path 3 | 4 | class PollNewFiles: 5 | def __init__(self): 6 | self.files = self.poll_files() 7 | self.check_inventory_file_exists() 8 | 9 | def monitor(self): 10 | while True: 11 | time.sleep(5) 12 | self.files = self.poll_files() 13 | if len(self.check_new_files()) > 0: 14 | self.append_new_files() 15 | 16 | def poll_files(self): 17 | cmd = 'find . -maxdepth 1 -type f' 18 | return set(subprocess.check_output(cmd,shell=True).decode().split()) 19 | 20 | 21 | def check_inventory_file_exists(self): 22 | if not glob.glob('*inventory*'): 23 | cmd = 'touch inventory.txt' 24 | subprocess.check_output(cmd, shell=True) 25 | 26 | def read_inventory(self): 27 | return set([file.strip() for file in open('inventory.txt').readlines()]) 28 | 29 | 30 | def check_new_files(self): 31 | return self.files - self.read_inventory() 32 | 33 | 34 | def append_new_files(self): 35 | with open('inventory.txt','a') as ink: 36 | for new_file in self.check_new_files(): 37 | print(f'Adding {new_file} to inventory') 38 | ink.write(new_file + '\n') 39 | full_file_path = Path(new_file).resolve() 40 | try: 41 | SSH.sendfile(full_file_path) 42 | time.sleep(5) 43 | print(f'Adding {new_file} to EC2') 44 | except OSError as e: 45 | print(e) 46 | 47 | class SSH: 48 | @classmethod 49 | def sendfile(cls,file): 50 | cls._target_directory = '/home/ubuntu/si3mshady/' 51 | cls.hostname = 'ec2-54-158-115-200.compute-1.amazonaws.com' 52 | cls.local_key = '/Users/si3mshady/keyZ.pem' 53 | cls.key = paramiko.RSAKey.from_private_key_file(cls.local_key) 54 | cls.client = paramiko.SSHClient() 55 | cls.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 56 | cls.client.connect(hostname=cls.hostname, username="ubuntu", pkey=cls.key) 57 | cls.ftp = cls.client.open_sftp() 58 | cls.ftp.put(str(file), cls._target_directory + str(file).split('/')[-1]) 59 | cls.ftp.close() 60 | 61 | 62 | if __name__ == "__main__": 63 | delta = PollNewFiles() 64 | delta.monitor() 65 | 66 | #AWS #EC2 #Linux Practice - monitoring directory for new files - if new file detected the file is ssh'd to EC2 instance 67 | #Elliott Arnold 68 | #si3mshady 69 | #11-4-19 70 | -------------------------------------------------------------------------------- /mount_efs.py: -------------------------------------------------------------------------------- 1 | import 
boto3 2 | import time 3 | import paramiko 4 | 5 | 6 | class MountEfS: 7 | def __init__(self,instance_id): 8 | self.ec2 = boto3.client('ec2') 9 | self.instance_id = instance_id 10 | self.s3 = boto3.client('s3') 11 | 12 | def check_instance_tagged(self): 13 | results = self.ec2.describe_instances() 14 | 15 | #use of 'get' method will not throw exception error out if key is absent, rather provides a default value of None 16 | instanceId_tag_mapping = {i['Instances'][0]['InstanceId']: i['Instances'][0].get('Tags') \ 17 | for i in results['Reservations']} 18 | 19 | if instanceId_tag_mapping[self.instance_id] == None: 20 | self.mount_efs() 21 | self.tag_instance() 22 | else: 23 | pass 24 | 25 | def get_dns_name(self): 26 | response = self.ec2.describe_instances(InstanceIds=[self.instance_id]) 27 | return response['Reservations'][0]['Instances'][0]['PublicDnsName'] 28 | 29 | def download_ssh_key(self, bucket='efs-sandbox', key='acloudGuru.pem'): 30 | s3_resp = self.s3.get_object(Bucket=bucket, Key=key) 31 | ssh_key = s3_resp['Body'].read().decode() 32 | with open('/tmp/acg.pem', 'w') as ink: 33 | ink.write(ssh_key) 34 | 35 | def mount_efs(self): 36 | self.download_ssh_key() 37 | key = paramiko.RSAKey.from_private_key_file('/tmp/acg.pem') 38 | client = paramiko.SSHClient() 39 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 40 | client.connect(hostname=self.get_dns_name(), username="ec2-user", pkey=key) 41 | 42 | #install nfs, create directory and mount EFS in new instance 43 | commands = ["sudo yum install -y nfs-utils","mkdir /home/ec2-user/efs", 44 | "sudo mount -t nfs4 -o nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport fs-c1292740.efs.us-east-1.amazonaws.com:/ /home/ec2-user/efs"] 45 | 46 | for cmd in commands: 47 | _, stdout, _ = client.exec_command(cmd) 48 | time.sleep(8) 49 | client.connect(hostname=self.get_dns_name(), username="ec2-user", pkey=key) 50 | 51 | print('Commands Run Successfully.') 52 | 53 | def tag_instance(self): 54 | self.ec2.create_tags( 55 | DryRun=False, 56 | Resources=[ 57 | self.instance_id 58 | ], 59 | Tags=[ 60 | { 61 | 'Key': 'efs', 62 | 'Value': 'mounted' 63 | }, 64 | ] 65 | ) 66 | 67 | def lambda_handler(event,context): 68 | instance_id = event['detail']['instance-id'] 69 | mefs = MountEfS(instance_id) 70 | mefs.check_instance_tagged() 71 | 72 | #AWS EFS Cloudwatch practice = Using Cloudwatch and Lambda to mount Elastic File Systems (EFS) on new instances as they are launched - quick and dirty 73 | #Elliott Arnold 74 | #1-5-20 -------------------------------------------------------------------------------- /nestedStack_deployment_customDomain.yml: -------------------------------------------------------------------------------- 1 | --- 2 | Transform: AWS::Serverless-2016-10-31 3 | 4 | Resources: 5 | silverServerless: 6 | Type: AWS::Serverless::Application 7 | Properties: 8 | Location: nested/template.yml 9 | 10 | 11 | --- 12 | AWSTemplateFormatVersion: '2010-09-09' 13 | Transform: AWS::Serverless-2016-10-31 14 | Description: 15 | nestedResource2 16 | 17 | 18 | Resources: 19 | nestedResource2: 20 | Type: AWS::Serverless::HttpApi 21 | Properties: 22 | StageName: dev 23 | Domain: 24 | DomainName: www.breakingthechainz.com 25 | 26 | 27 | -------------------------------------------------------------------------------- /piechart_vanillaJS_python/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Multiple Charts 10 | 11 | 12 |
<!-- lines 13-23 (the chart containers) and lines 24-80 (the inline Chart.js script) were stripped during extraction; only stray line numbers remain below -->
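The stripped inline script presumably drew one pie chart per region from the Lambda's JSON (shape: {"data": {"us-east-1": {"running": n, "stopped": n, "terminated": n}, "us-west-1": {...}}}). A plausible sketch, hypothetical throughout - canvas ids, endpoint URL, colors, and the Chart.js 2.x CDN are assumptions:

<!-- hypothetical reconstruction, not the original file -->
<canvas id="us-east-1" width="300" height="300"></canvas>
<canvas id="us-west-1" width="300" height="300"></canvas>
<script src="https://cdn.jsdelivr.net/npm/chart.js@2.9.4/dist/Chart.min.js"></script>
<script>
  const API_URL = "https://example.execute-api.us-east-1.amazonaws.com/prod"; // placeholder endpoint
  fetch(API_URL).then(r => r.json()).then(({ data }) => {
    for (const region of ["us-east-1", "us-west-1"]) {
      new Chart(document.getElementById(region), {
        type: "pie",
        data: {
          labels: ["running", "stopped", "terminated"],
          datasets: [{
            data: [data[region].running, data[region].stopped, data[region].terminated],
            backgroundColor: ["#4caf50", "#ff9800", "#f44336"]
          }]
        },
        options: { title: { display: true, text: region } }
      });
    }
  });
</script>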
21 | 22 | 23 | 81 | 82 | 83 | 84 | 95 | -------------------------------------------------------------------------------- /piechart_vanillaJS_python/lambda_handler.py: -------------------------------------------------------------------------------- 1 | import boto3, json 2 | 3 | ec2 = boto3.client('ec2') 4 | 5 | 6 | def process_data_set(): 7 | #explanation of sequence: len -> list -> list_comp -> map -> lambda at bottom for future reference 8 | mapping = assess_all_instances()['data'] 9 | result_mapping = {} 10 | us_east_1_running = len([ el for el in list(map(lambda x: x if x['region'] == \ 11 | 'us-east-1' and x['state']['Name'] == 'running' else None, mapping)) if el != None]) 12 | 13 | us_east_1_stopped = len([ el for el in list(map(lambda x: x if x['region'] == \ 14 | 'us-east-1' and x['state']['Name'] == 'stopped' else None, mapping)) if el != None]) 15 | 16 | us_east_1_terminated = len([ el for el in list(map(lambda x: x if x['region'] == \ 17 | 'us-east-1' and x['state']['Name'] == 'terminated' else None, mapping)) if el != None]) 18 | 19 | 20 | us_west_1_running = len([ el for el in list(map(lambda x: x if x['region'] == \ 21 | 'us-west-1' and x['state']['Name'] == 'running' else None, mapping)) if el != None]) 22 | 23 | us_west_1_stopped = len([ el for el in list(map(lambda x: x if x['region'] == \ 24 | 'us-west-1' and x['state']['Name'] == 'stopped' else None, mapping)) if el != None]) 25 | 26 | us_west_1_terminated = len([ el for el in list(map(lambda x: x if x['region'] == \ 27 | 'us-west-1' and x['state']['Name'] == 'terminated' else None, mapping)) if el != None]) 28 | 29 | result_mapping['us-east-1'] = {'running': us_east_1_running, 'stopped': us_east_1_stopped, 'terminated': us_east_1_terminated} 30 | result_mapping['us-west-1'] = {'running': us_west_1_running, 'stopped': us_west_1_stopped, 'terminated': us_west_1_terminated} 31 | 32 | return {'data': result_mapping} 33 | 34 | 35 | 36 | def getRegions(): 37 | return [region['RegionName'] for region in ec2.describe_regions()['Regions']] 38 | 39 | 40 | 41 | def assess_all_instances(): 42 | main_array = [] 43 | for region in getRegions(): 44 | ec2 = boto3.client('ec2',region_name=region) 45 | instances = ec2.describe_instances() 46 | for i in instances["Reservations"]: 47 | main_array.append({"region": region,"state":i['Instances'][0]['State'], "instance_id":i['Instances'][0]['InstanceId']}) 48 | 49 | 50 | return {"data":main_array} 51 | 52 | def lambda_handler(event,context): 53 | return { 54 | 'statusCode': 200, 55 | 'headers': {'Content-Type': 'application/json', 56 | 'Access-Control-Allow-Origin': '*' }, 57 | 'body': json.dumps(process_data_set()), 58 | "isBase64Encoded": False 59 | 60 | } 61 | 62 | 63 | 64 | 65 | # AWS ApiGateaway Lambda Vanilla JS Python3 66 | # Create custom API w/ AWS Lambda and update pie chart using ChartJS framework 67 | # Elliott Arnold 9-20-20 Amazonian DMS DFW 68 | 69 | #''' You must first define the what the lambda will return value ie lambda x: x if x==2 else False {or None} 70 | # Current examle returns matching element from list otherwise returns None value. Must use Map in conjunction with Lambda to interate over 71 | # each element in list. 
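# A plausible simpler equivalent of the len -> list -> list_comp -> map -> lambda idiom described above
# (hypothetical, not part of the original file):
# def count_instances(mapping, region, state):
#     return sum(1 for el in mapping if el['region'] == region and el['state']['Name'] == state)
# count_instances(assess_all_instances()['data'], 'us-east-1', 'running')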
Use list list comprehension to filter out the None values and 72 | # get the length with len) len -> list -> list_comp -> map -> lambda ''' -------------------------------------------------------------------------------- /preFlightChecker_cors.js: -------------------------------------------------------------------------------- 1 | const exec = require('child_process').exec; 2 | const express = require('express') 3 | const bodyParser = require('body-parser') 4 | const app = express(); 5 | 6 | app.set('view engine', 'ejs') 7 | app.use(express.static("public")) 8 | app.use(bodyParser.urlencoded({ 9 | extended: true 10 | })); 11 | 12 | app.get('/', (req,res) => { 13 | res.render('index') 14 | }) 15 | 16 | app.post('/allowMethods', (req, res) => { 17 | const url = req.body.endpoint 18 | exec(`curl -IX OPTIONS ${url} | grep -i access-control-allow-methods`, 19 | (err,stdout,_) => { 20 | if (!err) { 21 | const allowMethods = stdout.split(':')[1].split(',') 22 | res.render('allowMethods', {"allowMethods": allowMethods}) 23 | } 24 | })}) 25 | 26 | app.post('/allowHeaders', (req, res) => { 27 | const url = req.body.endpoint 28 | exec(`curl -IX OPTIONS ${url} | grep -i access-control-allow-headers`, 29 | (err,stdout,_) => { 30 | if (!err) { 31 | const allowHeaders = stdout.split(':')[1].split(',') 32 | res.render('allowHeaders', {"allowHeaders": allowHeaders}) 33 | } 34 | })}) 35 | 36 | app.post('/allowOrigin', (req, res) => { 37 | const url = req.body.endpoint 38 | exec(`curl -IX OPTIONS ${url} | grep -i access-control-allow-origin`, 39 | (err,stdout,_) => { 40 | if (!err) { 41 | const allowOrigin = stdout.split(':')[1].split(',') 42 | res.render('allowOrigin', {"allowOrigin": allowOrigin}) 43 | } 44 | })}) 45 | 46 | 47 | 48 | app.listen(3001, () => { 49 | console.log("Server started on port 3001") 50 | }) 51 | 52 | // NodeJS API Gateway EJS Troubleshooting CORS with CURL font-end exercise "Pre-flight checker" 53 | // Create a small front end that leverages CURL utiltiy to check api attributes 54 | // of an API for troubleshooting 55 | //Elliott Arnold DMS 1-13-21 56 | -------------------------------------------------------------------------------- /recognize.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | 3 | rekognition = boto3.client('rekognition') 4 | s3 = boto3.client('s3') 5 | BUCKET_NAME = 'alexa-detect-images-tf' 6 | 7 | def lambda_handler(event,context): 8 | if event['request']['type'] == "LaunchRequest": 9 | count = getS3Count() 10 | message = f"You have {count} files in your S 3 bucket" 11 | return process_response(message) 12 | 13 | elif event['request']['type'] == "IntentRequest": 14 | return main(event) 15 | 16 | '''get file names from s3''' 17 | def getS3Keys(): 18 | response = s3.list_objects(Bucket=BUCKET_NAME) 19 | keys = [filename.get('Key') for filename in response['Contents']] 20 | return keys 21 | 22 | def getS3Count(): 23 | return len(getS3Keys()) 24 | 25 | '''preform image regognition with AWS rekognition''' 26 | def detect_label(photo, bucket='alexa-detect-images-tf'): 27 | response = rekognition.detect_labels(Image={'S3Object':{'Bucket':bucket,'Name':photo}}, MaxLabels=5) 28 | item = [img.get('Name') for img in response['Labels']][0] 29 | return item 30 | 31 | def process_response(message): 32 | speech_response = { 33 | "version": "1.0", 34 | "response": { 35 | "outputSpeech": { 36 | "type": "PlainText", 37 | "text": message 38 | }, 39 | "shouldEndSession": True 40 | } 41 | } 42 | return speech_response 43 | 44 | def 
main(event): 45 | intent = event['request']['intent']['name'] 46 | slot = event['request']['intent']['slots'] 47 | s3_items = getS3Keys() 48 | 49 | if intent == "detectImage": 50 | if slot['number']['value'] == "0": 51 | detected = detect_label(s3_items[0]) 52 | detection_message = f"Image number 1 is {detected}" 53 | return process_response(detection_message) 54 | 55 | elif slot['number']['value'] == "1": 56 | detected = detect_label(s3_items[1]) 57 | detection_message = f"Image number 2 is {detected}" 58 | return process_response(detection_message) 59 | 60 | elif slot['number']['value'] == "3": 61 | detected = detect_label(s3_items[2]) 62 | detection_message = f"Image number 3 is {detected}" 63 | return process_response(detection_message) 64 | 65 | #Alexa Custom Skill & AWS Rekognition practice 66 | #Lambda funtion triggered to preform image recognition from jpg files present in s3 bucket 67 | #Elliott Arnold 10-1-19 68 | #https://stackoverflow.com/questions/27742537/list-comprehensions-extracting-values-from-a-dictionary-in-a-dictionary 69 | 70 | -------------------------------------------------------------------------------- /remote_control_github/app.py: -------------------------------------------------------------------------------- 1 | import boto3, json 2 | from chalice import Chalice, CORSConfig 3 | 4 | app = Chalice(app_name='remote-control') 5 | cors_config = CORSConfig(allow_origin='*') 6 | 7 | ec2 = boto3.client('ec2') 8 | data = ec2.describe_instances() 9 | 10 | def get_ec2_instances(): 11 | return [insta['Instances'][0]['InstanceId'] for insta in data['Reservations']] 12 | 13 | def start_instances(instance_id): 14 | response = ec2.start_instances( InstanceIds=[instance_id], DryRun=False) 15 | return response 16 | 17 | def stop_instances(instance_id): 18 | response = ec2.stop_instances( 19 | InstanceIds=[instance_id], 20 | Force=True) 21 | return response 22 | 23 | @app.route('/{instance_id}', cors=True) 24 | def toggle_instance_status(instance_id): 25 | instanceMapping = {insta['Instances'][0]['InstanceId']:insta['Instances'][0]['State']['Name'] \ 26 | for insta in data['Reservations']} 27 | 28 | if instanceMapping[instance_id] == 'running': 29 | stop_instances(instance_id) 30 | else: 31 | print('need to start instance') 32 | start_instances(instance_id) 33 | 34 | 35 | 36 | @app.route('/', cors=True) 37 | def index(): 38 | return { 39 | 'statusCode': 200, 40 | 'headers': {'Content-Type': 'application/json', 41 | 'Access-Control-Allow-Origin': '*' }, 42 | 'body': json.dumps(get_ec2_instances()), 43 | "isBase64Encoded": False 44 | 45 | } 46 | 47 | 48 | 49 | #AWS Chalice ApiGateway Lambda Python JS Jquery CSS practice 50 | #Create a small remote control to turn on/off ec2 instances - use bootstrap and css 51 | #Elliott Arnold 52 | #10-4-20 Covid 19 53 | 54 | -------------------------------------------------------------------------------- /remote_control_github/index.js: -------------------------------------------------------------------------------- 1 | 2 | getAllButtons = () => { 3 | var buttons = $(".keypad-btn") 4 | return Array.from(buttons) 5 | } 6 | 7 | // create api that sends instance-id's 8 | setDataAttributes = (instance_list) => { 9 | buttons = getAllButtons() 10 | buttons.forEach((element, index) => { $(element).attr("data-instance",instance_list[index])}); 11 | } 12 | 13 | setClickListeners = () => { 14 | buttons = getAllButtons() 15 | buttons.forEach(element => { $(element).click( () => { 16 | // get data property of each element w/ jquery 17 | var instance_id = 
$(element).data().instance 18 | console.log(instance_id) 19 | // display data property of each button in input div 20 | $("#input").text(() => { 21 | return instance_id })})})} 22 | 23 | get_ec2_instances = () => { 24 | const url = "https://ra8npboyu6.execute-api.us-east-1.amazonaws.com/api/" 25 | axios.get(url) 26 | .then(res => { 27 | var instances = JSON.parse(res.data.body) 28 | setDataAttributes(instances) 29 | }).catch( err => {console.log(err)})} 30 | 31 | setToggleButton = () => { 32 | $('#toggle').click( () => { 33 | // get content of input space -> when button is pressed simulate led red light, on/off visual 34 | $(".led").css('background-color','red') 35 | setTimeout(() => { $(".led").css('background-color','white')}, 100); 36 | var instance_id = $("#input").text() 37 | console.log(instance_id) 38 | const toggleUrl = `https://ra8npboyu6.execute-api.us-east-1.amazonaws.com/api/${instance_id}` 39 | axios.get(toggleUrl) .then(res => { console.log(res) }).catch(err => { console.log(err) })})} 40 | 41 | setToggleButton() 42 | setClickListeners() 43 | get_ec2_instances() 44 | 45 | //AWS Chalice ApiGateway Lambda Python JS Jquery CSS practice 46 | //Create a small remote control to turn on/off ec2 instances - use bootstrap and css 47 | //Elliott Arnold 48 | //10-4-20 Covid 19 49 | -------------------------------------------------------------------------------- /remote_control_github/styles.css: -------------------------------------------------------------------------------- 1 | 2 | body { 3 | background-color: grey; 4 | } 5 | 6 | .power { 7 | position: relative; 8 | left: 39px; 9 | top: 97px; 10 | } 11 | 12 | .base { 13 | margin: 20px auto; 14 | height: 500px; 15 | width: 200px; 16 | /* background-color: #A9A9A9 ; */ 17 | background-color: black; 18 | border-radius: 29%; 19 | } 20 | 21 | .led { 22 | width: 10px; 23 | height: 10px; 24 | position: relative; 25 | left: 98px; 26 | background-color: white; 27 | 28 | } 29 | 30 | .input { 31 | font-size: 10px; 32 | width: 130px; 33 | height: 20px; 34 | background-color: orange; 35 | position: relative; 36 | left: 32px; 37 | top: 60px; 38 | text-align: center; 39 | border-radius: 30px; 40 | 41 | } 42 | 43 | .keypad-btn { 44 | width: 30px; 45 | border-radius: 30px; 46 | font-size: 9px; 47 | text-align: center; 48 | position: relative; 49 | left: 16px; 50 | margin-top: 12px; 51 | box-shadow: 3px 4px steelblue; 52 | } 53 | 54 | 55 | .button-container { 56 | 57 | margin-top: 120px; 58 | } 59 | 60 | 61 | button:active { 62 | background-color: yellowgreen !important; 63 | } -------------------------------------------------------------------------------- /report_for_duty.py: -------------------------------------------------------------------------------- 1 | from selenium import webdriver 2 | from selenium.webdriver.common.by import By 3 | 4 | 5 | #init 6 | chrome_driver = '/Users/e/automation/chromedriver' 7 | driver = webdriver.Chrome(executable_path=chrome_driver) 8 | driver.get("https://aws.amazon.com/") 9 | driver.implicitly_wait(8) 10 | 11 | #select my account from drop down 12 | drop_down_xpath = '//*[@id="m-nav"]/div[1]/div[2]/a[4]' 13 | dd = driver.find_element(By.XPATH,drop_down_xpath) 14 | dd.click() 15 | 16 | #select drop down 17 | console_xpath = "/html/body/div[6]/ul/li[1]/a" 18 | console_link = driver.find_element(By.XPATH,console_xpath) 19 | console_link.click() 20 | 21 | #enter login email 22 | input_xpath = '//*[@id="resolving_input"]' 23 | email_input = driver.find_element(By.XPATH,input_xpath) 24 | 
email_input.send_keys('elliott@arnold.com') 25 | 26 | #select next btn 27 | next_btn_xpath = '//*[@id="next_button"]' 28 | next_btn = driver.find_element(By.XPATH,next_btn_xpath) 29 | next_btn.click() 30 | 31 | #enter pw 32 | pw_xpath = '//*[@id="password"]' 33 | pw_input = driver.find_element(By.XPATH,pw_xpath) 34 | pw_input.send_keys("elliottsPassword") 35 | 36 | #submit pw 37 | btn_submit_xpath = '/html/body/div[1]/div[2]/div[1]/div[1]/div[3]/div[5]/button' 38 | sign_in = driver.find_element(By.XPATH,btn_submit_xpath) 39 | sign_in.click() 40 | 41 | #populate input search field 42 | console_input_xpath = '//*[@id="search-box-input"]' 43 | find_services = driver.find_element(By.XPATH,console_input_xpath) 44 | find_services.send_keys('api gateway') 45 | 46 | #select apigateway from drop down 47 | apigw_xpath = '//*[@id="search-box-input-dropdown-ag"]/awsui-select-option/div/div/div[1]' 48 | apigw = driver.find_element(By.XPATH,apigw_xpath) 49 | apigw.click() 50 | 51 | #AWS Python3 Console Selenium Automation - Sign in for the day - Select ApiGateway 52 | #Elliott Arnold 9-17-2020 AWS DMS DFW Amazonian 53 | #Quick and Dirty / Covid19 54 | -------------------------------------------------------------------------------- /request_lambda_authorizer.py: -------------------------------------------------------------------------------- 1 | 2 | class RequestAuthorizer: 3 | 4 | def parse_req_obj(self,**kwargs): 5 | _type = kwargs['type'] 6 | methodArn = kwargs['methodArn'] 7 | httpMethod = kwargs['httpMethod'] 8 | headers = kwargs['headers'] 9 | user_agent = headers['User-Agent'] 10 | username = headers['HeaderAuth1'] 11 | 12 | if username == "si3mshady" and user_agent == 'python-requests/2.22.0': 13 | params = {"Action":"execute-api:Invoke","Effect":"Allow",\ 14 | "Resource":methodArn,"principalId":username} 15 | return self.access_granted(**params) 16 | else: 17 | params = {"Action":"execute-api:Invoke","Effect":"Deny",\ 18 | "Resource":methodArn,"principalId":username} 19 | return self.access_denied(**params) 20 | 21 | def make_policy(self,**kwargs): 22 | action = kwargs['Action'] 23 | effect = kwargs['Effect'] 24 | resource = kwargs['Resource'] 25 | principal_id = kwargs['principalId'] 26 | 27 | statement= {} 28 | statement['Action'] = action 29 | statement['Effect'] = effect 30 | statement['Resource'] = resource 31 | 32 | policy_doc = {} 33 | policy_doc['Version'] = "2012-10-17" 34 | policy_doc['Statement'] = [statement] 35 | 36 | principal = {} 37 | principal['principalId'] = principal_id 38 | principal['policyDocument'] = policy_doc 39 | return principal 40 | 41 | def access_granted(self,**params): 42 | return self.make_policy(**params) 43 | 44 | def access_denied(self,**params): 45 | return self.make_policy(**params) 46 | 47 | def lambda_handler(event,context): 48 | print(event) 49 | ra = RequestAuthorizer() 50 | return ra.parse_req_obj(**event) 51 | 52 | 53 | #AWS APIGW Lambda Req Authorizer 54 | #APIGW + Lambda exercises 55 | #Elliott Arnold 5-27-20 56 | -------------------------------------------------------------------------------- /resize_fs_1/extend_fs.py: -------------------------------------------------------------------------------- 1 | import subprocess, re, time 2 | 3 | 4 | def extend_partition(): 5 | string_list = subprocess.check_output("lsblk | awk '{print $1}'", shell=True) 6 | # parse device name and partition 7 | # filter out empty lists and convert filter object to list 8 | matches = list(filter(None, [re.findall('([a-z0-9]{1,5})', element) for element in
string_list.decode('utf-8').split('\n')])) 9 | 10 | extend_partition = [f'sudo growpart /dev/{matches[0][0]} 1', f'sudo xfs_growfs /dev/{matches[1][0]}'] 11 | 12 | for cmd in extend_partition: 13 | subprocess.check_output(cmd, shell=True) 14 | time.sleep(8) 15 | 16 | 17 | extend_partition() 18 | 19 | #AWS Lambda Cloudwatch EBS Alarm Simulation 20 | #Increase root ebs volume size and filesystem upon low storage condition 21 | #Elliott Arnold 22 | #12-5-19 23 | -------------------------------------------------------------------------------- /resize_fs_1/pmiko.py: -------------------------------------------------------------------------------- 1 | import paramiko 2 | import boto3 3 | 4 | class UseParamiko: 5 | def __init__(self,instance_id): 6 | self.SSH_KEY_LOC = "/tmp/testing.pem" 7 | self.SCRIPT_LOC = "/tmp/extend_file_system.py" 8 | self.instance_id = instance_id 9 | self.ec2 = boto3.client('ec2') 10 | self.s3 = boto3.client('s3') 11 | 12 | def init(self): 13 | self.download_script() 14 | self.download_ssh_key() 15 | self.ssh_put_script() 16 | self.ssh_run_command() 17 | 18 | def get_dns_name(self): 19 | response = self.ec2.describe_instances(InstanceIds=[self.instance_id]) 20 | return response['Reservations'][0]['Instances'][0]['PublicDnsName'] 21 | 22 | def download_ssh_key(self,bucket='extend-fs',key='testing.pem'): 23 | s3_resp = self.s3.get_object(Bucket=bucket, Key=key) 24 | ssh_key = s3_resp['Body'].read().decode() 25 | with open(self.SSH_KEY_LOC,'w') as ink: 26 | ink.write(ssh_key) 27 | print('Ssh key downloaded') 28 | 29 | def download_script(self, bucket='extend-fs', key='extend_file_system.py'): 30 | s3_resp = self.s3.get_object(Bucket=bucket, Key=key) 31 | mount_script = s3_resp['Body'].read().decode() 32 | with open(self.SCRIPT_LOC, 'w') as ink: 33 | ink.write(mount_script) 34 | print('Script downloaded') 35 | 36 | def ssh_put_script(self): 37 | key = paramiko.RSAKey.from_private_key_file(self.SSH_KEY_LOC) 38 | client = paramiko.SSHClient() 39 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 40 | client.connect(hostname=self.get_dns_name(), username="ec2-user", pkey=key) 41 | ftp = client.open_sftp() 42 | ftp.put(self.SCRIPT_LOC,'/home/ec2-user/extend_file_system.py') 43 | ftp.close() 44 | print('Script Uploaded') 45 | 46 | def ssh_run_command(self): 47 | key = paramiko.RSAKey.from_private_key_file(self.SSH_KEY_LOC) 48 | client = paramiko.SSHClient() 49 | client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 50 | client.connect(hostname=self.get_dns_name(), username="ec2-user", pkey=key) 51 | _, stdout, _ = client.exec_command('python3 /home/ec2-user/extend_file_system.py') 52 | print('Command Run Successfully') 53 | 54 | #AWS Lambda Cloudwatch EBS Alarm Simulation 55 | #Increase root ebs volume size and filesystem upon low storage condition 56 | #Elliott Arnold 57 | #12-5-19 -------------------------------------------------------------------------------- /resize_fs_1/resize_root_partition.py: -------------------------------------------------------------------------------- 1 | from pmiko import UseParamiko 2 | import boto3 3 | import re 4 | 5 | def lambda_handler(event, context): 6 | data = event['Records'][0]['Sns']['Message'] 7 | instance_id = re.findall("(i-[a-z0-9]*)", data)[0] 8 | print(instance_id) 9 | modify = ModVolume(instance_id) 10 | modify.modVolume() 11 | 12 | class ModVolume: 13 | 14 | def __init__(self, instance_id): 15 | self.instance_id = instance_id 16 | self.ec2_client = boto3.client('ec2') 17 | self.ec2_resource = boto3.resource('ec2') 
18 | 19 | def modVolume(self): 20 | volume_id = self.get_volume_id(self.instance_id) 21 | self.modify_volume(volume_id) 22 | extend_fs = UseParamiko(self.instance_id) 23 | extend_fs.init() 24 | 25 | def get_volume_id(self, instance_id): 26 | results = self.map_all_instance_id_with_volume_id() 27 | volume_id = results[instance_id] 28 | return volume_id 29 | 30 | def get_current_volume_size(self, volume_id): 31 | volume = self.ec2_resource.Volume(volume_id) 32 | return volume.size 33 | 34 | def modify_volume(self, volume_id): 35 | response = self.ec2_client.modify_volume( 36 | DryRun=False, 37 | VolumeId=volume_id, 38 | Size=self.get_current_volume_size(volume_id) + 50, 39 | VolumeType='gp2' 40 | ) 41 | return response 42 | 43 | def map_all_instance_id_with_volume_id(self): 44 | 45 | volumes = self.ec2_client.describe_volumes() 46 | data_dictionary = {} 47 | for i in volumes.get('Volumes'): 48 | try: 49 | if i['Attachments'][0]['State'] == 'attached': 50 | data_dictionary[i['Attachments'][0]['InstanceId']] = i['Attachments'][0]['VolumeId'] 51 | except IndexError: 52 | pass 53 | return data_dictionary 54 | 55 | #AWS Lambda Cloudwatch EBS Alarm Simulation 56 | #Increase root ebs volume size and filesystem upon low storage condition 57 | #Elliott Arnold 58 | #12-5-19 -------------------------------------------------------------------------------- /send_fake_data.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import time 4 | from faker import Faker 5 | 6 | class ProduceDummyData: 7 | def __init__(self,iterations): 8 | self.stream = boto3.client('kinesis') 9 | self.fake = Faker() 10 | self.interations = iterations 11 | 12 | def fake_it_till_you_make_it(self): 13 | for _ in range(self.interations): 14 | shell = {} 15 | shell["Name"] = self.fake.name(), 16 | shell["Phone Number"] = self.fake.phone_number(), 17 | shell["SSN"] = self.fake.ssn() 18 | yield shell 19 | 20 | 21 | def dont_cross_the_streams(self): 22 | for fake_data in list(self.fake_it_till_you_make_it()): 23 | self.stream.put_record(StreamName="dont_cross_the_streams", Data=json.dumps(fake_data),PartitionKey="888") 24 | time.sleep(1) 25 | 26 | if __name__ == "__main__": 27 | producer = ProduceDummyData(8) 28 | producer.dont_cross_the_streams() 29 | 30 | 31 | #AWS Kinesis Lambda Aurora practice 32 | #Elliott Arnold BLM 33 | #6-5-20 34 | -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | service: shady-cron-manager 2 | 3 | provider: 4 | name: aws 5 | runtime: python3.6 6 | region: us-east-1 7 | profile: serverless_admin 8 | memorySize: 512 # optional, in MB, default is 1024 9 | 10 | functions: 11 | start_instances: 12 | handler: manage_cron.start_instances #required, handler = filename + function (filename.function) 13 | timeout: 60 14 | events: 15 | - schedule: 16 | rate: cron(0 9 * * ? *) 17 | 18 | stop_instances: 19 | handler: manage_cron.stop_instances 20 | timeout: 60 21 | events: 22 | - schedule: 23 | rate: cron(0 23 * * ? *) 24 | 25 | test_function: 26 | handler: manage_cron.test_function 27 | timeout: 60 28 | events: 29 | - schedule: 30 | rate: rate(10 minutes) 31 | 32 | 33 | #You can't specify the Day-of-month and Day-of-week fields in the same cron expression. 34 | #If you specify a value (or a *) in one of the fields, you must use a ? (question mark) in the other. 
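# For example (hypothetical schedules, not the ones used above):
#   rate: cron(0 12 * * ? *)       # every day at 12:00 UTC - day-of-week wildcarded with ?
#   rate: cron(0 18 ? * MON-FRI *) # weekdays at 18:00 UTC - day-of-month wildcarded with ?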
35 | 36 | 37 | #https://serverless.com/framework/docs/providers/aws/cli-reference/create/ 38 | #https://medium.com/blogfoster-engineering/running-cron-jobs-on-aws-lambda-with-scheduled-events-e8fe38686e20 39 | #https://serverless.com/framework/docs/providers/aws/guide/credentials/ 40 | #https://serverless.com/framework/docs/getting-started/ 41 | #https://serverless.com/framework/docs/providers/aws/guide/functions/ 42 | #https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html 43 | #https://github.com/serverless/serverless/issues/2344 44 | #https://serverless.com/framework/docs/providers/aws/guide/functions/ 45 | -------------------------------------------------------------------------------- /serverless_chat_app_basic/first_connection.py: -------------------------------------------------------------------------------- 1 | import json 2 | import random 3 | import boto3 4 | 5 | 6 | 7 | class WebSocketConnect: 8 | #on websocket connect wscat -c wss://888888.execute-api.us-east.amazonaws.com/test 9 | def __init__(self): 10 | self.username = 'user_' + str(random.randint(0,99)) 11 | self.ddb_table = self.get_ddb_table() 12 | 13 | def get_ddb_table(self): 14 | ddb = boto3.resource('dynamodb', region_name='us-east-1') 15 | return ddb.Table("connections_websocket") 16 | 17 | def update_db(self,connection_id): 18 | self.ddb_table.put_item(Item={"Username": self.username,"connectionId": connection_id}) 19 | return 1 20 | 21 | 22 | def lambda_handler(event, context): 23 | print(event) 24 | connection_id = event['requestContext']['connectionId'] 25 | wss = WebSocketConnect() 26 | if connection_id: 27 | if wss.update_db(connection_id): 28 | return { "statusCode": 200, "body": 'Connected.' } 29 | -------------------------------------------------------------------------------- /serverless_chat_app_basic/send_message.py: -------------------------------------------------------------------------------- 1 | import json 2 | import random 3 | import boto3 4 | 5 | class BroadcastMessages: 6 | def __init__(self,event): 7 | self.event = event 8 | self.stage = event['requestContext']['stage'] 9 | self.api_id = event['requestContext']['apiId'] 10 | self.region = 'us-east-1' 11 | self.domain = f'{self.api_id}.execute-api.{self.region}.amazonaws.com' 12 | self.management_url = f'https://{self.domain}/{self.stage}' 13 | 14 | self.ddb_client = boto3.client("dynamodb") 15 | self.management_api = boto3.client("apigatewaymanagementapi", endpoint_url = self.management_url) 16 | 17 | def scan_db_map_data(self) -> dict: 18 | result = self.ddb_client.scan(TableName="connections_websocket") 19 | user_id_mapping = {val['connectionId']['S']:val['Username']['S'] for val in result['Items']} 20 | return user_id_mapping 21 | 22 | def parse_user_message(self) -> tuple: 23 | conection_id = self.event['requestContext']['connectionId'] 24 | msg = json.loads(self.event['body'])['data'] 25 | return conection_id, msg 26 | 27 | def broadcast(self): 28 | users = self.scan_db_map_data() 29 | conection_id, msg = self.parse_user_message() 30 | 31 | for val in users.keys(): 32 | try: 33 | self.management_api.post_to_connection(ConnectionId=val,\ 34 | Data=f"User {users[conection_id]} posted {msg}".encode()) 35 | except Exception: 36 | pass 37 | 38 | 39 | def lambda_handler(event, context): 40 | print(event) 41 | wss = BroadcastMessages(event) 42 | wss.broadcast() 43 | 44 | #Serverless chat app - basic 45 | #Lambda, DynamoDb , Api gateway -------------------------------------------------------------------------------- 
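The two handlers above can be exercised with wscat; the endpoint, stage, and route wiring below are placeholders (assuming first_connection.py backs $connect and send_message.py backs the message route, which this section does not show):

$ wscat -c wss://<api-id>.execute-api.us-east-1.amazonaws.com/test
Connected (press CTRL+C to quit)
> {"data": "hello room"}
< User user_42 posted hello room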
--------------------------------------------------------------------------------
/service_quota_react_js_github/App.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import axios from 'axios';
3 | import './App.css';
4 | import Sections from './components/Sections/Sections'
5 | import AUX from './components/AUX/Aux'
6 | 
7 | 
8 | class App extends Component {
9 |   state = {
10 |     getUrl: 'https://acf9n1kt36.execute-api.us-east-1.amazonaws.com/sandbox/get-service-names',
11 |     postUrl: 'https://acf9n1kt36.execute-api.us-east-1.amazonaws.com/sandbox/get-sq-names',
12 |     serviceNames: []
13 | 
14 |   }
15 | 
16 |   componentDidMount() {
17 |     //use the lifecycle method componentDidMount when invoking the initial http request automatically
18 |     axios.get(this.state.getUrl).then(res => {
19 |       let data = res.data
20 |       data = data.replace("[","")
21 |       data = data.replace("]","")
22 |       const service_names = data.split(',')
23 |       this.setState({
24 |         serviceNames: service_names
25 |       })
26 |     })
27 |   }
28 | 
29 |   render() {
30 |     //transform serviceNames array into Sections objects - return JSX
31 |     let transformed = this.state.serviceNames.map(serviceName => {
32 |       return <Sections key={serviceName} sname={serviceName} postUrl={this.state.postUrl} />
33 |     })
34 |     return (
35 |       <AUX>
36 |         {transformed}
37 |       </AUX>
38 |     );
39 |   }
40 | }
41 | 
42 | export default App;
43 | 
44 | 
45 | //AWS Python React Boto3 Lambda API-Gateway Practice
46 | //Obtain service quota information via http request using Boto3, Lambda and APIGW
47 | //Show results in browser
48 | //Elliott Arnold DMS DFW 7-28-20 Part 1 -> TBC
49 | //Covid19
--------------------------------------------------------------------------------
/service_quota_react_js_github/Articles.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import axios from 'axios'
3 | import Aux from '../AUX/Aux'
4 | import '/...App.css';
5 | 
6 | class Articles extends Component {
7 |   //always use class components and state when performing async tasks and
8 |   // when one needs to re-render DOM elements
9 |   state = {
10 |     service_names: ''
11 |   }
12 | 
13 |   toggleView = (event) => {
14 |     //the 'p' tag is adjacent to the button which is using the 'hide' class
15 |     const paragraph = event.target.nextSibling
16 |     paragraph.classList.toggle('hide')
17 |   }
18 | 
19 |   getServiceName = (event, postUrl) => {
20 |     //the 'p' tag is adjacent to the button, retrieving the innerText and using it for post request
21 |     const service_name = event.target.nextSibling.innerText
22 |     const param = {"service": service_name}
23 |     axios.post(postUrl, param).then(data => {
24 |       const serviceQuotaNames = String(data.data);
25 |       console.log(serviceQuotaNames)
26 |       this.setState({ service_names: serviceQuotaNames })
27 |     })
28 |   }
29 | 
30 |   render () {
31 |     return (
32 |       <Aux>
33 |         <div>
34 |           <button onClick={(event) => this.getServiceName(event, this.props.postUrl)}>
35 |             Get Quota Names
36 |           </button>
37 |           <p className="hide">{this.props.sname}</p>
38 |           <button onClick={this.toggleView}>
39 |             Toggle Quota Names
40 |           </button>
41 |           <p className="hide">{this.state.service_names}</p>
42 |         </div>
43 |       </Aux>
44 |     )
45 |   }
46 | }
47 | 
48 | export default Articles;
49 | 
50 | 
51 | //AWS Python React Boto3 Lambda API-Gateway Practice
52 | //Obtain service quota information via http request using Boto3, Lambda and APIGW
53 | //Show results in browser
54 | //Elliott Arnold DMS DFW 7-28-20 Part 1 -> TBC
55 | //Covid19
--------------------------------------------------------------------------------
/service_quota_react_js_github/Aux.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | 
3 | const aux = (props) => (
4 |     <React.Fragment>
5 |         {props.children}
6 |     </React.Fragment>
7 | );
8 | export default aux;
9 | 
10 | 
11 | //AWS Python React Boto3 Lambda API-Gateway Practice
12 | //Obtain service quota information via http request using Boto3, Lambda and APIGW
13 | //Show results in browser
14 | //Elliott Arnold DMS DFW 7-28-20 Part 1 -> TBC
15 | //Covid19
--------------------------------------------------------------------------------
/service_quota_react_js_github/Sections.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import AUX from '../AUX/Aux'
3 | import Articles from '../Articles/Articles'
4 | 
5 | const section = (props) => (
6 |     // passing props App.js -> Sections.js -> Articles.js
7 |     <AUX>
8 |         <div>
9 |             <Articles sname={props.sname} postUrl={props.postUrl} />
10 |         </div>
11 |     </AUX>
12 | )
13 | 
14 | export default section;
15 | 
16 | 
17 | //AWS Python React Boto3 Lambda API-Gateway Practice
18 | //Obtain service quota information via http request using Boto3, Lambda and APIGW
19 | //Show results in browser
20 | //Elliott Arnold DMS DFW 7-28-20 Part 1 -> TBC
21 | //Covid19
--------------------------------------------------------------------------------
/service_quota_react_js_github/service_quotas.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | 
3 | class ServiceQuotas:
4 |     def __init__(self, event):
5 |         self.event = event
6 |         self.sq = boto3.client('service-quotas')
7 |         self.sq_document = self.list_all_services_with_quotas()
8 | 
9 |     def list_all_services_with_quotas(self):
10 |         sq_document = {}
11 |         all_services = self.sq.list_services()
12 |         for service in all_services.get('Services'):
13 |             result = self.sq.list_service_quotas(ServiceCode=service['ServiceCode'])
14 |             #some services have no list of quotas, so disregard
15 |             if len(result['Quotas']) > 1:
16 |                 sq_document[service['ServiceCode']] = result
17 | 
18 |         return sq_document
19 | 
20 |     def get_service_quota_details(self, service_code):
21 |         service_detail = self.sq_document[service_code]['Quotas']
22 |         quota_names = [quotaName['QuotaName'] for quotaName in service_detail]
23 |         return quota_names
24 | 
25 |     def get_service_names(self):
26 |         return [sn for sn in self.sq_document]
27 | 
28 | def lambda_handler(event, context):
29 |     if event.get('httpMethod', None) == 'GET':
30 |         sq = ServiceQuotas(event)
31 |         data = sq.get_service_names()
32 |         return {
33 |             'statusCode': 200,
34 |             'headers': {'Content-Type': 'application/json',
35 |                         'Access-Control-Allow-Origin': '*' },
36 |             'body': (str(data)),
37 |             "isBase64Encoded": False
38 |         }
39 | 
40 |     elif event.get('httpMethod', None) == 'POST':
41 |         sq = ServiceQuotas(event)
42 |         service_code = event.get('body').split('=')[-1]
43 |         if service_code in sq.get_service_names():
44 |             data = sq.get_service_quota_details(service_code)
45 |             return {
46 |                 'statusCode': 200,
47 |                 'headers': {'Content-Type': 'application/json',
48 |                             'Access-Control-Allow-Origin': '*' },
49 |                 'body': (str(data)),
50 |                 "isBase64Encoded": False
51 | 
52 |             }
53 | 
54 | 
55 | #AWS Python Boto3 Lambda API-Gateway Practice
56 | #Obtain service quota information via http request using Boto3, Lambda and APIGW
57 | #Elliott Arnold DMS DFW 7-28-20 Part 1 -> TBC
58 | #Covid19
59 | 
60 | 
--------------------------------------------------------------------------------
/service_quotas.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | 
3 | class ServiceQuotas:
4 |     def __init__(self, event):
5 |         self.event = event
6 |         self.sq = boto3.client('service-quotas')
7 |         self.sq_document = self.list_all_services_with_quotas()
8 | 
9 |     def list_all_services_with_quotas(self):
10 |         sq_document = {}
11 |         all_services = self.sq.list_services()
12 |         for service in all_services.get('Services'):
13 |             result = self.sq.list_service_quotas(ServiceCode=service['ServiceCode'])
14 |             #some services have no list of quotas, so disregard
15 |             if len(result['Quotas']) > 1:
16 |                 sq_document[service['ServiceCode']] = result
17 | 
18 |         return sq_document
19 | 
20 |     def get_service_quota_details(self, service_code):
21 |         service_detail = self.sq_document[service_code]['Quotas']
22 |         quota_names = [quotaName['QuotaName'] for quotaName in service_detail]
23 |         return quota_names
24 | 
25 |     def get_service_names(self):
26 |         return [sn for sn in self.sq_document]
27 | 
28 | def lambda_handler(event, context):
29 |     if event.get('httpMethod', None) == 'GET':
30 |         sq = ServiceQuotas(event)
31 |         data = sq.get_service_names()
32 |         return {
33 |             'statusCode': 200,
34 |             'headers': {'Content-Type': 'application/json',
35 |                         'Access-Control-Allow-Origin': '*' },
36 |             'body': (str(data)),
37 |             "isBase64Encoded": False
38 |         }
39 | 
40 |     elif event.get('httpMethod', None) == 'POST':
41 |         sq = ServiceQuotas(event)
42 |         service_code = event.get('body').split('=')[-1]
43 |         if service_code in sq.get_service_names():
44 |             data = sq.get_service_quota_details(service_code)
45 |             return {
46 |                 'statusCode': 200,
47 |                 'headers': {'Content-Type': 'application/json',
48 |                             'Access-Control-Allow-Origin': '*' },
49 |                 'body': (str(data)),
50 |                 "isBase64Encoded": False
51 | 
52 |             }
53 | 
54 | 
55 | #AWS Python Boto3 Lambda API-Gateway Practice
56 | #Obtain service quota information via http request using Boto3, Lambda and APIGW
57 | #Elliott Arnold DMS DFW 7-28-20 Part 1 -> TBC
58 | #Covid19
59 | 
60 | 
--------------------------------------------------------------------------------
/set_time_zone.py:
--------------------------------------------------------------------------------
1 | import boto3, time
2 | 
3 | class Nsync:
4 |     def __init__(self, instance_id):
5 |         self.instance_id = instance_id
6 |         self.ssm = boto3.client('ssm')
7 |         self.ec2 = boto3.client('ec2')
8 | 
9 |     def sync(self):
10 |         self.check_instance_ubuntu()
11 |         self.change_tz()
12 | 
13 |     def check_instance_ubuntu(self):
14 |         try:
15 |             while True:
16 |                 data = self.ec2.get_console_output(InstanceId=self.instance_id)
17 |                 if 'Output' not in data.keys():
18 |                     time.sleep(8)
19 |                     continue
20 |                 if 'ubuntu' in data['Output'].lower():
21 |                     self.assign_ssm_role()
22 |                     break
23 |                 else:
24 |                     break  #not Ubuntu: bail out instead of spinning forever
25 |         except Exception as e:
26 |             print(e)
27 | 
28 |     def assign_ssm_role(self):
29 |         self.ec2.associate_iam_instance_profile(
30 |             IamInstanceProfile={'Arn': 'arn:aws:iam::705166095368:instance-profile/SSM_Sandbox',
31 |                                 'Name': 'AmazonSSMFullAccess'}, InstanceId=self.instance_id)
32 | 
33 |     def change_tz(self):
34 |         try:
35 |             while True:
36 |                 self.ssm.send_command(InstanceIds=[self.instance_id], DocumentName='AWS-RunShellScript',
37 |                                       Parameters={'commands': ["sudo timedatectl set-timezone America/Chicago"]})
38 | 
39 |                 print('Command Completed Successfully')
40 | 
41 |                 break
42 |         except Exception as e:
43 |             if "InvalidInstanceId" in str(e):
44 |                 time.sleep(8)
45 |                 self.change_tz()
46 | 
47 | def lambda_handler(event, context):
48 |     instance_id = event['detail']['instance-id']
49 |     time_sync = Nsync(instance_id)
50 |     time_sync.sync()
51 | 
52 | #AWS SSM EC2 Practice - Using Lambda to set timezone on launched instances with SSM
53 | #Quick and Dirty - Use lambda to set the timezone on newly launched instance after confirming OS type is Ubuntu
54 | #2-26-20
55 | 
56 | 
--------------------------------------------------------------------------------
/slack_bolt_slash_cmd_add_workspace_user.py:
--------------------------------------------------------------------------------
1 | import re, boto3, json
2 | from slack_bolt import App
3 | from slack_bolt.adapter.aws_lambda import SlackRequestHandler
4 | 
5 | app = App(process_before_response=True)
6 | ssm = boto3.client('ssm', region_name='us-east-1')
7 | sm = boto3.client('secretsmanager', region_name='us-east-1')
8 | secrets = sm.get_secret_value(SecretId='login').get('SecretString')
9 | secrets = json.loads(secrets)
10 | 
11 | def run_commands_ssm(fields):
12 |     #use ssm to run command on managed instance which is running selenium container
13 |     try:
14 |         cmd_args = " ".join(fields)
15 |         #slack url-encodes the '@' as '%40', which causes add-user to fail in the browser
16 |         cmd_args = cmd_args.replace('%40','@')
17 |         cmd = f'sudo docker run si3mshady/headless-ec2-adduser:1 {cmd_args}'
18 |         print(cmd)
19 |         return ssm.send_command(InstanceIds=[secrets.get('instance_id')], DocumentName='AWS-RunShellScript',
20 |                                 Parameters={'commands': [cmd]})
21 |     except TypeError:
22 |         pass
23 | 
24 | @app.command("/create_workspace")
25 | def create_workspace_user(ack, say, body, respond, command):
26 |     print('this is the body')
27 |     #command arguments are transformed into a string with spaces
28 |     username = body.get('text').split(" ")[0]
29 |     # Acknowledge command request
30 |     ack()
31 |     say( f"Creating AWS workspace User => {username} 👍")
32 | 
33 | 
34 | def process_create_command_data(data):
35 | 
36 |     pattern = r"create_workspace\S+text=([\w\+\.&%]+)&"
37 |     try:
38 |         m = re.search(pattern, data)
39 | 
40 |         fields = m.group(1).split('+')
41 |         if len(fields) != 4:
42 | 
43 |             return {
44 |                 "message":"Create command requires (4) attributes"
45 |             }
46 |         return fields
47 |     except Exception as e:
48 |         pass
49 | 
50 | 
51 | 
52 | def handler(event, context):
53 |     print(event.get('body'))
54 |     slack_handler = SlackRequestHandler(app=app)
55 |     fields = process_create_command_data(event.get('body'))
56 |     print(fields)
57 |     result = run_commands_ssm(fields)
58 |     print(result)
59 |     return slack_handler.handle(event, context)
60 | 
61 | #Elliott Arnold
62 | #Slackbot using slackbolt -> Lambda -> SSM -> Docker create AWS Workspace user
63 | #6-13-21
64 | #wip
65 | 
--------------------------------------------------------------------------------
/ssm_raid0.py:
--------------------------------------------------------------------------------
1 | import boto3, re, time
2 | 
3 | class ConfigureRaid0:
4 |     def __init__(self, instance_id):
5 |         self.instance_id = instance_id
6 |         self.ec2 = boto3.client('ec2')
7 |         self.ssm = boto3.client('ssm')
8 |         self.label = f'RAID0_{self.instance_id}'
9 |         self.raid_commands = self.prepare_raid_commands()
10 | 
11 |     def assign_ssm_iam_role(self):
12 |         self.ec2.associate_iam_instance_profile(
13 |             IamInstanceProfile={'Arn': 'arn:aws:iam::705166095368:instance-profile/SSM_Sandbox', 'Name': 'AmazonSSMFullAccess' },
14 |             InstanceId=self.instance_id)
15 | 
16 |     def run_commands_ssm(self):
17 |         for cmd in self.raid_commands:
18 |             self.ssm.send_command(InstanceIds=[self.instance_id], DocumentName='AWS-RunShellScript',
19 |                                   Parameters={'commands': [cmd]})
20 |             time.sleep(3)
21 | 
22 |     def get_device_name(self):
23 |         #get EBS device names as detected on target instance
24 |         data = self.ec2.describe_instances(InstanceIds=[self.instance_id])
25 |         device_names = [dn['DeviceName'] \
26 |                         for dn in data['Reservations'][0]['Instances'][0]['BlockDeviceMappings'] \
27 |                         if dn['DeviceName'] != '/dev/xvda']
28 |         device_names = [re.sub('s', 'xv', dev) for dev in device_names]
29 |         return device_names
30 | 
31 |     def prepare_raid_commands(self):
32 |         cmd_list = []
33 |         dev_names = self.get_device_name()
34 |         num_devices = len(dev_names)
35 |         raid0_cmd = f"sudo mdadm --create --verbose /dev/md0 --level=0 \
36 |             --name={self.label} --raid-devices={num_devices} {' '.join(dev_names)}"
37 |         cmd_list.append(raid0_cmd)
38 |         #the follow-up steps are deferred with at(1); each command is echoed into at,
39 |         #since piping a command's own output into at would run it immediately instead
40 |         make_ext4_fs = f"echo 'sudo mkfs.ext4 -L {self.label} /dev/md0' | at now + 5 minutes"
41 |         cmd_list.append(make_ext4_fs)
42 |         create_mdadm_conf = "echo 'sudo mdadm --detail --scan | sudo tee -a /etc/mdadm.conf' | at now + 6 minutes"
43 |         cmd_list.append(create_mdadm_conf)
44 |         create_initramfs = "echo 'sudo dracut -H -f /boot/initramfs-$(uname -r).img $(uname -r)' | at now + 7 minutes"
45 |         cmd_list.append(create_initramfs)
46 |         mkdir = "echo 'sudo mkdir -p /mnt/raid' | at now + 8 minutes"
47 |         cmd_list.append(mkdir)
48 |         mount = "echo 'sudo mount /dev/md0 /mnt/raid/' | at now + 9 minutes"
49 |         cmd_list.append(mount)
50 |         back_up_fstab = "echo 'sudo cp /etc/fstab /etc/fstab.orig' | at now + 9 minutes"
51 |         cmd_list.append(back_up_fstab)
52 |         amend_fstab = "echo 'echo /dev/md0 /mnt/raid/ ext4 defaults,nofail 0 2 | sudo tee -a /etc/fstab' | at now + 10 minutes"
53 |         cmd_list.append(amend_fstab)
54 |         return cmd_list
55 | 
56 | 
57 | def lambda_handler(event, context):
58 |     instance_id = event['detail']['responseElements']['instanceId']
59 |     config = ConfigureRaid0(instance_id)
60 |     config.assign_ssm_iam_role()
61 |     config.run_commands_ssm()
62 | 
63 | #AWS SSM LAMBDA LINUX practice
64 | #Execute commands on managed EC2 instances using ssm
65 | #Configure EBS volumes with Raid0 for increased iops performance
66 | #Elliott Arnold 2-1-20
67 | #https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/raid-config.html
68 | 
--------------------------------------------------------------------------------
/stepFunctionSandbox/func1.py:
--------------------------------------------------------------------------------
1 | import boto3, time
2 | 
3 | ssm = boto3.client('ssm')
4 | 
5 | cmds = ['cd /home/ec2-user; ls']
6 | 
7 | 
8 | def runSSMCommands(instanceId):
9 |     cmd_id_list = []
10 |     for cmd in cmds:
11 |         result = ssm.send_command(InstanceIds=[instanceId], DocumentName='AWS-RunShellScript', Parameters={'commands': [cmd]})
12 |         time.sleep(3)
13 |         cmd_id_list.append(result['Command']['CommandId'])
14 |     return cmd_id_list
15 | 
16 | 
17 | 
18 | def lambda_handler(event, context):
19 |     print(event)
20 |     instanceId = event['instanceId']
21 |     cmd_id_list = runSSMCommands(instanceId)
22 |     return {'commandIds':cmd_id_list, 'instanceId':instanceId}
23 | 
24 | 
25 | #AWS StepFunctions Lambda SSM S3 practice exercise
26 | #Create a 3-state state machine that will capture a listing of files in a directory and push it to s3
27 | #Elliott Arnold Team DMS
28 | # 12-10-20 Covid-19 Mothership AWS Everythings not lost
29 | #ThreeStateMachina
30 | 
--------------------------------------------------------------------------------
/stepFunctionSandbox/func2.py:
--------------------------------------------------------------------------------
1 | import boto3, time
2 | 
3 | ssm = boto3.client('ssm')
4 | #i-070972dd8eafb4cf1
5 | 
6 | def getSSMCommandResults(cmd_id, instanceId):
7 |     kwargs = {"CommandId":cmd_id, "InstanceId":instanceId}
8 |     result = ssm.get_command_invocation(**kwargs)['StandardOutputContent']
9 |     resultList = result.split('\n')
10 | 
11 |     return resultList
12 | 
13 | 
14 | def lambda_handler(event, context):
15 |     print(event)
16 |     instanceId = event['instanceId']
17 |     if len(event) > 0:
18 |         for cmd_id in event['commandIds']:
19 |             resultList = getSSMCommandResults(cmd_id, instanceId)
20 |             time.sleep(3)
21 |             print(resultList)
22 |     return {'instanceId':instanceId, 'resultList': resultList}
23 | 
24 | #AWS StepFunctions Lambda SSM S3 practice exercise
25 | #Create a 3-state state machine that will capture a listing of files in a directory and push it to s3
26 | #Elliott Arnold Team DMS
27 | # 12-10-20 Covid-19 Mothership AWS Everythings not lost
28 | #ThreeStateMachina
29 | 
30 | #AWS #Python3 #Stepfunctions #SystemsManager #Lambda #S3 mashup practice exercise. As the world turns and time progresses I began to support another great aws service from the #DMS profile named Step Functions. Step Functions allow for orchestrating a series of AWS services or, in this case lambda functions, in a pre-determined order to accomplish tasks, think #batchJobs. Step functions are defined using the Amazon States Language: json documents with special keys that dictate the flow of work within the state machine. For practice I decided to create a 3-stage state machine where the output of one lambda function becomes the input of the next. Here I'm just capturing a listing of files under a given directory on an SSM managed instance and ultimately copying them to an S3 bucket. #practice #pythonic #scripting #threeStateMachina #motherShipAWS
--------------------------------------------------------------------------------
/stepFunctionSandbox/func3.py:
--------------------------------------------------------------------------------
1 | import boto3, time
2 | 
3 | ssm = boto3.client('ssm')
4 | bucketName = 'stepfunctionoutput'
5 | 
6 | 
7 | 
8 | def copyToS3(instanceId, fileList):
9 |     for file in fileList:
10 |         ssm.send_command(InstanceIds=[instanceId], DocumentName='AWS-RunShellScript', \
11 |                          Parameters={'commands': [f"aws s3 cp /home/ec2-user/{file} s3://{bucketName}/{file}"]})
12 |         time.sleep(3)
13 | 
14 | 
15 | def lambda_handler(event, context):
16 |     print(event)
17 |     instanceId = event['instanceId']
18 |     resultList = event['resultList']
19 |     copyToS3(instanceId, resultList[:10]) #just copy the first 10
20 | 
21 | 
22 |     return {
23 |         'statusCode': 200,
24 |         'body': 'Success!'
25 |     }
26 | 
27 | 
28 | 
29 | #AWS StepFunctions Lambda SSM S3 practice exercise
30 | #Create a 3-state state machine that will capture a listing of files in a directory and push it to s3
31 | #Elliott Arnold Team DMS
32 | # 12-10-20 Covid-19 Mothership AWS Everythings not lost
33 | #ThreeStateMachina
34 | 
--------------------------------------------------------------------------------
/stepFunctionSandbox/stateMachina.json:
--------------------------------------------------------------------------------
1 | {
2 |   "Comment": "init",
3 |   "StartAt": "firstFunction",
4 |   "States": {
5 |     "firstFunction": {
6 |       "Type": "Task",
7 |       "Resource": "arn:aws:lambda:us-east-1:705****8:function:testing_step_functions",
8 |       "Next": "secondFunction"
9 |     },
10 |     "secondFunction": {
11 |       "Type": "Task",
12 |       "Resource": "arn:aws:lambda:us-east-1:705****8:function:testing-step-functions-2",
13 |       "Next": "thirdFunction"
14 |     },
15 |     "thirdFunction": {
16 |       "Type": "Task",
17 |       "Resource": "arn:aws:lambda:us-east-1:705****8:function:testing-step-functions-3",
18 |       "End": true
19 |     }
20 | 
21 |   }
22 | }
23 | 
24 | 
--------------------------------------------------------------------------------
/traffic_report_ec2.py:
--------------------------------------------------------------------------------
1 | import selenium, os
2 | from selenium.webdriver.chrome.options import Options
3 | from selenium.webdriver.common.by import By
4 | import time, boto3
5 | from email.mime.application import MIMEApplication
6 | from email.mime.multipart import MIMEMultipart
7 | 
8 | 
9 | 
10 | class TrafficJam:
11 |     @classmethod
12 |     def init_driver(cls):
13 |         driver_location = '/usr/bin/chromedriver'
14 |         os.environ['webdriver.chrome.driver'] = driver_location
15 | 
16 |         options = Options()
17 |         options.headless = True
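        #Note (assumption, not in the original script): options.headless was deprecated
        #in Selenium 4.8; on newer releases the equivalent spelling would be
        #options.add_argument('--headless')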
18 | 
19 |         driver = selenium.webdriver.Chrome(driver_location, options=options)
20 |         url = 'http://its.txdot.gov/dal/dal.htm'
21 |         driver.implicitly_wait(8)
22 |         driver.get(url)
23 |         return driver
24 | 
25 |     @classmethod
26 |     def traffic_report(cls):
27 |         '''fetch traffic data from TXDot'''
28 |         driver = cls.init_driver()
29 |         driver.fullscreen_window()
30 |         driver.find_element(By.XPATH, "//a[@href and contains(text(),'Incidents')]").click()
31 |         time.sleep(8)
32 |         driver.save_screenshot('Incidents.png')
33 |         cls.send_email('Incidents.png')
34 | 
35 |         driver.find_element(By.XPATH, "//a[@href and @id='tab6Link']").click()
36 |         time.sleep(8)
37 |         driver.save_screenshot('TravelTimes.png')
38 |         cls.send_email('TravelTimes.png')
39 | 
40 | 
41 |     @classmethod
42 |     def send_email(cls, filename):
43 |         ses = boto3.client('ses', region_name='us-east-1')
44 |         '''documentation = https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-email-raw.html '''
45 |         msg = MIMEMultipart('mixed')
46 |         # Add subject, from and to lines.
47 |         msg['Subject'] = 'Traffic_Report'
48 |         msg['From'] = 'si3mshady@gmail.com'
49 |         msg['To'] = 'alquimista2891@gmail.com'
50 | 
51 |         # Create a multipart/alternative child container.
52 |         msg_body = MIMEMultipart('alternative')
53 | 
54 |         # Define the attachment part and encode it using MIMEApplication.
55 |         att = MIMEApplication(open(filename, 'rb').read())
56 | 
57 |         # Add a header to tell the email client to treat this part as an attachment,
58 |         # and to give the attachment a name.
59 |         att.add_header('Content-Disposition', 'attachment', filename=filename)
60 | 
61 |         # Attach the multipart/alternative child container to the multipart/mixed
62 |         # parent container.
63 |         msg.attach(msg_body)
64 | 
65 |         # Add the attachment to the parent container.
66 |         msg.attach(att)
67 |         print('Sending email')
68 |         response = ses.send_raw_email(
69 |             Source='si3mshady@gmail.com',
70 |             Destinations=[
71 |                 'alquimista2891@gmail.com'
72 |             ],
73 |             RawMessage={
74 |                 'Data': msg.as_string(),
75 |             }
76 |         )
77 | 
78 | TrafficJam.traffic_report()
79 | 
80 | 
81 | 
82 | #AWS/EC2/Selenium - Practice: Using Chrome Headless Mode on EC2 to fetch traffic screenshots
83 | #Elliott Arnold 11-15-19
84 | 
85 | #https://tecadmin.net/setup-selenium-chromedriver-on-ubuntu/
86 | #https://chromedriver.chromium.org/getting-started
87 | #https://www.parrotqa.com/selenium-tutorial
--------------------------------------------------------------------------------
/userdata_kubernetes_bootsrap_to_deployment.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | sudo hostnamectl set-hostname ${nodename} &&
3 | curl -sfL https://get.k3s.io | sh -s - server \
4 |   --write-kubeconfig-mode 644 \
5 |   --tls-san=$(curl http://169.254.169.254/latest/meta-data/public-ipv4)
6 | 
7 | 
8 | NewFile=si3mshady_strayaway.yml
9 | (
10 | cat <<'DEPLOYMENT'
11 | apiVersion: apps/v1
12 | kind: Deployment
13 | metadata:
14 |   name: strayaway
15 | spec:
16 |   replicas: 2
17 |   selector:
18 |     matchLabels:
19 |       # manage pods carrying the label si3mshady: strayaway
20 |       si3mshady: strayaway
21 |   template:
22 |     metadata:
23 |       labels:
24 |         si3mshady: strayaway
25 |     spec:
26 |       containers:
27 |       - name: strayaway
28 |         image: si3mshady/strayaway
29 |         ports:
30 |         - containerPort: 888
31 |           hostPort: 5000
32 | DEPLOYMENT
33 | ) > $NewFile
34 | 
35 | kubectl apply -f $NewFile
36 | 
--------------------------------------------------------------------------------
/weather_forecast.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 | from lxml import html
3 | import requests
4 | import boto3
5 | 
6 | class ScrapeWeatherReport:
7 | 
8 |     @classmethod
9 |     def scrape_and_send_next_10(cls):
10 |         cls.url = 'https://www.wfaa.com/10-day'
11 |         cls.rawPage = requests.get(cls.url).content
12 |         cls.webPage = html.fromstring(cls.rawPage)
13 |         cls.mapped_temps = cls.combine_dates_and_temps(cls.parse_high_temp(cls.webPage), cls.parse_low_temp(cls.webPage))
14 |         cls.publish(cls.mapped_temps)
15 | 
16 |     @classmethod
17 |     def publish(cls, json_message):
18 |         sns = boto3.client('sns')
19 | 
20 |         response = sns.publish(
21 |             TopicArn='arn:aws:sns:us-east-1:952151691101:weather_data_scrape',
22 |             Message=str(json_message),
23 |             Subject='10-Day Forecast',
24 |             MessageStructure='string'
25 |         )
26 | 
27 |     @classmethod
28 |     def combine_dates_and_temps(cls, high, low):
29 |         '''map date string to high/low temperature tuple'''
30 |         combined = {}
31 |         cls.days = list(cls.next10Days())
32 |         for i, _ in enumerate(high):
33 |             combined[cls.days[i]] = (high[i], low[i])
34 |         return combined
35 | 
36 |     @classmethod
37 |     def parse_high_temp(cls, webpage):
38 |         '''xpath for next 10 day high temps'''
39 |         return webpage.xpath("//div[@class='forecast__high forecast__var']/text()")
40 | 
41 |     @classmethod
42 |     def parse_low_temp(cls, webpage):
43 |         '''xpath for next 10 day low temps'''
44 |         return webpage.xpath("//div[@class='forecast__low forecast__var']/text()")
45 | 
46 |     @classmethod
47 |     def next10Days(cls):
48 |         '''generate formatted date strings'''
49 |         for day in range(11):
50 |             yield (datetime.now() + timedelta(days=day)).strftime('%m/%d/%Y')
51 | 
52 | 
53 | def lambda_handler(event, context):
54 |     ScrapeWeatherReport.scrape_and_send_next_10()
55 | 
56 | 
57 | #AWS Lambda, SNS, Webscrape practice
58 | #Quick and dirty function for scraping the 10-day forecast data using lxml & xpath
59 | #Elliott Arnold
60 | #11-11-19
61 | #happy veterans day
62 | #go navy
63 | #si3mshady
64 | 
65 | 
--------------------------------------------------------------------------------
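
Since weather_forecast.py exposes a plain lambda_handler, it fits the same scheduled-event pattern as the shady-cron-manager service earlier in this repo. A minimal boto3 sketch of wiring it to a daily EventBridge rule; the rule name, account id, and function ARN below are illustrative placeholders, not values from this repo:

import boto3

events = boto3.client('events')
awslambda = boto3.client('lambda')

RULE_NAME = 'daily-weather-forecast'  #illustrative name
FUNCTION_ARN = 'arn:aws:lambda:us-east-1:111122223333:function:weather_forecast'  #placeholder ARN

#09:00 UTC daily; Day-of-week takes the ? placeholder because Day-of-month carries *
rule = events.put_rule(Name=RULE_NAME, ScheduleExpression='cron(0 9 * * ? *)')

#allow EventBridge to invoke the function, then point the rule at it
awslambda.add_permission(FunctionName=FUNCTION_ARN, StatementId='allow-eventbridge',
                         Action='lambda:InvokeFunction', Principal='events.amazonaws.com',
                         SourceArn=rule['RuleArn'])
events.put_targets(Rule=RULE_NAME, Targets=[{'Id': '1', 'Arn': FUNCTION_ARN}])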