├── .gitignore
├── TotallyMoreSecure.yaml
├── TotallySecure.yaml
├── build-final.sh
├── build-nopublic.sh
├── build.sh
├── customers.csv
├── destroy.sh
├── scripts
├── build.sh
└── destroy.sh
└── workbook
├── base_config.yml
├── docs
├── css
│ ├── custom.css
│ ├── html.css
│ └── jquery.fancybox.min.css
├── exercises
│ ├── exercise1.md
│ ├── exercise2.md
│ ├── exercise3.md
│ ├── exercise4.md
│ ├── exercise5.md
│ └── exercise6.md
├── img
│ ├── 1.png
│ ├── 10.png
│ ├── 11.png
│ ├── 12.png
│ ├── 13.png
│ ├── 14.png
│ ├── 15.png
│ ├── 16.png
│ ├── 17.png
│ ├── 18.png
│ ├── 19.png
│ ├── 2.png
│ ├── 20.png
│ ├── 21.png
│ ├── 22.png
│ ├── 23.png
│ ├── 24.png
│ ├── 25.png
│ ├── 26.png
│ ├── 27.png
│ ├── 28.png
│ ├── 29.png
│ ├── 3.png
│ ├── 30.png
│ ├── 31.png
│ ├── 32.png
│ ├── 33.png
│ ├── 34.png
│ ├── 35.png
│ ├── 36.png
│ ├── 37.png
│ ├── 38.png
│ ├── 39.png
│ ├── 4.png
│ ├── 5.png
│ ├── 6.png
│ ├── 7.png
│ ├── 8.png
│ ├── 9.png
│ └── favicon.ico
├── index.md
└── js
│ ├── jquery-3.4.1.min.js
│ ├── jquery.fancybox.min.js
│ ├── jquery.min.js
│ ├── jquery.min.map
│ └── workbook.js
├── mkdocs.yml
└── requirements.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | workbook/.venv
2 | **/.venv
3 | **/workbook/site
--------------------------------------------------------------------------------
/TotallyMoreSecure.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Resources:
3 | SensitiveBucket:
4 | Type: AWS::S3::Bucket
5 | Properties:
6 | BucketName: !Join
7 | - ''
8 | - - 'sensitive-'
9 | - !Ref 'AWS::AccountId'
10 | OwnershipControls:
11 | Rules:
12 | - ObjectOwnership: 'BucketOwnerPreferred'
13 |
--------------------------------------------------------------------------------
/TotallySecure.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: 2010-09-09
2 | Resources:
3 | SensitiveBucket:
4 | Type: AWS::S3::Bucket
5 | Properties:
6 | BucketName: !Join
7 | - ''
8 | - - 'sensitive-'
9 | - !Ref 'AWS::AccountId'
10 | OwnershipControls:
11 | Rules:
12 | - ObjectOwnership: 'BucketOwnerPreferred'
13 | PublicAccessBlockConfiguration:
14 | BlockPublicAcls: false
15 | BlockPublicPolicy: false
16 | IgnorePublicAcls: false
17 | RestrictPublicBuckets: false
18 | PublicBucketPolicy:
19 | Type: 'AWS::S3::BucketPolicy'
20 | Properties:
21 | Bucket: !Ref SensitiveBucket
22 | PolicyDocument:
23 | Statement:
24 | -
25 | Action:
26 | - s3:Get*
27 | Effect: Allow
28 | Resource:
29 | - !Sub arn:aws:s3:::${SensitiveBucket}/*
30 | Principal:
31 | AWS:
32 | - '*'
33 | -
34 | Action:
35 | - s3:List*
36 | Effect: Allow
37 | Resource:
38 | - !Sub arn:aws:s3:::${SensitiveBucket}
39 | Principal:
40 | AWS:
41 | - '*'
42 |
--------------------------------------------------------------------------------
/build-final.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | GREEN="\033[32m"
4 | RED="\033[31m"
5 | NOCOLOR="\033[0m"
6 |
7 | pushd ~/avoiding-data-disasters >/dev/null
8 | echo -en $GREEN"Deploying CloudFormation stack... "$NOCOLOR
9 | aws cloudformation delete-stack --stack-name avoiding-data-disasters 2>/dev/null >/dev/null
10 | sleep 5
11 | aws cloudformation create-stack --stack-name avoiding-data-disasters --template-body file://MostSecure.yaml >/dev/null
12 | while true; do
13 | case $(aws cloudformation describe-stacks --stack-name avoiding-data-disasters --query 'Stacks[].StackStatus' --output text) in
14 | CREATE_COMPLETE)
15 | echo "Done"
16 | break
17 | ;;
18 | CREATE_FAILED)
19 |         echo -e $RED"FAILED"$NOCOLOR
20 | exit 1
21 | ;;
22 | CREATE_IN_PROGRESS)
23 | sleep 5
24 | ;;
25 | *)
26 | echo -e $RED"FAILED"$NOCOLOR
27 | exit 1
28 | ;;
29 | esac
30 | done
31 |
32 | echo -en $GREEN"Uploading sensitive data... "$NOCOLOR
33 | BUCKET=$(aws s3 ls | grep sensitive- | awk '{print $3}')
34 | aws s3 cp customers.csv s3://$BUCKET/customers.csv >/dev/null
35 | popd >/dev/null
36 | aws s3 ls s3://$BUCKET/customers.csv >/dev/null
37 | if [ $? -eq 0 ]; then
38 | echo "Done"
39 | else
40 | echo -e $RED"FAILED"$NOCOLOR
41 | exit 1
42 | fi
43 |
--------------------------------------------------------------------------------
/build-nopublic.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | GREEN="\033[32m"
4 | RED="\033[31m"
5 | NOCOLOR="\033[0m"
6 |
7 | pushd ~/avoiding-data-disasters >/dev/null
8 | echo -en $GREEN"Deploying CloudFormation stack... "$NOCOLOR
9 | aws cloudformation delete-stack --stack-name avoiding-data-disasters 2>/dev/null >/dev/null
10 | sleep 5
11 | aws cloudformation create-stack --stack-name avoiding-data-disasters --template-body file://TotallyMoreSecure.yaml >/dev/null
12 | while true; do
13 | case $(aws cloudformation describe-stacks --stack-name avoiding-data-disasters --query 'Stacks[].StackStatus' --output text) in
14 | CREATE_COMPLETE)
15 | echo "Done"
16 | break
17 | ;;
18 | CREATE_FAILED)
19 |         echo -e $RED"FAILED"$NOCOLOR
20 | exit 1
21 | ;;
22 | CREATE_IN_PROGRESS)
23 | sleep 5
24 | ;;
25 | *)
26 | echo -e $RED"FAILED"$NOCOLOR
27 | exit 1
28 | ;;
29 | esac
30 | done
31 |
32 | echo -en $GREEN"Uploading sensitive data... "$NOCOLOR
33 | BUCKET=$(aws s3 ls | grep sensitive- | awk '{print $3}')
34 | aws s3 cp customers.csv s3://$BUCKET/customers.csv >/dev/null
35 | popd >/dev/null
36 | aws s3 ls s3://$BUCKET/customers.csv >/dev/null
37 | if [ $? -eq 0 ]; then
38 | echo "Done"
39 | else
40 | echo -e $RED"FAILED"$NOCOLOR
41 | exit 1
42 | fi
43 |
--------------------------------------------------------------------------------
/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | GREEN="\033[32m"
4 | RED="\033[31m"
5 | NOCOLOR="\033[0m"
6 |
7 | pushd ~/avoiding-data-disasters >/dev/null
8 | echo -en $GREEN"Deploying CloudFormation stack... "$NOCOLOR
9 | aws cloudformation create-stack --stack-name avoiding-data-disasters --template-body file://TotallySecure.yaml >/dev/null
10 | while true; do
11 | case $(aws cloudformation describe-stacks --stack-name avoiding-data-disasters --query 'Stacks[].StackStatus' --output text) in
12 | CREATE_COMPLETE)
13 | echo "Done"
14 | break
15 | ;;
16 | CREATE_FAILED)
17 |         echo -e $RED"FAILED"$NOCOLOR
18 | exit 1
19 | ;;
20 | CREATE_IN_PROGRESS)
21 | sleep 5
22 | ;;
23 | *)
24 | echo -e $RED"FAILED"$NOCOLOR
25 | exit 1
26 | ;;
27 | esac
28 | done
29 |
30 | echo -en $GREEN"Setting bucket ACL... "$NOCOLOR
31 | BUCKET=$(aws s3 ls | grep sensitive- | awk '{print $3}')
32 | aws s3api put-bucket-acl --bucket $BUCKET --acl public-read >/dev/null
33 | if [[ $(aws s3api get-bucket-acl --bucket $BUCKET --query 'Grants[?Permission==`READ`].Grantee.URI' --output text) == "http://acs.amazonaws.com/groups/global/AllUsers" ]]; then
34 | echo "Done"
35 | else
36 |     echo -e $RED"FAILED"$NOCOLOR
37 | exit 1
38 | fi
39 |
40 | echo -en $GREEN"Uploading sensitive data... "$NOCOLOR
41 | aws s3 cp customers.csv s3://$BUCKET/customers.csv >/dev/null
42 | popd >/dev/null
43 | aws s3 ls s3://$BUCKET/customers.csv >/dev/null
44 | if [ $? -eq 0 ]; then
45 | echo "Done"
46 | else
47 | echo -e $RED"FAILED"$NOCOLOR
48 | exit 1
49 | fi
50 |
--------------------------------------------------------------------------------
/customers.csv:
--------------------------------------------------------------------------------
1 | id,first_name,last_name,email,ip_address,cc_num
2 | 1,Devonna,Misselbrook,dmisselbrook0@scientificamerican.com,244.90.204.78,3546634243157105
3 | 2,Angie,Woolmer,awoolmer1@spiegel.de,215.80.41.15,4405492437748769
4 | 3,Noemi,Benford,nbenford2@nih.gov,183.13.47.200,5002357914522751
5 | 4,Jerrold,Hadaway,jhadaway3@mayoclinic.com,67.1.148.23,4017950069971843
6 | 5,Jenilee,Jandourek,jjandourek4@howstuffworks.com,213.37.250.196,3533520784901347
7 | 6,Ermanno,Golby,egolby5@examiner.com,146.48.226.47,677119584699737421
8 | 7,Reinold,Barwise,rbarwise6@cmu.edu,19.182.0.27,3572982767039713
9 | 8,Lorine,Evison,levison7@imageshack.us,203.36.140.186,36488284533952
10 | 9,Cornela,Bermingham,cbermingham8@house.gov,48.122.7.125,30464443174109
11 | 10,Leupold,Plose,lplose9@mlb.com,234.164.92.126,3572803253997950
12 | 11,Ari,Muddle,amuddlea@sun.com,169.20.56.136,3546934926035911
13 | 12,Goldie,Dumingo,gdumingob@psu.edu,156.22.34.233,3552301999744749
14 | 13,Dorotea,Ickovic,dickovicc@digg.com,15.169.215.26,3554005588077068
15 | 14,Lorne,Dowry,ldowryd@mysql.com,224.44.149.122,4175004181041735
16 | 15,Cacilia,Raleston,cralestone@sakura.ne.jp,140.249.92.110,3573405886406299
17 | 16,Mechelle,Bushe,mbushef@is.gd,212.207.185.253,5378057504016786
18 | 17,Tome,MacGee,tmacgeeg@loc.gov,90.253.140.52,3555130143322699
19 | 18,Hildy,Ybarra,hybarrah@netlog.com,101.226.199.67,5602253501559955
20 | 19,Hanan,Haskew,hhaskewi@tmall.com,223.26.46.220,30308163789739
21 | 20,Zita,Jannex,zjannexj@drupal.org,40.25.206.159,493611182087174904
22 | 21,Ezri,Melin,emelink@google.ru,98.87.236.245,5002350023673409
23 | 22,Sheffie,Slimme,sslimmel@unc.edu,58.222.126.78,201628021717223
24 | 23,Zonnya,Ambrosetti,zambrosettim@google.fr,123.76.14.110,5641820483314307804
25 | 24,Ailene,McCoole,amccoolen@free.fr,0.151.199.105,630475618007486003
26 | 25,Ricardo,Wharin,rwharino@ehow.com,81.35.181.126,3574125743988482
27 | 26,Bastian,Denisyev,bdenisyevp@amazon.co.jp,244.137.102.8,3535463821506423
28 | 27,Ortensia,Langdale,olangdaleq@jugem.jp,61.58.25.161,3551432820199067
29 | 28,Madelena,Glave,mglaver@artisteer.com,54.202.39.30,3575686161889568
30 | 29,Ramona,Sebrens,rsebrenss@archive.org,69.38.158.218,3579963243207150
31 | 30,Nyssa,Cainey,ncaineyt@zimbio.com,101.138.38.248,67612469323648267
32 | 31,Valle,Wallwork,vwallworku@psu.edu,167.126.183.17,3538323334944437
33 | 32,Joya,MacIntosh,jmacintoshv@slate.com,248.176.51.238,4175006718794876
34 | 33,Corissa,Gleader,cgleaderw@wordpress.org,156.172.153.251,56022100812861445
35 | 34,Berkie,Hovey,bhoveyx@java.com,63.111.88.181,346622897268474
36 | 35,Ryan,Batts,rbattsy@engadget.com,205.190.66.94,3555539395465768
37 | 36,Faustina,Sherville,fshervillez@joomla.org,60.6.168.233,3571361521168244
38 | 37,Valeda,Hrus,vhrus10@archive.org,159.211.110.57,3551522774352611
39 | 38,Beth,Bartolomeazzi,bbartolomeazzi11@va.gov,133.21.108.68,3586066120890071
40 | 39,Kenn,Leele,kleele12@toplist.cz,148.242.75.1,3532514691848015
41 | 40,Tawsha,Woolager,twoolager13@hostgator.com,248.121.191.203,6762440975663702
42 | 41,Chase,Luce,cluce14@army.mil,66.61.153.218,3559691488091484
43 | 42,Letta,Buckthorpe,lbuckthorpe15@google.com.hk,139.35.244.228,3534315846533884
44 | 43,Dylan,Yeardsley,dyeardsley16@craigslist.org,112.9.3.161,6706891018307824169
45 | 44,Gabrielle,Gillow,ggillow17@imdb.com,78.63.101.137,5602222665224532
46 | 45,Obadias,Luck,oluck18@ycombinator.com,51.12.159.7,5228808071530472
47 | 46,Lisetta,Clash,lclash19@dropbox.com,15.100.146.200,5436680922221603
48 | 47,Cari,Parkhouse,cparkhouse1a@squidoo.com,215.61.53.232,56108398404207632
49 | 48,Hilda,Axtonne,haxtonne1b@shareasale.com,142.9.228.79,3578913631317211
50 | 49,Oneida,Gibbonson,ogibbonson1c@kickstarter.com,109.73.177.63,3571538509598183
51 | 50,Louisette,Gozzett,lgozzett1d@sohu.com,140.80.200.80,4041597725365
52 | 51,Art,Bunt,abunt1e@tripod.com,145.89.91.75,3537182951322884
53 | 52,Windham,Halhead,whalhead1f@columbia.edu,19.140.228.120,6334340844287635
54 | 53,Camala,Selby,cselby1g@bloglovin.com,83.96.116.84,3565827511007996
55 | 54,Zaccaria,Marriott,zmarriott1h@wsj.com,184.206.149.214,4026848489989602
56 | 55,Bria,Mill,bmill1i@symantec.com,236.18.42.51,3579731094155145
57 | 56,Lamar,Worsom,lworsom1j@whitehouse.gov,154.18.252.210,6395189044996131
58 | 57,Emelda,Sutlieff,esutlieff1k@imageshack.us,145.164.54.238,3560704301125988
59 | 58,Donnie,Antonovic,dantonovic1l@pen.io,246.130.31.56,6331102756681730070
60 | 59,Barrie,Zealey,bzealey1m@utexas.edu,160.121.107.159,3530600579118324
61 | 60,Jens,Ilchenko,jilchenko1n@ezinearticles.com,66.216.46.37,5602245172628580
62 | 61,Vinny,Goodinson,vgoodinson1o@ask.com,46.15.26.9,3545962077816921
63 | 62,Constantine,Cartmell,ccartmell1p@seattletimes.com,114.2.5.129,4041592455853951
64 | 63,Madalyn,Hegge,mhegge1q@unicef.org,162.228.202.167,5007665452735105
65 | 64,Stafford,Cadden,scadden1r@themeforest.net,120.129.40.227,3529864467293218
66 | 65,Anna,Castaignet,acastaignet1s@e-recht24.de,149.5.10.164,4905738551740171
67 | 66,Casi,Crammy,ccrammy1t@columbia.edu,21.247.15.118,342516878225582
68 | 67,Megan,Pestor,mpestor1u@amazon.co.uk,11.11.9.25,6378151175153628
69 | 68,Ami,Grinvalds,agrinvalds1v@about.me,11.54.137.118,3539996838072299
70 | 69,Ester,Martinovsky,emartinovsky1w@mediafire.com,216.190.7.31,633110110286453677
71 | 70,Allegra,McNiven,amcniven1x@java.com,35.57.105.62,3571388367979392
72 | 71,Tawnya,Tonry,ttonry1y@uiuc.edu,13.19.195.134,372301719104020
73 | 72,Syman,Mahy,smahy1z@flickr.com,109.186.95.202,3540346031403494
74 | 73,Grover,Domenge,gdomenge20@mit.edu,142.163.55.214,560221887497151486
75 | 74,Griffie,Lambertazzi,glambertazzi21@harvard.edu,205.195.37.63,5526325059885884
76 | 75,Christophe,Bartlomieczak,cbartlomieczak22@examiner.com,219.206.120.52,5018480047822056229
77 | 76,Nara,Cheson,ncheson23@reference.com,83.247.30.169,5007664994272644
78 | 77,Kenny,Giraudot,kgiraudot24@sphinn.com,38.14.88.226,5108752967127511
79 | 78,Ame,Sanham,asanham25@google.com,148.1.82.205,374283084995513
80 | 79,Elladine,Hannen,ehannen26@ehow.com,27.15.124.235,5018662415447057
81 | 80,Lesley,Screwton,lscrewton27@psu.edu,246.177.128.116,3582580639553263
82 | 81,Amalea,Chesley,achesley28@washingtonpost.com,66.30.248.145,5010124970909789
83 | 82,Jeannette,Sandeland,jsandeland29@thetimes.co.uk,51.102.194.223,30095285770321
84 | 83,Cecilia,Duff,cduff2a@usnews.com,190.201.26.33,30433724002024
85 | 84,Eileen,Causer,ecauser2b@google.com.au,165.179.40.137,560221937336846372
86 | 85,Teodoro,Soppit,tsoppit2c@foxnews.com,105.245.100.176,3584344242179687
87 | 86,Maddy,Cargo,mcargo2d@sogou.com,150.183.57.170,3552284503912814
88 | 87,Alicea,Hargie,ahargie2e@alibaba.com,11.226.236.134,201904943327775
89 | 88,Collie,Sellar,csellar2f@cdc.gov,91.9.164.187,6759015355914882150
90 | 89,Lexi,Sidgwick,lsidgwick2g@infoseek.co.jp,143.96.93.105,060466789667825250
91 | 90,Correy,Coppock.,ccoppock2h@goo.gl,164.63.79.199,3536111580997122
92 | 91,Gun,Schade,gschade2i@cbc.ca,68.223.42.173,5132984484472639
93 | 92,Doreen,Kisar,dkisar2j@histats.com,144.194.173.188,374288789872925
94 | 93,Killy,McEnhill,kmcenhill2k@home.pl,58.3.169.58,3536875297393631
95 | 94,Susanetta,Goreisr,sgoreisr2l@mozilla.com,185.40.57.99,5602247813410063
96 | 95,Talbert,Izakoff,tizakoff2m@google.com.br,18.178.188.123,589374811509733937
97 | 96,Shae,Juleff,sjuleff2n@java.com,250.227.174.58,3574260019328806
98 | 97,Onfre,Pletts,opletts2o@acquirethisname.com,173.116.27.148,5485745477429859
99 | 98,Laure,Templeman,ltempleman2p@slideshare.net,218.83.151.153,3574682022422849
100 | 99,Slade,O'Halloran,sohalloran2q@yolasite.com,138.163.159.16,5048370366368397
101 | 100,Doralyn,Drydale,ddrydale2r@google.com,55.9.37.32,5305240410395993
102 |
--------------------------------------------------------------------------------
/destroy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | GREEN="\033[32m"
4 | RED="\033[31m"
5 | NOCOLOR="\033[0m"
6 |
7 | BUCKET=$(aws s3 ls | grep sensitive | awk '{print $3}')
8 | echo -en $GREEN"Emptying $BUCKET bucket... "$NOCOLOR
9 | aws s3api put-bucket-versioning --bucket $BUCKET --versioning-configuration Status=Suspended
10 | for OBJECT in $(aws s3api list-object-versions --bucket $BUCKET | jq -r '.DeleteMarkers[] | "\(.Key),\(.VersionId)"' 2>/dev/null); do
11 | KEY=$(echo $OBJECT | cut -d ',' -f1)
12 | VERSIONID=$(echo $OBJECT | cut -d ',' -f2)
13 | aws s3api delete-object --bucket $BUCKET --version-id $VERSIONID --key $KEY 2>/dev/null >/dev/null
14 | done
15 | for OBJECT in $(aws s3api list-object-versions --bucket $BUCKET | jq -r '.Versions[] | "\(.Key),\(.VersionId)"' 2>/dev/null); do
16 | KEY=$(echo $OBJECT | cut -d ',' -f1)
17 | VERSIONID=$(echo $OBJECT | cut -d ',' -f2)
18 | aws s3api delete-object --bucket $BUCKET --version-id $VERSIONID --key $KEY 2>/dev/null >/dev/null
19 | done
20 | if ! [[ $(aws s3 ls s3://$BUCKET/customers.csv) ]]; then
21 | echo "Done"
22 | else
23 | echo -e $RED"FAILED"$NOCOLOR
24 | exit 1
25 | fi
26 |
27 | echo -en $GREEN"Destroying CloudFormation stack... "$NOCOLOR
28 | aws cloudformation delete-stack --stack-name avoiding-data-disasters
29 | while true; do
30 | if [[ $(aws cloudformation describe-stacks --query 'Stacks[?StackName==`avoiding-data-disasters`].StackName' --output text) == "avoiding-data-disasters" ]]; then
31 | sleep 5
32 | else
33 | echo "Done"
34 | break
35 | fi
36 | done
37 |
--------------------------------------------------------------------------------
/scripts/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | pushd ~/building-detections-aws >/dev/null
4 | aws cloudformation deploy --stack-name building-detections --template-file ./BuildingDetections.yaml --capabilities CAPABILITY_NAMED_IAM
5 | cat << 'EOF' > /tmp/password-backup.txt
6 | AWS Root: admin@sherlock.com | P@ssw0rd1234
7 | Sherlock: sherlock@sherlock.com | $h3rL0ck!
8 | EOF
9 | BUCKET=$(aws s3api list-buckets | jq -r '.Buckets[] | select(.Name | startswith("databackup-")) | .Name')
10 | aws s3 cp /tmp/password-backup.txt s3://$BUCKET/password-backup.txt >/dev/null
11 | popd >/dev/null
12 |
--------------------------------------------------------------------------------
/scripts/destroy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | TARGET=$(aws events list-targets-by-rule --rule honeyfile --query Targets[0].Id --output text)
4 | aws events remove-targets --rule honeyfile --ids $TARGET
5 | aws events delete-rule --name honeyfile
6 | BUCKET=$(aws s3api list-buckets | jq -r '.Buckets[] | select(.Name | startswith("cloudlogs-")) | .Name')
7 | aws s3 rm s3://$BUCKET --recursive
8 | aws s3api delete-bucket --bucket $BUCKET
9 | BUCKET=$(aws s3api list-buckets | jq -r '.Buckets[] | select(.Name | startswith("databackup-")) | .Name')
10 | aws s3 rm s3://$BUCKET --recursive
11 | aws cloudtrail delete-trail --name security
12 | aws cloudformation delete-stack --stack-name building-detections
13 |
--------------------------------------------------------------------------------
/workbook/base_config.yml:
--------------------------------------------------------------------------------
1 | extra:
2 | template_version: 2.0.3
3 |
4 | theme:
5 | name: "material"
6 | font:
7 | text: "Roboto"
8 | code: "Source Code Pro"
9 | palette:
10 | primary: "white"
11 | accent: "indigo"
12 | # option 1: internal icons
13 | icon:
14 | logo: "material/home-variant"
15 |
16 | # use https://favicon.io/favicon-converter/ for this
17 | favicon: "img/favicon.ico"
18 | features:
19 | - navigation.instant
20 | - navigation.tabs
21 |
22 | markdown_extensions:
23 | - admonition
24 | - attr_list
25 | - codehilite:
26 | guess_lang: false
27 | - footnotes
28 | - toc:
29 | permalink: false
30 | - pymdownx.arithmatex
31 | - pymdownx.betterem:
32 | smart_enable: all
33 | - pymdownx.caret
34 | - pymdownx.critic
35 | - pymdownx.details
36 | - pymdownx.emoji:
37 | emoji_index: !!python/name:materialx.emoji.twemoji
38 | emoji_generator: !!python/name:materialx.emoji.to_svg
39 | - pymdownx.inlinehilite
40 | - pymdownx.keys
41 | - pymdownx.magiclink
42 | - pymdownx.mark
43 | - pymdownx.smartsymbols
44 | - pymdownx.snippets
45 | - pymdownx.superfences
46 | - pymdownx.tabbed:
47 | alternate_style: true
48 | - pymdownx.tasklist:
49 | custom_checkbox: true
50 | clickable_checkbox: true
51 | - pymdownx.tilde
52 |
53 | plugins:
54 | - search
55 | - exclude:
56 | glob:
57 | - lab-template.md
58 | - pdf-export:
59 | verbose: false
60 | media_type: print
61 | combined: true
62 | enabled_if_env: ENABLE_PDF_EXPORT
63 |
64 | extra_css:
65 | - css/html.css
66 | - css/custom.css
67 | - css/jquery.fancybox.min.css
68 |
69 | extra_javascript:
70 |   - js/jquery.min.js
71 |   - js/jquery.fancybox.min.js
72 |   - js/workbook.js
--------------------------------------------------------------------------------
/workbook/docs/css/custom.css:
--------------------------------------------------------------------------------
1 | :root {
2 | --sans-primary-color: #ff5000;
3 | --sans-secondary-color: #ff7000;
4 | --sans-primary-text-color: #ffffff;
5 | --sans-secondary-text-color: #eeeeee;
6 | }
7 |
--------------------------------------------------------------------------------
/workbook/docs/css/html.css:
--------------------------------------------------------------------------------
1 | :root {
2 | --sans-primary-color: #005f86;
3 | --sans-secondary-color: #0092bc;
4 | --sans-primary-text-color: #ffffff;
5 | --sans-secondary-text-color: #eeeeee;
6 |
7 | /* keyboard icon for "cmd" admonishment */
8 | --md-admonition-icon--cmd: url('data:image/svg+xml;charset=utf-8,');
9 | }
10 |
11 | @media not print {
12 | html {
13 | font-size: 14.5pt;
14 | }
15 | }
16 |
17 | .md-header {
18 | color: var(--sans-primary-text-color);
19 | background-color: var(--sans-primary-color);
20 | }
21 | .md-header__title {
22 | font-weight: bold;
23 | font-size: 1.2rem;
24 | }
25 | .md-tabs {
26 | background-color: var(--sans-secondary-color);
27 | color: var(--sans-secondary-text-color);
28 | font-weight: bold;
29 | }
30 | .md-tabs__link--active {
31 | font-weight: bold;
32 | font-size: 1rem;
33 | }
34 |
35 | .md-grid {
36 | max-width: 80%;
37 | }
38 |
39 | h1 {
40 | color: white;
41 | background-color: var(--sans-primary-color);
42 | font-style: italic;
43 | text-align: center;
44 | padding-top: 3px;
45 | padding-bottom: 5px;
46 | }
47 |
48 | .md-typeset a {
49 | text-decoration: underline;
50 | }
51 |
52 | .md-typeset h1 {
53 | color: var(--sans-primary-text-color);
54 | font-weight: bold;
55 | }
56 | .md-typeset h1 code {
57 | color: var(--sans-primary-text-color);
58 | background-color: var(--sans-primary-color);
59 | }
60 |
61 | .md-search__inner {
62 | color: black;
63 | background-color: white;
64 | }
65 |
66 | h2 {
67 | color: var(--sans-secondary-text-color);
68 | background-color: var(--sans-secondary-color);
69 | padding-left: 5px;
70 | padding-top: 2px;
71 | padding-bottom: 3px;
72 | }
73 |
74 | .md-typeset h2 {
75 | font-size: 110%;
76 | font-weight: bolder;
77 | }
78 |
79 | .md-typeset h2 code {
80 | color: var(--sans-secondary-text-color);
81 | background-color: var(--sans-primary-color);
82 | }
83 |
84 | h3 {
85 | color: black;
86 | background-color: lightgray;
87 | border-color: lightgray;
88 | border-style: double;
89 | border-width: thick;
90 | padding-left: 5px;
91 | font-style: italic;
92 | }
93 |
94 | .md-typeset h3 {
95 | font-size: 100%;
96 | }
97 |
98 | .md-typeset h3 code {
99 | color: black;
100 | background-color: lightgray;
101 | }
102 |
103 | /* "code" is for inline code and "codehilite" is for code blocks */
104 | code { font-weight: bold; }
105 | .codehilite { font-weight: bold; }
106 |
107 | img {
108 | border-style: solid;
109 | border-width: 1px;
110 | border-color: black;
111 | }
112 |
113 | img.centered {
114 | display: block;
115 | margin: 0 auto;
116 | }
117 |
118 | img.noborder { border-style: none; }
119 |
120 | /* To make an image display at a specified width,
121 | use the attr_list mkdocs plugin and assign a
122 | class to the image itself, e.g. {: class="w400" } */
123 | img.w100 { width:100px; }
124 | img.w150 { width:150px; }
125 | img.w200 { width:200px; }
126 | img.w250 { width:250px; }
127 | img.w300 { width:300px; }
128 | img.w350 { width:350px; }
129 | img.w400 { width:400px; }
130 | img.w450 { width:450px; }
131 | img.w500 { width:500px; }
132 | img.w550 { width:550px; }
133 | img.w600 { width:600px; }
134 | img.w650 { width:650px; }
135 |
136 | .md-typeset .admonition.cmd,
137 | .md-typeset details.cmd {
138 | border-color: rgb(0, 191, 165);
139 | }
140 | .md-typeset .cmd > .admonition-title,
141 | .md-typeset .cmd > summary {
142 | background-color: rgba(0, 191, 165, 0.1);
143 | border-color: rgb(0, 191, 165);
144 | }
145 | .md-typeset :is(.admonition.cmd, details.cmd) {
146 | border-color: rgb(0, 191, 165);
147 | }
148 | .md-typeset .cmd > .admonition-title::before,
149 | .md-typeset .cmd > summary::before {
150 | background-color: rgb(0, 191, 165);
151 | -webkit-mask-image: var(--md-admonition-icon--cmd);
152 | mask-image: var(--md-admonition-icon--cmd);
153 | }
154 |
155 | .md-typeset .admonition-title {
156 | border-left-width: 0px !important;
157 | }
158 |
159 | /* remove the "copy to clipboard" icon an feature on "summary" admonition
160 | boxes, which are consistently and only used for command line results */
161 | .summary .md-clipboard {
162 | visibility: hidden;
163 | }
164 |
165 | blockquote {
166 | background-color: #f5f5f5;
167 | }
168 |
169 | .md-typeset blockquote {
170 | color: black;
171 | font-size: 90%;
172 | }
173 |
174 | /* Provides a means of centering a paragraph of text. To use, place the following below the text paragraph:
175 | {: class="centered" }
176 | */
177 | .centered {
178 | text-align: center;
179 | }
180 |
181 | /* This overlays a "zoom in" icon for all class="zoom" anchor elements. */
182 | a.zoom { position: relative; }
183 | a.zoom::after {
184 | height: 24px;
185 | width: 24px;
186 | content: "";
187 | position: absolute;
188 | bottom:11px;
189 | right:5px;
190 | display: block;
191 | background-image: url(/img/magnify.png);
192 | background-repeat: no-repeat;
193 | opacity: 75%;
194 | }
--------------------------------------------------------------------------------
/workbook/docs/css/jquery.fancybox.min.css:
--------------------------------------------------------------------------------
1 | body.compensate-for-scrollbar{overflow:hidden}.fancybox-active{height:auto}.fancybox-is-hidden{left:-9999px;margin:0;position:absolute!important;top:-9999px;visibility:hidden}.fancybox-container{-webkit-backface-visibility:hidden;height:100%;left:0;outline:none;position:fixed;-webkit-tap-highlight-color:transparent;top:0;-ms-touch-action:manipulation;touch-action:manipulation;transform:translateZ(0);width:100%;z-index:99992}.fancybox-container *{box-sizing:border-box}.fancybox-bg,.fancybox-inner,.fancybox-outer,.fancybox-stage{bottom:0;left:0;position:absolute;right:0;top:0}.fancybox-outer{-webkit-overflow-scrolling:touch;overflow-y:auto}.fancybox-bg{background:#1e1e1e;opacity:0;transition-duration:inherit;transition-property:opacity;transition-timing-function:cubic-bezier(.47,0,.74,.71)}.fancybox-is-open .fancybox-bg{opacity:.9;transition-timing-function:cubic-bezier(.22,.61,.36,1)}.fancybox-caption,.fancybox-infobar,.fancybox-navigation .fancybox-button,.fancybox-toolbar{direction:ltr;opacity:0;position:absolute;transition:opacity .25s ease,visibility 0s ease .25s;visibility:hidden;z-index:99997}.fancybox-show-caption .fancybox-caption,.fancybox-show-infobar .fancybox-infobar,.fancybox-show-nav .fancybox-navigation .fancybox-button,.fancybox-show-toolbar .fancybox-toolbar{opacity:1;transition:opacity .25s ease 0s,visibility 0s ease 0s;visibility:visible}.fancybox-infobar{color:#ccc;font-size:13px;-webkit-font-smoothing:subpixel-antialiased;height:44px;left:0;line-height:44px;min-width:44px;mix-blend-mode:difference;padding:0 10px;pointer-events:none;top:0;-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.fancybox-toolbar{right:0;top:0}.fancybox-stage{direction:ltr;overflow:visible;transform:translateZ(0);z-index:99994}.fancybox-is-open .fancybox-stage{overflow:hidden}.fancybox-slide{-webkit-backface-visibility:hidden;display:none;height:100%;left:0;outline:none;overflow:auto;-webkit-overflow-scrolling:touch;padding:44px;position:absolute;text-align:center;top:0;transition-property:transform,opacity;white-space:normal;width:100%;z-index:99994}.fancybox-slide:before{content:"";display:inline-block;font-size:0;height:100%;vertical-align:middle;width:0}.fancybox-is-sliding .fancybox-slide,.fancybox-slide--current,.fancybox-slide--next,.fancybox-slide--previous{display:block}.fancybox-slide--image{overflow:hidden;padding:44px 0}.fancybox-slide--image:before{display:none}.fancybox-slide--html{padding:6px}.fancybox-content{background:#fff;display:inline-block;margin:0;max-width:100%;overflow:auto;-webkit-overflow-scrolling:touch;padding:44px;position:relative;text-align:left;vertical-align:middle}.fancybox-slide--image .fancybox-content{animation-timing-function:cubic-bezier(.5,0,.14,1);-webkit-backface-visibility:hidden;background:transparent;background-repeat:no-repeat;background-size:100% 100%;left:0;max-width:none;overflow:visible;padding:0;position:absolute;top:0;transform-origin:top left;transition-property:transform,opacity;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;z-index:99995}.fancybox-can-zoomOut .fancybox-content{cursor:zoom-out}.fancybox-can-zoomIn .fancybox-content{cursor:zoom-in}.fancybox-can-pan .fancybox-content,.fancybox-can-swipe .fancybox-content{cursor:grab}.fancybox-is-grabbing .fancybox-content{cursor:grabbing}.fancybox-container 
[data-selectable=true]{cursor:text}.fancybox-image,.fancybox-spaceball{background:transparent;border:0;height:100%;left:0;margin:0;max-height:none;max-width:none;padding:0;position:absolute;top:0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;width:100%}.fancybox-spaceball{z-index:1}.fancybox-slide--iframe .fancybox-content,.fancybox-slide--map .fancybox-content,.fancybox-slide--pdf .fancybox-content,.fancybox-slide--video .fancybox-content{height:100%;overflow:visible;padding:0;width:100%}.fancybox-slide--video .fancybox-content{background:#000}.fancybox-slide--map .fancybox-content{background:#e5e3df}.fancybox-slide--iframe .fancybox-content{background:#fff}.fancybox-iframe,.fancybox-video{background:transparent;border:0;display:block;height:100%;margin:0;overflow:hidden;padding:0;width:100%}.fancybox-iframe{left:0;position:absolute;top:0}.fancybox-error{background:#fff;cursor:default;max-width:400px;padding:40px;width:100%}.fancybox-error p{color:#444;font-size:16px;line-height:20px;margin:0;padding:0}.fancybox-button{background:rgba(30,30,30,.6);border:0;border-radius:0;box-shadow:none;cursor:pointer;display:inline-block;height:44px;margin:0;padding:10px;position:relative;transition:color .2s;vertical-align:top;visibility:inherit;width:44px}.fancybox-button,.fancybox-button:link,.fancybox-button:visited{color:#ccc}.fancybox-button:hover{color:#fff}.fancybox-button:focus{outline:none}.fancybox-button.fancybox-focus{outline:1px dotted}.fancybox-button[disabled],.fancybox-button[disabled]:hover{color:#888;cursor:default;outline:none}.fancybox-button div{height:100%}.fancybox-button svg{display:block;height:100%;overflow:visible;position:relative;width:100%}.fancybox-button svg path{fill:currentColor;stroke-width:0}.fancybox-button--fsenter svg:nth-child(2),.fancybox-button--fsexit svg:first-child,.fancybox-button--pause svg:first-child,.fancybox-button--play svg:nth-child(2){display:none}.fancybox-progress{background:#ff5268;height:2px;left:0;position:absolute;right:0;top:0;transform:scaleX(0);transform-origin:0;transition-property:transform;transition-timing-function:linear;z-index:99998}.fancybox-close-small{background:transparent;border:0;border-radius:0;color:#ccc;cursor:pointer;opacity:.8;padding:8px;position:absolute;right:-12px;top:-44px;z-index:401}.fancybox-close-small:hover{color:#fff;opacity:1}.fancybox-slide--html .fancybox-close-small{color:currentColor;padding:10px;right:0;top:0}.fancybox-slide--image.fancybox-is-scaling .fancybox-content{overflow:hidden}.fancybox-is-scaling .fancybox-close-small,.fancybox-is-zoomable.fancybox-can-pan .fancybox-close-small{display:none}.fancybox-navigation .fancybox-button{background-clip:content-box;height:100px;opacity:0;position:absolute;top:calc(50% - 50px);width:70px}.fancybox-navigation .fancybox-button div{padding:7px}.fancybox-navigation .fancybox-button--arrow_left{left:0;left:env(safe-area-inset-left);padding:31px 26px 31px 6px}.fancybox-navigation .fancybox-button--arrow_right{padding:31px 6px 31px 26px;right:0;right:env(safe-area-inset-right)}.fancybox-caption{background:linear-gradient(0deg,rgba(0,0,0,.85) 0,rgba(0,0,0,.3) 50%,rgba(0,0,0,.15) 65%,rgba(0,0,0,.075) 75.5%,rgba(0,0,0,.037) 82.85%,rgba(0,0,0,.019) 88%,transparent);bottom:0;color:#eee;font-size:14px;font-weight:400;left:0;line-height:1.5;padding:75px 44px 25px;pointer-events:none;right:0;text-align:center;z-index:99996}@supports (padding:max(0px)){.fancybox-caption{padding:75px max(44px,env(safe-area-inset-right)) 
max(25px,env(safe-area-inset-bottom)) max(44px,env(safe-area-inset-left))}}.fancybox-caption--separate{margin-top:-50px}.fancybox-caption__body{max-height:50vh;overflow:auto;pointer-events:all}.fancybox-caption a,.fancybox-caption a:link,.fancybox-caption a:visited{color:#ccc;text-decoration:none}.fancybox-caption a:hover{color:#fff;text-decoration:underline}.fancybox-loading{animation:a 1s linear infinite;background:transparent;border:4px solid #888;border-bottom-color:#fff;border-radius:50%;height:50px;left:50%;margin:-25px 0 0 -25px;opacity:.7;padding:0;position:absolute;top:50%;width:50px;z-index:99999}@keyframes a{to{transform:rotate(1turn)}}.fancybox-animated{transition-timing-function:cubic-bezier(0,0,.25,1)}.fancybox-fx-slide.fancybox-slide--previous{opacity:0;transform:translate3d(-100%,0,0)}.fancybox-fx-slide.fancybox-slide--next{opacity:0;transform:translate3d(100%,0,0)}.fancybox-fx-slide.fancybox-slide--current{opacity:1;transform:translateZ(0)}.fancybox-fx-fade.fancybox-slide--next,.fancybox-fx-fade.fancybox-slide--previous{opacity:0;transition-timing-function:cubic-bezier(.19,1,.22,1)}.fancybox-fx-fade.fancybox-slide--current{opacity:1}.fancybox-fx-zoom-in-out.fancybox-slide--previous{opacity:0;transform:scale3d(1.5,1.5,1.5)}.fancybox-fx-zoom-in-out.fancybox-slide--next{opacity:0;transform:scale3d(.5,.5,.5)}.fancybox-fx-zoom-in-out.fancybox-slide--current{opacity:1;transform:scaleX(1)}.fancybox-fx-rotate.fancybox-slide--previous{opacity:0;transform:rotate(-1turn)}.fancybox-fx-rotate.fancybox-slide--next{opacity:0;transform:rotate(1turn)}.fancybox-fx-rotate.fancybox-slide--current{opacity:1;transform:rotate(0deg)}.fancybox-fx-circular.fancybox-slide--previous{opacity:0;transform:scale3d(0,0,0) translate3d(-100%,0,0)}.fancybox-fx-circular.fancybox-slide--next{opacity:0;transform:scale3d(0,0,0) translate3d(100%,0,0)}.fancybox-fx-circular.fancybox-slide--current{opacity:1;transform:scaleX(1) translateZ(0)}.fancybox-fx-tube.fancybox-slide--previous{transform:translate3d(-100%,0,0) scale(.1) skew(-10deg)}.fancybox-fx-tube.fancybox-slide--next{transform:translate3d(100%,0,0) scale(.1) skew(10deg)}.fancybox-fx-tube.fancybox-slide--current{transform:translateZ(0) scale(1)}@media (max-height:576px){.fancybox-slide{padding-left:6px;padding-right:6px}.fancybox-slide--image{padding:6px 0}.fancybox-close-small{right:-6px}.fancybox-slide--image .fancybox-close-small{background:#4e4e4e;color:#f2f4f6;height:36px;opacity:1;padding:6px;right:0;top:0;width:36px}.fancybox-caption{padding-left:12px;padding-right:12px}@supports (padding:max(0px)){.fancybox-caption{padding-left:max(12px,env(safe-area-inset-left));padding-right:max(12px,env(safe-area-inset-right))}}}.fancybox-share{background:#f4f4f4;border-radius:3px;max-width:90%;padding:30px;text-align:center}.fancybox-share h1{color:#222;font-size:35px;font-weight:700;margin:0 0 20px}.fancybox-share p{margin:0;padding:0}.fancybox-share__button{border:0;border-radius:3px;display:inline-block;font-size:14px;font-weight:700;line-height:40px;margin:0 5px 10px;min-width:130px;padding:0 15px;text-decoration:none;transition:all 
.2s;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;white-space:nowrap}.fancybox-share__button:link,.fancybox-share__button:visited{color:#fff}.fancybox-share__button:hover{text-decoration:none}.fancybox-share__button--fb{background:#3b5998}.fancybox-share__button--fb:hover{background:#344e86}.fancybox-share__button--pt{background:#bd081d}.fancybox-share__button--pt:hover{background:#aa0719}.fancybox-share__button--tw{background:#1da1f2}.fancybox-share__button--tw:hover{background:#0d95e8}.fancybox-share__button svg{height:25px;margin-right:7px;position:relative;top:-1px;vertical-align:middle;width:25px}.fancybox-share__button svg path{fill:#fff}.fancybox-share__input{background:transparent;border:0;border-bottom:1px solid #d7d7d7;border-radius:0;color:#5d5b5b;font-size:14px;margin:10px 0 0;outline:none;padding:10px 15px;width:100%}.fancybox-thumbs{background:#ddd;bottom:0;display:none;margin:0;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar;padding:2px 2px 4px;position:absolute;right:0;-webkit-tap-highlight-color:rgba(0,0,0,0);top:0;width:212px;z-index:99995}.fancybox-thumbs-x{overflow-x:auto;overflow-y:hidden}.fancybox-show-thumbs .fancybox-thumbs{display:block}.fancybox-show-thumbs .fancybox-inner{right:212px}.fancybox-thumbs__list{font-size:0;height:100%;list-style:none;margin:0;overflow-x:hidden;overflow-y:auto;padding:0;position:absolute;position:relative;white-space:nowrap;width:100%}.fancybox-thumbs-x .fancybox-thumbs__list{overflow:hidden}.fancybox-thumbs-y .fancybox-thumbs__list::-webkit-scrollbar{width:7px}.fancybox-thumbs-y .fancybox-thumbs__list::-webkit-scrollbar-track{background:#fff;border-radius:10px;box-shadow:inset 0 0 6px rgba(0,0,0,.3)}.fancybox-thumbs-y .fancybox-thumbs__list::-webkit-scrollbar-thumb{background:#2a2a2a;border-radius:10px}.fancybox-thumbs__list a{-webkit-backface-visibility:hidden;backface-visibility:hidden;background-color:rgba(0,0,0,.1);background-position:50%;background-repeat:no-repeat;background-size:cover;cursor:pointer;float:left;height:75px;margin:2px;max-height:calc(100% - 8px);max-width:calc(50% - 4px);outline:none;overflow:hidden;padding:0;position:relative;-webkit-tap-highlight-color:transparent;width:100px}.fancybox-thumbs__list a:before{border:6px solid #ff5268;bottom:0;content:"";left:0;opacity:0;position:absolute;right:0;top:0;transition:all .2s cubic-bezier(.25,.46,.45,.94);z-index:99991}.fancybox-thumbs__list a:focus:before{opacity:.5}.fancybox-thumbs__list a.fancybox-thumbs-active:before{opacity:1}@media (max-width:576px){.fancybox-thumbs{width:110px}.fancybox-show-thumbs .fancybox-inner{right:110px}.fancybox-thumbs__list a{max-width:calc(100% - 10px)}}
2 |
--------------------------------------------------------------------------------
/workbook/docs/exercises/exercise1.md:
--------------------------------------------------------------------------------
1 | # Exercise 1: The Big Mistake
2 |
3 |
4 |
5 |
6 |
11 |
12 | **Estimated time to complete:** 20 minutes
13 |
14 | ## Objectives
15 |
16 | - Deploy a CloudFormation stack from **AWS CloudShell** using `build.sh`, which creates a public S3 bucket and uploads a sensitive file
17 | - Audit the cloud account using `s3audit` to reveal all of the storage issues
18 | - Show that the file can be freely downloaded from anywhere
19 |
20 | ## Challenges
21 |
22 | ### Challenge 1: Launch AWS CloudShell and Download Workshop Code
23 |
24 | The exercises in this workshop are designed to be performed entirely in your web browser - no additional tools (e.g., virtual machines, SSH clients) required! Many cloud vendors allow customers to launch a shell session in a vendor-managed container/VM to perform basic tasks. We will use this to our advantage to deploy resources, analyze their configurations, and fix those issues.
25 |
26 | Begin by logging into your AWS account and launching a **CloudShell** session in the **N. Virginia (us-east-1)** region.
27 |
28 | Once in a **CloudShell** session, you will need to download [this code](https://github.com/bluemountaincyber/avoiding-data-disasters) in order to deploy resources via AWS CloudFormation. But how to pull the code down to the session? That's easy! AWS provides `git` in their **CloudShell** environment!
29 |
30 | ??? cmd "Solution"
31 |
32 | 1. Navigate to [https://console.aws.amazon.com](https://console.aws.amazon.com) and sign in with either your root user account or an IAM user with **AdministratorAccess** permissions.
33 |
34 | !!! note "Root User"
35 |
36 | Select the **Root user** radio button (1), enter your email address used to sign up for AWS in the **Root user email address** text field (2), and click the **Next** button (3). On the next page, enter your password in the **Password** text field (4) and click the **Sign in** button (5).
37 |
38 | {: class="w300" }
39 | {: class="w300" }
40 |
41 | !!! note "IAM User"
42 |
43 |         Select the **IAM user** radio button (1), enter your AWS account number or alias in the **Account ID (12 digits) or account alias** text field (2), and click the **Next** button (3). On the next page, enter your IAM username in the **IAM user name** text field (4), enter your IAM user's password in the **Password** text field (5), and click on the **Sign in** button (6).
44 |
45 | {: class="w300" }
46 | {: class="w300" }
47 |
48 |     2. When you arrive at the **AWS Management Console**, ensure that you are currently interacting with the **N. Virginia (us-east-1)** region by taking a look at the top-right of the page. You should see **N. Virginia**. If you see a different region, click the down arrow next to the region's name (1) and select **US East (N. Virginia)** (2).
49 |
50 | {: class="w400" }
51 |
52 | 3. Now that you are interacting with the **N. Virginia (us-east-1)** region, click on the icon near the top-right that looks like a command prompt to start a **CloudShell** session.
53 |
54 | {: class="w500" }
55 |
56 | 4. On the next page, you will see a banner that states *Waiting for environment to run...*. Wait a minute or two until you see a command prompt that looks similar to `[cloudshell-user@ip-10-1-82-127 ~]$` (your hostname will vary).
57 |
58 | {: class="w500" }
59 |
60 | !!! note
61 |
62 | Your **CloudShell** session will expire after roughly 20 minutes of inactivity. If this happens, simply attempt to type and the session should resume. If this does not work, refresh the page.
63 |
64 | 5. Ensure that you are in your **CloudShell** session's home directory by running the following commands:
65 |
66 | ```bash
67 | cd /home/cloudshell-user
68 | pwd
69 | ```
70 |
71 | !!! summary "Expected Result"
72 |
73 | ```bash
74 | /home/cloudshell-user
75 | ```
76 |
77 |     6. Use `git` to clone the **avoiding-data-disasters** source code.
78 |
79 | ```bash
80 | git clone https://github.com/bluemountaincyber/avoiding-data-disasters
81 | ```
82 |
83 | !!! summary "Expected result"
84 |
85 | ```bash
86 | Cloning into 'avoiding-data-disasters'...
87 | remote: Enumerating objects: 96, done.
88 | remote: Counting objects: 100% (96/96), done.
89 | remote: Compressing objects: 100% (87/87), done.
90 | remote: Total 96 (delta 10), reused 94 (delta 8), pack-reused 0
91 | Receiving objects: 100% (96/96), 6.04 MiB | 17.19 MiB/s, done.
92 | Resolving deltas: 100% (10/10), done.
93 | ```
94 |
95 | 7. Ensure that the code downloaded by running the following command:
96 |
97 | ```bash
98 | ls -la /home/cloudshell-user/avoiding-data-disasters/
99 | ```
100 |
101 | !!! summary "Expected Result"
102 |
103 | ```bash
104 | total 51
105 | drwxrwxr-x 5 cloudshell-user cloudshell-user 4096 Jun 16 13:14 .
106 | drwxr-xr-x 13 cloudshell-user cloudshell-user 4096 Jun 16 13:14 ..
107 | -rwxrwxr-x 1 cloudshell-user cloudshell-user 1424 Jun 16 13:14 build.sh
108 | -rw-rw-r-- 1 cloudshell-user cloudshell-user 7171 Jun 16 13:14 customers.csv
109 | -rwxrwxr-x 1 cloudshell-user cloudshell-user 727 Jun 16 13:14 destroy.sh
110 | drwxrwxr-x 8 cloudshell-user cloudshell-user 4096 Jun 16 13:14 .git
111 | -rw-rw-r-- 1 cloudshell-user cloudshell-user 40 Jun 16 13:14 .gitignore
112 | drwxrwxr-x 2 cloudshell-user cloudshell-user 4096 Jun 16 13:14 scripts
113 | -rw-rw-r-- 1 cloudshell-user cloudshell-user 1062 Jun 16 13:14 TotallySecure.yaml
114 | drwxrwxr-x 3 cloudshell-user cloudshell-user 4096 Jun 16 13:14 workbook
115 | ```
116 |
117 | ### Challenge 2: Deploy Workshop IaC Resources
118 |
119 | Finally, you have all of the components needed to deploy the resources in your AWS account.
120 |
121 | Use `build.sh` to deploy the IaC (which can be found in the `avoiding-data-disasters` directory). Ensure that all worked properly by searching for the following AWS resources using the AWS CLI (also provided in CloudShell):
122 |
123 | - [ ] An S3 bucket with a name beginning with `sensitive-`
124 | - [ ] An S3 object in that bucket called `customers.csv`
125 |
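If you'd like a quick spot-check before diving into the solution, the same `grep`/`awk` idiom that `build.sh` itself uses works here too (a minimal sketch; your bucket name suffix will differ):

```bash
# List all buckets and keep the one whose name starts with "sensitive-"
BUCKET=$(aws s3 ls | grep sensitive- | awk '{print $3}')

# List the bucket's contents; customers.csv should appear
aws s3 ls s3://$BUCKET/
```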
126 | ??? cmd "Solution"
127 |
128 | 1. Run the `build.sh` script located in the `/home/cloudshell-user/avoiding-data-disasters/` directory. After roughly a minute, it should complete.
129 |
130 | ```bash
131 |     /home/cloudshell-user/avoiding-data-disasters/build.sh
132 |     ```
133 |
134 |
135 | !!! summary "Sample Result"
136 |
137 | ```bash
138 | Deploying CloudFormation stack... Done
139 | Setting bucket ACL... Done
140 | Uploading sensitive data... Done
141 | ```
142 |
143 | 2. Now, check that the resources listed above were deployed properly.
144 |
145 | - S3 bucket beginning with the name `sensitive-`
146 |
147 | Here, we will use the AWS CLI with its `aws s3api list-buckets` command to gather information about our deployed buckets and then pass that information to the `jq` utility to parse the data and extract the bucket name beginning with the text `sensitive-`.
148 |
149 | ```bash
150 | aws s3api list-buckets | \
151 | jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name'
152 | ```
153 |
154 | !!! summary "Sample result"
155 |
156 | ```bash
157 | sensitive-012345678910
158 | ```
159 |
160 | - S3 object called `customers.csv`
161 |
162 |         And now, use a little Command Line Kung Fu to dig into the bucket beginning with `sensitive-` using the AWS CLI. This is a little more complex. First, we are setting the S3 bucket name in a variable to be used in the second command, which will do two things: 1) list metadata about all objects in the bucket and 2) use `jq` to extract just the file name so we can verify that the file exists.
163 |
164 | ```bash
165 | BUCKET=$(aws s3api list-buckets | \
166 | jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name')
167 | aws s3api list-objects --bucket $BUCKET | jq -r '.Contents[].Key'
168 | ```
169 |
170 | !!! summary "Sample result"
171 |
172 | ```bash
173 | customers.csv
174 | ```
175 |
176 | ### Challenge 3: Use s3audit to Audit S3 Configuration
177 |
178 | Unfortunately, the `s3audit` tool is not available natively in AWS CloudShell, but that is easy to fix. Follow the installation instructions [here](https://github.com/scalefactory/s3audit-ts) and run the tool to assess your new S3 bucket.
179 |
180 | ??? cmd "Solution"
181 |
182 | 1. There are two methods to install this tool: 1) download the appropriate release from GitHub or 2) use `npm` (which is included in AWS CloudShell) to install the `s3audit` package. We will go with option two as shown below:
183 |
184 | ```bash
185 | sudo npm install -g s3audit
186 | ```
187 |
188 | !!! summary "Sample result"
189 |
190 | ```bash
191 | npm WARN deprecated querystring@0.2.0: The querystring API is considered Legacy. new code should use the URLSearchParams API instead.
192 |
193 | added 179 packages, and audited 180 packages in 18s
194 |
195 | 23 packages are looking for funding
196 | run `npm fund` for details
197 |
198 | found 0 vulnerabilities
199 | npm notice
200 | npm notice New major version of npm available! 8.19.4 -> 9.7.1
201 | npm notice Changelog: https://github.com/npm/cli/releases/tag/v9.7.1
202 | npm notice Run npm install -g npm@9.7.1 to update!
203 | npm notice
204 | ```
205 |
206 | 2. Using `s3audit` is as easy as just running `s3audit`. Since some folks may have more than one bucket in their AWS account, we can tell `s3audit` to look at a specific bucket using the `--bucket` flag. Run the command as follows to see the results of your security configuration for your `sensitive-*` bucket.
207 |
208 | ```bash
209 | s3audit --bucket=$BUCKET
210 | ```
211 |
212 | !!! summary "Sample result"
213 |
214 | ```bash
215 | (node:294) NOTE: We are formalizing our plans to enter AWS SDK for JavaScript (v2) into maintenance mode in 2023.
216 |
217 | Please migrate your code to use AWS SDK for JavaScript (v3).
218 | For more information, check the migration guide at https://a.co/7PzMCcy
219 | (Use `node --trace-warnings ...` to show where the warning was created)
220 | ❯ Checking 1 bucket
221 | ❯ sensitive-012345678910
222 | ❯ Bucket public access configuration
223 | ✖ BlockPublicAcls is set to false
224 | ✖ IgnorePublicAcls is set to false
225 | ✖ BlockPublicPolicy is set to false
226 | ✖ RestrictPublicBuckets is set to false
227 | ✔ Server side encryption is enabled
228 | ✖ Object versioning is not enabled
229 | ✖ MFA Delete is not enabled
230 | ✔ Static website hosting is disabled
231 | ✖ Bucket policy contains wildcard entities
232 | ✖ Bucket allows public access via ACL
233 | ✖ Logging is not enabled
234 | ✔ CloudFront Distributions
235 | ```
236 |
237 | 3. **Oh no!** It appears that we have quite a bit of work to do to secure this bucket and its contents! Let's quickly verify that this is indeed bad. Remember, the name of the bucket begins with the word `sensitive`. Now, it is just a word, but let's pretend that this word means something... that the data in this bucket is **SENSITIVE**. By running the following command, you can see how easy this data is to access:
238 |
239 | ```bash
240 | echo "Go here: https://$BUCKET.s3.amazonaws.com/customers.csv"
241 | ```
242 |
243 | !!! summary "Sample result"
244 |
245 | ```bash
246 | Go here: https://sensitive-012345678910.s3.amazonaws.com/customers.csv
247 | ```
248 |
249 |     4. Copy and paste the link that is displayed into a new browser tab. Notice that the sensitive CSV file (`customers.csv`) is immediately downloaded without you even being asked for credentials. That is because the object is publicly accessible! That will be the first issue we fix.
250 |
251 | ## Conclusion
252 |
253 | Now that you have the resources in place and realized just how bad this configuration is, let's start fixing this over the next few exercises.
254 |
--------------------------------------------------------------------------------
/workbook/docs/exercises/exercise2.md:
--------------------------------------------------------------------------------
1 | # Exercise 2: Shutting Down Public Access
2 |
3 |
4 |
5 |
6 |
11 |
12 | **Estimated Time to Complete:** 15 minutes
13 |
14 | ## Objectives
15 |
16 | - Remove public access from the S3 bucket
17 | - Prevent future buckets from becoming public across the entire account
18 | - Attempt to change a bucket setting back to public
19 |
20 | ## Challenges
21 |
22 | ### Challenge 1: Remove Public Access From Bucket
23 |
24 | In this first challenge, we need to fix the most critical issue: that our S3 objects are public-facing. But what is allowing this? Surely, this can't be a default configuration for AWS (and it's not). There are three things affecting this:
25 |
26 | - The default **Block public access** settings are disabled
27 | - There is a bucket policy attached to this bucket allowing any user to perform the `GetObject` API call against all objects in the bucket
28 | - There is an ACL configured for the bucket allowing **Everyone** to list and read from the bucket
29 |
30 | Use either the AWS Management Console (as shown in the solution below) or the AWS CLI (if you want to use your CloudShell session) to disable or remove the above configuration issues.
31 |
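If you would rather stay in CloudShell, a minimal CLI sketch of the same cleanup might look like the following (it assumes `$BUCKET` resolves to your `sensitive-` bucket; the console route is walked through in the solution):

```bash
# Resolve the bucket name, as in exercise 1
BUCKET=$(aws s3api list-buckets | \
  jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name')

# 1) Enable all four Block public access settings on the bucket
aws s3api put-public-access-block --bucket $BUCKET \
  --public-access-block-configuration \
  BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true

# 2) Remove the bucket policy that grants GetObject to everyone
aws s3api delete-bucket-policy --bucket $BUCKET

# 3) Reset the bucket ACL so only the bucket owner has access
aws s3api put-bucket-acl --bucket $BUCKET --acl private
```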
32 | ??? cmd "Solution"
33 |
34 | 1. In your web browser, you can navigate to AWS services a few different ways, but the easiest, if you know the name of the service, is to use the search bar at the top of the screen. Type `s3` in the search bar (1) and click on the **S3** result (2).
35 |
36 | {: class="w600" }
37 |
38 | 2. On the next page, you should see your S3 bucket starting with the name `sensitive-`. Click on the name of the bucket (2) to edit its configuration.
39 |
40 | {: class="w600" }
41 |
42 | 3. On the next page, there are a few different areas to access data and modify the bucket configuration (e.g., the Objects, Properties, and Permissions tabs). Since we are concerned with who can access data in this bucket, we will need to navigate to the **Permissions** tab.
43 |
44 | {: class="w300" }
45 |
46 | 4. Under the **Permissions** tab, you will see all three of our configuration issues. The second pane contains our first issue: the **Block public access** settings are all disabled. Begin to enable all of them by clicking on the **Edit** button.
47 |
48 | {: class="w400" }
49 |
50 | 5. On the next page, click the top checkbox next to **Block all public access** (this will enable all other checkboxes) (1) and click the **Save changes** button (2).
51 |
52 | {: class="w600" }
53 |
54 | 6. You will receive a popup asking if you are sure that you want to make these changes. Type `confirm` in the text box and click the **Confirm** button.
55 |
56 | {: class="w400" }
57 |
58 |     7. You should now be forwarded back to the **Permissions** tab. If you scroll down, you will find the next issue: the bucket policy. If you look closely, it allows all principals (the entire internet) to perform the `GetObject` API call against all objects in the bucket (essentially, downloading all data in the bucket). This is now overridden by the last change you made, but just in case that setting gets reset somehow, let's remove the policy as it is certainly not needed. This is as easy as clicking the **Delete** button under **Bucket Policy** (1), entering the requested confirmation text in the popup's text field (2), and clicking the **Delete** button in the popup (3).
59 |
60 | {: class="w600" }
61 |
62 | {: class="w400" }
63 |
64 |     8. Now, if you scroll to the bottom of the **Permissions** tab, you should find the Access control list (ACL) settings. All looks well (i.e., no public settings). But wait: if you had looked earlier, before you made changes to the **Block public access** settings, you would have seen this:
65 |
66 | {: class="w600" }
67 |
68 |         So what happened? When you enabled those **Block public access** settings, the public ACL grants are no longer honored - the bucket no longer allows public access via its ACL!
69 |
70 | 9. And now your data is no longer public-facing! You can verify this if you'd like by trying to download your data using the following URL (replace `BUCKET` with the name of your bucket):
71 |
72 | https://BUCKET.s3.amazonaws.com/customers.csv
73 |
74 | {: class="w600" }
75 |
76 | ### Challenge 2: Prevent Future Public Buckets
77 |
78 | The previous configuration issues were not the hardest to fix, but if they were to occur again, we would have to discover them. Why not, instead, come up with a method to prevent these misconfigurations from happening in the first place?
79 |
80 | Luckily, AWS provides an account-wide setting to prevent public S3 buckets, which **is not** enabled by default. Find this setting and enforce it.
81 |
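For reference, the account-wide equivalent is exposed through the `s3control` API rather than `s3api` (a hedged sketch; it needs your 12-digit account ID, which you can look up with `aws sts get-caller-identity`):

```bash
# Grab the current account ID
ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text)

# Enable all four Block public access settings for the entire account
aws s3control put-public-access-block --account-id $ACCOUNT_ID \
  --public-access-block-configuration \
  BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true
```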
82 | ??? cmd "Solution"
83 |
84 | 1. If you have been poking around the S3 bucket settings looking for a global setting that affects all buckets, you won't find it. It actually lives in the left pane and is intuitively called **Block Public Access settings for this account**. Click on this item.
85 |
86 | {: class="w250" }
87 |
88 | 2. This will bring you to a very familiar configuration. Just as you did in challenge 1, begin to enable all of these settings by clicking on the **Edit** button.
89 |
90 | {: class="w400" }
91 |
92 | 3. On the next page, click the top checkbox next to **Block all public access** (this will enable all other checkboxes) (1) and click the **Save changes** button (2).
93 |
94 | {: class="w600" }
95 |
96 | 4. You will receive a popup asking if you are sure that you want to make these changes. Type `confirm` in the text box and click the **Confirm** button.
97 |
98 | {: class="w400" }
99 |
100 | 5. Now you should no longer be able to create a public bucket. Let's test it!
101 |
102 | ### Challenge 3: Attempt to Configure Public Bucket
103 |
104 | Attempt to undo some of the settings from challenge 1 to see if you are stopped.
105 |
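One quick way to test from CloudShell, before the full rebuild shown in the solution, is to try flipping the bucket ACL back to public (a sketch; expect an `AccessDenied` error now that account-wide Block Public Access is on):

```bash
BUCKET=$(aws s3 ls | grep sensitive- | awk '{print $3}')

# This call succeeded when build.sh ran earlier; it should now be rejected
aws s3api put-bucket-acl --bucket $BUCKET --acl public-read
```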
106 | ??? cmd "Solution"
107 |
108 | 1. We could attempt to set all of the settings back to their original form, but there is an easier way: destroy and rebuild the S3 bucket with CloudFormation. To do this, re-open your CloudShell session like you did in exercise 1 and run the following commands:
109 |
110 | First:
111 |
112 | ```bash
113 | /home/cloudshell-user/avoiding-data-disasters/destroy.sh
114 | ```
115 |
116 | !!! summary "Sample result"
117 |
118 | ```bash
119 | Emptying sensitive-012345678910 bucket... Done
120 | Destroying CloudFormation stack... Done
121 | ```
122 |
123 | Wait a few seconds, and run this command:
124 |
125 | ```bash
126 | /home/cloudshell-user/avoiding-data-disasters/build.sh
127 | ```
128 |
129 | !!! summary "Sample result"
130 |
131 | ```bash
132 | Deploying CloudFormation stack... FAILED
133 | ```
134 |
135 | 2. It failed! That's what we want! We tried to break a rule and AWS wouldn't let us. (If you're curious about the exact reason, see the sketch at the end of this solution.)
136 |
137 | 3. There's another template we can use that does not attempt to create a public bucket; it is deployed by a separate script: `build-nopublic.sh`. Run that script to deploy a private bucket. This should get us back to where we left off for the next few exercises.
138 |
139 | ```bash
140 | /home/cloudshell-user/avoiding-data-disasters/build-nopublic.sh
141 | ```
142 |
143 | !!! summary "Sample result"
144 |
145 | ```bash
146 | Deploying CloudFormation stack... Done
147 | Uploading sensitive data... Done
148 | ```
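
As an aside: whenever a stack creation fails as it did in step 2, you can ask CloudFormation why before deleting or re-creating the stack. A sketch (run while the failed stack still exists):

```bash
# Show the reason(s) CloudFormation reported for resources that failed to create
aws cloudformation describe-stack-events --stack-name avoiding-data-disasters \
    --query "StackEvents[?ResourceStatus=='CREATE_FAILED'].ResourceStatusReason" \
    --output text
```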
149 |
150 | ## Conclusion
151 |
152 | In this exercise, you found just how easy it is to remove public access - and also how to prevent those incorrect settings from showing up in the future! We still have plenty of work ahead to further protect this sensitive bucket.
153 |
--------------------------------------------------------------------------------
/workbook/docs/exercises/exercise3.md:
--------------------------------------------------------------------------------
1 | # Exercise 3: Protecting and Recovering Data with Versioning
2 |
3 |
4 |
5 |
6 |
11 |
12 | **Estimated Time to Complete:** 15 minutes
13 |
14 | ## Objectives
15 |
16 | - Turn on **Versioning** to prevent accidental modifications of this data
17 | - Change the file
18 | - Revert to the original version
19 |
20 | ## Challenges
21 |
22 | ### Challenge 1: Enable Bucket Versioning
23 |
24 | There are a few different methods to ensure your data is backed up and recoverable in cloud storage solutions: versioning (which we'll leverage in this exercise), having the data automatically replicated to another bucket/storage account, and using third-party or custom tooling to replicate the data to another solution.
25 |
26 | Enable **bucket versioning** on your S3 bucket beginning with `sensitive-` to ensure that, when an object is modified in any way, you can restore the original version (we will test this in the next two challenges).
27 |
28 | ??? cmd "Solution"
29 |
30 | 1. Just as you began the previous exercise, use the search bar at the top of the screen to navigate to the S3 service by typing `s3` in the search bar (1) and clicking on the **S3** result (2).
31 |
32 | {: class="w600" }
33 |
34 | 2. Once again, click on the bucket beginning with `sensitive-` to edit its configuration.
35 |
36 | {: class="w600" }
37 |
38 | 3. When you arrive at the next page, instead of navigating to **Permissions** as you did before, click the **Properties** tab, which is where the bucket versioning settings can be found.
39 |
40 | {: class="w600" }
41 |
42 | 4. Under Properties, you can find the **Bucket Versioning** section (1). Notice that **Bucket Versioning** is disabled (2) by default. Change this by clicking on the **Edit** button (3).
43 |
44 | {: class="w600" }
45 |
46 | 5. Making the change to enable versioning is really simple: change the **Bucket Versioning** radio button to **Enable** (1) and click on **Save changes** (2).
47 |
48 | {: class="w600" }
49 |
50 | 6. And now, let's put this to the test!
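
As an aside, if you prefer the command line, the same change can be made with a single AWS CLI call (a sketch; `$BUCKET` is resolved the same way as in the next challenge):

```bash
# Enable versioning on the bucket without touching the console
aws s3api put-bucket-versioning --bucket "$BUCKET" \
    --versioning-configuration Status=Enabled
```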
51 |
52 | ### Challenge 2: Modify customers.csv File
53 |
54 | Test that versioning is working by "accidentally" modifying our critical `customers.csv` file. Ensure that there are now two versions of the file - the original and the modified version.
55 |
56 | ??? cmd "Solution"
57 |
58 | 1. The fastest way to do this is to head back to your **CloudShell** session.
59 |
60 | {: class="w600" }
61 |
62 | 2. Create a new `customers.csv` file and place it in a temporary directory. Its contents can be anything you'd like, but you can use this as an example:
63 |
64 | ```bash
65 | echo 'SANS has the best cloud classes!' > /tmp/customers.csv
66 | echo -e "The content of /tmp/customers.csv is:\n\n$(cat /tmp/customers.csv)"
67 | ```
68 |
69 | !!! summary "Sample result"
70 |
71 | ```bash
72 | The content of /tmp/customers.csv is:
73 |
74 | SANS has the best cloud classes!
75 | ```
76 |
77 | 3. Now, you can use the AWS CLI to replace the original `customers.csv` with this new one you just created like so:
78 |
79 | ```bash
80 | BUCKET=$(aws s3api list-buckets | \
81 | jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name')
82 | aws s3 cp /tmp/customers.csv s3://$BUCKET/customers.csv
83 | ```
84 |
85 | !!! summary "Sample result"
86 |
87 | ```bash
88 | upload: ../../../tmp/customers.csv to s3://sensitive-012345678910/customers.csv
89 | ```
90 |
91 | 4. You have two ways to see if versioning is working: using the Management Console (i.e., web browser) or, as we'll do since we're already here, using the CLI. The following command will show version information about your `customers.csv` file.
92 |
93 | !!! warning
94 |
95 | You may see a "page" of information at a time. To navigate through the results, you can use your arrow keys, space bar, and enter key to review the content. When finished, press `q` to exit this view. (Alternatively, add the `--no-cli-pager` flag to the command to disable paging entirely.)
96 |
97 | ```bash
98 | aws s3api list-object-versions --bucket $BUCKET
99 | ```
100 |
101 | !!! summary "Sample result"
102 |
103 | ```bash
104 | {
105 | "Versions": [
106 | {
107 | "ETag": "\"76be91fb5343d39a88613ee5d3db9c30\"",
108 | "Size": 33,
109 | "StorageClass": "STANDARD",
110 | "Key": "customers.csv",
111 | "VersionId": "nCAKbMViAOx8Z49IiXVlBgZwHFXd1rM5",
112 | "IsLatest": true,
113 | "LastModified": "2023-07-08T13:41:03+00:00",
114 | "Owner": {
115 | "DisplayName": "ryan",
116 | "ID": "e9c322584d211fe214b82aa1a508e8720ed920d53fb3a9c1b8d5625a3548a27d"
117 | }
118 | },
119 | {
120 | "ETag": "\"6d3c6b35840b98b7ca04ceb3a7438764\"",
121 | "Size": 7171,
122 | "StorageClass": "STANDARD",
123 | "Key": "customers.csv",
124 | "VersionId": "null",
125 | "IsLatest": false,
126 | "LastModified": "2023-07-08T13:19:54+00:00",
127 | "Owner": {
128 | "DisplayName": "ryan",
129 | "ID": "e9c322584d211fe214b82aa1a508e8720ed920d53fb3a9c1b8d5625a3548a27d"
130 | }
131 | }
132 | ],
133 | "RequestCharged": null
134 | }
135 | ```
136 |
137 | 5. That is a lot of information about just two versions of the same file (or as AWS puts it, `Key`). If you look closely, you will notice some significant differences between these two versions: the `LastModified` time, the `IsLatest` boolean value (one is `true` and one is `false`), and the `Size` to name a few.
138 |
139 | 6. But which version would be downloaded if someone (or an application) is unaware of this change and just pulls the latest and greatest? Let's find out. Download the file by name only and save it to `/tmp/result.csv`.
140 |
141 | ```bash
142 | aws s3 cp s3://$BUCKET/customers.csv /tmp/result.csv
143 | ```
144 |
145 | !!! summary "Sample result"
146 |
147 | ```bash
148 | download: s3://sensitive-012345678910/customers.csv to ../../../tmp/result.csv
149 | ```
150 |
151 | 7. Now, review its contents.
152 |
153 | ```bash
154 | cat /tmp/result.csv
155 | ```
156 |
157 | !!! summary "Sample result"
158 |
159 | ```bash
160 | SANS has the best cloud classes!
161 | ```
162 |
163 | 8. It's the new `customers.csv` we created! Let's roll back this file to its original content next.
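
Before we do, note that a more compact view of the versions can be handy when telling them apart (a jq sketch over the same `list-object-versions` output):

```bash
# One line per version: ID, whether it is the latest, timestamp, and size
aws s3api list-object-versions --bucket "$BUCKET" --prefix customers.csv \
    | jq -r '.Versions[] | "\(.VersionId)\t\(.IsLatest)\t\(.LastModified)\t\(.Size)"'
```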
164 |
165 | ### Challenge 3: Revert Back to Original File
166 |
167 | Roll the `customers.csv` file back to its original state and test that, when downloading that file by name, you get the correct version of the file.
168 |
169 | ??? cmd "Solution"
170 |
171 | 1. This is as easy as removing the newest version of the file so there is only one version left (the original). Here, we will use the AWS Management Console. You may already be in the S3 service, but to ensure we're all at the same place, navigate to the S3 service by typing `s3` in the search bar (1) and clicking on the **S3** result (2).
172 |
173 | {: class="w600" }
174 |
175 | 2. Once again, click on the bucket beginning with `sensitive-` to edit its configuration.
176 |
177 | {: class="w600" }
178 |
179 | 3. To view or modify configurations of individual objects, click on the object (in our case, `customers.csv`).
180 |
181 | {: class="w600" }
182 |
183 | 4. Notice at the top of the next page, there is a **Versions** tab, click on it to see all versions of this file.
184 |
185 | {: class="w600" }
186 |
187 | 5. Here is where you will find the two different versions of the file. To remove the most recent (incorrect) version, place a check next to the top entry (it should contain **Current version** in the **Version ID**) (1) and click **Delete** (2).
188 |
189 | {: class="w600" }
190 |
191 | 6. The next page, more or less, is asking if you're sure. We are, so type `permanently delete` in the text field (1) and click **Delete objects** (2).
192 |
193 | {: class="w600" }
194 |
195 | 7. You can click **Close** on the next screen.
196 |
197 | {: class="w400" }
198 |
199 | 8. And now, let's test this just like we did in the last challenge. Return to your CloudShell session and run the following commands:
200 |
201 | !!! note
202 |
203 | Notice that we are using a trick here so that we don't write this sensitive data to yet another disk: copying to `stdout` (`-`) instead of to a file.
204 |
205 | ```bash
206 | aws s3 cp s3://$BUCKET/customers.csv -
207 | ```
208 |
209 | !!! summary "Sample result"
210 |
211 | ```bash
212 | id,first_name,last_name,email,ip_address,cc_num
213 | 1,Devonna,Misselbrook,dmisselbrook0@scientificamerican.com,244.90.204.78,3546634243157105
214 | 2,Angie,Woolmer,awoolmer1@spiegel.de,215.80.41.15,4405492437748769
215 |
216 |
217 |
218 | 98,Laure,Templeman,ltempleman2p@slideshare.net,218.83.151.153,3574682022422849
219 | 99,Slade,O'Halloran,sohalloran2q@yolasite.com,138.163.159.16,5048370366368397
220 | 100,Doralyn,Drydale,ddrydale2r@google.com,55.9.37.32,5305240410395993
221 | ```
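
For completeness, the same rollback could have been performed without leaving CloudShell by deleting the unwanted version directly (a sketch; replace `VERSION_ID` with the ID that `list-object-versions` reports for the incorrect, latest version):

```bash
# Permanently delete one specific version of the object
aws s3api delete-object --bucket "$BUCKET" --key customers.csv --version-id VERSION_ID
```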
222 |
223 | ## Conclusion
224 |
225 | And now we have a method to restore data in the event that it is accidentally modified! If you have time, explore some other methods like [Bucket Replication](https://docs.aws.amazon.com/AmazonS3/latest/userguide/replication.html) which allows for availability in the event that the original bucket or region is unreachable.
226 |
--------------------------------------------------------------------------------
/workbook/docs/exercises/exercise4.md:
--------------------------------------------------------------------------------
1 | # Exercise 4: Monitoring
2 |
3 |
4 |
5 |
6 |
11 |
12 | **Estimated Time to Complete:** 15 minutes
13 |
14 | !!! warning "Disclaimer"
15 |
16 | AWS S3 access logging is inconsistent in how quickly it starts logging once enabled. This exercise may or may not be successful for you, but you will at least see how to enable logging.
17 |
18 | You can always revisit this exercise's challenges 2 and 3 once logging has been enabled for some time to have an experience more consistent with what is expected here.
19 |
20 | ## Objectives
21 |
22 | - Turn on bucket access logging
23 | - Access the file using the AWS CLI
24 | - Review the access log data capturing this request
25 |
26 | ## Challenges
27 |
28 | ### Challenge 1: Turn On Bucket Access Logging
29 |
30 | Since this `customers.csv` file and anything else that may end up in the `sensitive-` bucket is... *sensitive*, you will likely want to monitor any access to objects in this bucket. Create a new bucket to store log information called `security-` followed by your AWS account number and set up the `sensitive-` bucket to log access attempts to this new bucket.
31 |
32 | ??? cmd "Solution"
33 |
34 | 1. The fastest way to create a new bucket is to head back to your **CloudShell** session.
35 |
36 | {: class="w600" }
37 |
38 | 2. Grab your AWS account number and create a new bucket called `security-` followed by the account number.
39 |
40 | ```bash
41 | BUCKET=$(aws s3api list-buckets | \
42 | jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name')
43 | REGION=$(aws s3api get-bucket-location --bucket $BUCKET --query LocationConstraint --output text)
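# NOTE: REGION is captured here for reference but is not used below; outside of
# us-east-1, create-bucket would also need: --create-bucket-configuration LocationConstraint=$REGION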
44 | ACCTNUM=$(aws sts get-caller-identity --query 'Account' --output text)
45 | echo "The account number is: $ACCTNUM"
46 | aws s3api create-bucket --bucket security-$ACCTNUM
47 | ```
48 |
49 | !!! summary "Sample result"
50 |
51 | ```bash
52 | The account number is: 012345678910
53 | {
54 | "Location": "/security-012345678910"
55 | }
56 | ```
57 |
58 | 3. Now, venture back to the AWS Management Console's S3 service as you had done previously. Navigate to the S3 service by typing `s3` in the search bar (1) and clicking on the **S3** result (2).
59 |
60 | {: class="w600" }
61 |
62 | 4. You should now see **two** buckets: one beginning with `security-` and one beginning with `sensitive-`. Since we want to log interactions with the `sensitive-` objects, click on the bucket beginning with `sensitive-`.
63 |
64 | {: class="w600" }
65 |
66 | 5. The access logging settings can be found under the **Properties** tab. Click on that tab to continue.
67 |
68 | {: class="w600" }
69 |
70 | 6. If you scroll mid-way down the next page, you should find the **Server access logging** section (1) as well as its default setting of **Disabled** (2). To change this, click the **Edit** button (3).
71 |
72 | {: class="w600" }
73 |
74 | 7. To begin enabling access logging, simply click the **Enable** radio button (1). You will be asked where to store the logs, so you can click **Browse S3** (2) to see the available options.
75 |
76 | !!! note
77 |
78 | Notice that AWS notifies you that it will update the receiving bucket's policy so that it can adequately receive the logging information.
79 |
80 | {: class="w600" }
81 |
82 | 8. You should now see a list of available S3 buckets. **MAKE SURE TO CHOOSE THE ONE BEGINNING WITH `security-`!** (1) and then click **Choose path** (2).
83 |
84 | {: class="w600" }
85 |
86 | 9. You should now see your bucket appear in the **Target bucket** field (1). Click **Save changes** (2) to enforce bucket logging.
87 |
88 | {: class="w600" }
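
As an aside, the same logging configuration can be applied from the CLI (a sketch; `$BUCKET` and `$ACCTNUM` as defined earlier in this challenge). Note that, unlike the console, this call will not update the target bucket's permissions for you:

```bash
# Point server access logging for the sensitive- bucket at the security- bucket
aws s3api put-bucket-logging --bucket "$BUCKET" \
    --bucket-logging-status "{\"LoggingEnabled\": {\"TargetBucket\": \"security-$ACCTNUM\", \"TargetPrefix\": \"\"}}"
```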
89 |
90 | ### Challenge 2: Access the customers.csv File
91 |
92 | To generate some log data, simply download the `customers.csv` file in your CloudShell session.
93 |
94 | ??? cmd "Solution"
95 |
96 | 1. Head back to your **CloudShell** session.
97 |
98 | {: class="w600" }
99 |
100 | 2. Just like in exercise 3, challenge 3, use some Command Line Kung Fu to access the `customers.csv` file content.
101 |
102 | ```bash
103 | BUCKET=$(aws s3api list-buckets | \
104 | jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name')
105 | aws s3 cp s3://$BUCKET/customers.csv -
106 | ```
107 |
108 | !!! summary "Sample result"
109 |
110 | ```bash
111 | id,first_name,last_name,email,ip_address,cc_num
112 | 1,Devonna,Misselbrook,dmisselbrook0@scientificamerican.com,244.90.204.78,3546634243157105
113 | 2,Angie,Woolmer,awoolmer1@spiegel.de,215.80.41.15,4405492437748769
114 |
115 |
116 |
117 | 98,Laure,Templeman,ltempleman2p@slideshare.net,218.83.151.153,3574682022422849
118 | 99,Slade,O'Halloran,sohalloran2q@yolasite.com,138.163.159.16,5048370366368397
119 | 100,Doralyn,Drydale,ddrydale2r@google.com,55.9.37.32,5305240410395993
120 | ```
121 |
122 | 3. That was easy! Let's see if any log data shows our interaction.
123 |
124 | ### Challenge 3: Review the Access Log Data
125 |
126 | Review the access log data in the bucket beginning with `security-`.
127 |
128 | !!! note
129 |
130 | It may take 5-10 minutes until the data arrives in the bucket.
131 |
132 | ??? cmd "Solution"
133 |
134 | 1. We will continue this work from the **CloudShell** session.
135 |
136 | 2. First, let's see if the log data has arrived. Since we'll be referencing the `security-` bucket quite often, set an environment variable called `LOGBUCKET` which references this bucket name.
137 |
138 | ```bash
139 | LOGBUCKET=$(aws s3api list-buckets | \
140 | jq -r '.Buckets[] | select(.Name | startswith("security-")) | .Name')
141 | echo "The log bucket is: $LOGBUCKET"
142 | ```
143 |
144 | !!! summary "Sample result"
145 |
146 | ```bash
147 | The log bucket is: security-012345678910
148 | ```
149 |
150 | 3. Now that we've retrieved the bucket name, let's see if there is any data in there.
151 |
152 | !!! warning
153 |
154 | If there are no results, the data just has not arrived yet. In fact, AWS' [documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerLogs.html#LogDeliveryBestEffort) states that delivery is best-effort: it can take hours, and in rare cases a record may never be delivered at all.
155 |
156 | ```bash
157 | aws s3 ls s3://$LOGBUCKET/
158 | ```
159 |
160 | !!! summary "Sample result"
161 |
162 | ```bash
163 | 2023-07-08 15:11:09 558 2023-07-08-15-11-08-542B672B0A76BD48
164 |
165 |
166 |
167 | 2023-07-08 15:20:07 656 2023-07-08-15-20-06-350AE97D7741A9F1
168 | ```
169 |
170 | 4. To analyze this data, you can use the AWS CLI to download the log objects to `/tmp/s3logs/` like so:
171 |
172 | ```bash
173 | mkdir /tmp/s3logs
174 | aws s3 cp --recursive s3://$LOGBUCKET/ /tmp/s3logs/
175 | ```
176 |
177 | !!! summary "Sample result"
178 |
179 | ```bash
180 | download: s3://security-012345678910/2023-07-08-15-12-11-7BD4A500BC707BB9 to ../../../tmp/s3logs/2023-07-08-15-12-11-7BD4A500BC707BB9
181 |
182 |
183 |
184 | download: s3://security-012345678910/2023-07-08-15-20-06-350AE97D7741A9F1 to ../../../tmp/s3logs/2023-07-08-15-20-06-350AE97D7741A9F1
185 | ```
186 |
187 | 5. To view any records related to `customers.csv`, we can begin with a simple `grep` to show any of these files containing our sensitive file's name.
188 |
189 | ```bash
190 | grep customers.csv /tmp/s3logs/*
191 | ```
192 |
193 | !!! summary "Sample result"
194 |
195 | ```bash
196 | /tmp/s3logs/2023-07-08-15-35-59-69BCA2D0660A17B4:e9c322584d211fe214b82aa1a508e8720ed920d53fb3a9c1b8d5625a3548a27d sensitive-206757820151 [08/Jul/2023:14:27:40 +0000] 44.211.241.53 e9c322584d211fe214b82aa1a508e8720ed920d53fb3a9c1b8d5625a3548a27d N77G2TV3SB07S8KE REST.HEAD.OBJECT customers.csv "HEAD /customers.csv HTTP/1.1" 200 - - 7171 15 - "-" "aws-cli/2.12.5 Python/3.11.4 Linux/4.14.255-314-253.539.amzn2.x86_64 exec-env/CloudShell exe/x86_64.amzn.2 prompt/off command/s3.cp" - 06yjXKKPiFPCD9bJWc+vx64/ppK3UmX/RAYbMtA10kaUfZw1OQKu/bNhpBmpiEsKX/6pq7LOHKE= SigV4 ECDHE-RSA-AES128-GCM-SHA256 AuthHeader sensitive-206757820151.s3.us-east-1.amazonaws.com TLSv1.2 - -
197 | /tmp/s3logs/2023-07-08-15-35-59-69BCA2D0660A17B4:e9c322584d211fe214b82aa1a508e8720ed920d53fb3a9c1b8d5625a3548a27d sensitive-206757820151 [08/Jul/2023:14:27:40 +0000] 44.211.241.53 e9c322584d211fe214b82aa1a508e8720ed920d53fb3a9c1b8d5625a3548a27d N77ZMY7RVA115S35 REST.GET.OBJECT customers.csv "GET /customers.csv HTTP/1.1" 200 - 7171 7171 27 26 "-" "aws-cli/2.12.5 Python/3.11.4 Linux/4.14.255-314-253.539.amzn2.x86_64 exec-env/CloudShell exe/x86_64.amzn.2 prompt/off command/s3.cp" - t6Ecnw35IOOpvmffdz9iDHHOoG6zbt5YKbtxxfRIy/QqtjrJYqdlZZLHJrzRlmUkOEUBpV3ctBQ= SigV4 ECDHE-RSA-AES128-GCM-SHA256 AuthHeader sensitive-206757820151.s3.us-east-1.amazonaws.com TLSv1.2 - -
198 | ```
199 |
200 | 6. Those are some **noisy** logs with an unknown (for now), space-delimited structure. The various elements of an S3 access log are documented [here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/LogFormat.html).
201 |
202 | 7. Let's cut to the chase and review some key information like:
203 |
204 | - What was the date/time of the access attempt? That is the 3rd and 4th space-delimited values (there is a space between the time and time zone offset)
205 |
206 | - Was it a change or just read access? You can gather this by looking at the REST API call used (8th space-delimited field)
207 |
208 | - Which IP address made the request? That is the 5th space-delimited field
209 |
210 | - Which tool was likely used to make the request (i.e., User-Agent string)? This one is tricky, but instead of counting space-delimited fields, we can look at double-quotes. If we were delimiting the content by double-quotes, we would find that the 6th field is the User-Agent string.
211 |
212 | 8. Let's put all of that together to extract our data of interest. We'll even use a bit of command line magic to output this as JSON.
213 |
214 | ```bash
215 | IFS=$'\n'
216 | for LOG in $(grep customers.csv /tmp/s3logs/*); do
217 | echo -n '{'
218 | echo -n '"timestamp": "'
219 | printf '%s' "$LOG" | awk -F ' ' '{print $3" "$4}' | tr -d '\n'
220 | echo -n '",'
221 | echo -n '"sourceIpAddress": "'
222 | printf '%s' "$LOG" | awk -F ' ' '{print $5}' | tr -d '\n'
223 | echo -n '",'
224 | echo -n '"restMethod": "'
225 | printf '%s' "$LOG" | awk -F ' ' '{print $8}' | tr -d '\n'
226 | echo -n '",'
227 | echo -n '"userAgent": "'
228 | printf '%s' "$LOG" | awk -F '"' '{print $6}' | tr -d '\n'
229 | echo -n '"'
230 | echo '}'
231 | done | jq .
232 | ```
233 |
234 | !!! summary "Sample result"
235 |
236 | ```bash
237 | {
238 | "timestamp": "[08/Jul/2023:14:27:40 +0000]",
239 | "sourceIpAddress": "44.211.241.53",
240 | "restMethod": "REST.HEAD.OBJECT",
241 | "userAgent": "aws-cli/2.12.5 Python/3.11.4 Linux/4.14.255-314-253.539.amzn2.x86_64 exec-env/CloudShell exe/x86_64.amzn.2 prompt/off command/s3.cp"
242 | }
243 | {
244 | "timestamp": "[08/Jul/2023:14:27:40 +0000]",
245 | "sourceIpAddress": "44.211.241.53",
246 | "restMethod": "REST.GET.OBJECT",
247 | "userAgent": "aws-cli/2.12.5 Python/3.11.4 Linux/4.14.255-314-253.539.amzn2.x86_64 exec-env/CloudShell exe/x86_64.amzn.2 prompt/off command/s3.cp"
248 | }
249 | ```
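
If the loop above feels clunky, the same extraction can be written as a single `awk` program (an equivalent sketch; field positions are as documented in step 7):

```bash
# $1 holds everything before the first double-quote; split it on spaces for the
# timestamp (f[3], f[4]), source IP (f[5]), and REST method (f[8]). $6 (quote-delimited)
# is the User-Agent string.
grep -h customers.csv /tmp/s3logs/* | awk -F'"' '{
    split($1, f, " ")
    printf "{\"timestamp\": \"%s %s\", \"sourceIpAddress\": \"%s\", \"restMethod\": \"%s\", \"userAgent\": \"%s\"}\n", f[3], f[4], f[5], f[8], $6
}' | jq .
```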
250 |
251 | ## Conclusion
252 |
253 | If you were fortunate enough to have the logs appear quickly, congrats! If not, many cloud providers do require patience at times - especially when it comes to logging. Now that we have logging set up for our more critical data, let's move on to our final major exercise: **Reviewing Our Work and Shifting Left**.
254 |
--------------------------------------------------------------------------------
/workbook/docs/exercises/exercise5.md:
--------------------------------------------------------------------------------
1 | # Exercise 5: Reviewing Our Work and Shifting Left
2 |
3 |
4 |
5 |
6 |
11 |
12 | **Estimated Time to Complete:** 20 minutes
13 |
14 | ## Objectives
15 |
16 | - Re-run `s3audit` to show the progress we made in just a few short hours
17 | - Generate a more appropriate CloudFormation template
18 | - Assess the new template with `checkov` to discover any remaining issues
19 |
20 | ## Challenges
21 |
22 | ### Challenge 1: Re-Run s3audit
23 |
24 | Re-run `s3audit` to see if you've cleaned up all of the findings discovered previously.
25 |
26 | ??? cmd "Solution"
27 |
28 | 1. All of the challenges in this exercise are performed in your **CloudShell** session. Once more, click on the icon near the top-right that looks like a command prompt to start a **CloudShell** session.
29 |
30 | {: class="w500" }
31 |
32 | 2. So that `s3audit` performs its checks against the correct bucket, use the AWS CLI with its `aws s3api list-buckets` command to gather information about our deployed buckets and then pass that information to the `jq` utility to parse the data and extract the bucket name beginning with the text `sensitive-`.
33 |
34 | ```bash
35 | BUCKET=$(aws s3api list-buckets | \
36 | jq -r '.Buckets[] | select(.Name | startswith("sensitive-")) | .Name')
37 | echo "The bucket to assess is: $BUCKET"
38 | ```
39 |
40 | !!! summary "Sample result"
41 |
42 | ```bash
43 | The bucket to assess is: sensitive-012345678910
44 | ```
45 |
46 | 3. You can tell `s3audit` to look at a specific bucket using the `--bucket` flag. Run the command as follows to see the results of your security configuration for your `sensitive-*` bucket.
47 |
48 | ```bash
49 | s3audit --bucket=$BUCKET
50 | ```
51 |
52 | !!! summary "Sample result"
53 |
54 | ```bash
55 | (node:204) NOTE: We are formalizing our plans to enter AWS SDK for JavaScript (v2) into maintenance mode in 2023.
56 |
57 | Please migrate your code to use AWS SDK for JavaScript (v3).
58 | For more information, check the migration guide at https://a.co/7PzMCcy
59 | (Use `node --trace-warnings ...` to show where the warning was created)
60 | ❯ Checking 1 bucket
61 | ❯ sensitive-206757820151
62 | ✔ Bucket public access configuration
63 | ✔ BlockPublicAcls
64 | ✔ IgnorePublicAcls
65 | ✔ BlockPublicPolicy
66 | ✔ RestrictPublicBuckets
67 | ✔ Server side encryption is enabled
68 | ✔ Object versioning is enabled
69 | ✖ MFA Delete is not enabled
70 | ✔ Static website hosting is disabled
71 | ✔ Bucket policy doesn't allow a wildcard entity
72 | ✔ Bucket ACL doesn't allow access to "Everyone" or "Any authenticated AWS user"
73 | ✔ Logging is enabled
74 | ✔ Bucket is not associated with any CloudFront distributions
75 | ```
76 |
77 | 4. MUCH better. In fact, there is only one availability-related finding remaining: *MFA Delete is not enabled*.
78 |
79 | !!! note
80 |
81 | We will leave this open for now, but feel free to explore how to enable this in practice if you want to ensure that the bucket owner (you) must have MFA enabled to delete any versions or adjust the versioning state of the bucket. This may mean that the teardown method used in exercise 6 may not work as written.
82 |
83 | More on this [here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiFactorAuthenticationDelete.html).
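
For reference, enabling MFA Delete is an API-only operation that must be performed by the account's root user (a sketch, not meant to be run as part of this workshop; `SERIAL` is the root user's MFA device ARN and `CODE` is a current token from that device):

```bash
# Enable MFA Delete alongside versioning (root credentials + MFA required)
aws s3api put-bucket-versioning --bucket "$BUCKET" \
    --versioning-configuration Status=Enabled,MFADelete=Enabled \
    --mfa "SERIAL CODE"
```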
84 |
85 | ### Challenge 2: Generate a Better CloudFormation Template
86 |
87 | Manually fixing these issues is fine, but if anyone were to reuse even the most recent CloudFormation template (`TotallyMoreSecure.yaml`, which is referenced by the `build-nopublic.sh` script), those remaining issues would return - forcing us to redo all of that work all over again.
88 |
89 | Tear down the current deployment, create a new CloudFormation template called `MostSecure.yaml`, and run the `build-final.sh` script. Afterwards, re-run `s3audit` to ensure that the new deployment meets all of the checks that you manually configured previously.
90 |
91 | ??? cmd "Solution"
92 |
93 | 1. Start by running the `destroy.sh` script once more to tear down the current S3 bucket.
94 |
95 | ```bash
96 | /home/cloudshell-user/avoiding-data-disasters/destroy.sh
97 | ```
98 |
99 | !!! summary "Sample result"
100 |
101 | ```bash
102 | Emptying sensitive-012345678910 bucket... Done
103 | Destroying CloudFormation stack... Done
104 | ```
105 |
106 | 2. We can start with `TotallyMoreSecure.yaml` as a template and apply to it the manual changes we performed previously. Here is what we will begin with:
107 |
108 | !!! summary "Starting point"
109 |
110 | ```yaml
111 | AWSTemplateFormatVersion: 2010-09-09
112 | Resources:
113 | SensitiveBucket:
114 | Type: AWS::S3::Bucket
115 | Properties:
116 | BucketName: !Join
117 | - ''
118 | - - 'sensitive-'
119 | - !Ref 'AWS::AccountId'
120 | OwnershipControls:
121 | Rules:
122 | - ObjectOwnership: 'BucketOwnerPreferred'
123 | ```
124 |
125 | 3. Next, you will need to add versioning to address availability concerns. To do this, there is a configuration element called `VersioningConfiguration` that must be added. You can learn more about this [here](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-versioning) if you'd like. The value of this item should be set to `Status: Enabled` to enforce versioning. Now, our template looks like this:
126 |
127 | !!! summary "Updated template"
128 |
129 | ```yaml
130 | AWSTemplateFormatVersion: 2010-09-09
131 | Resources:
132 | SensitiveBucket:
133 | Type: AWS::S3::Bucket
134 | Properties:
135 | BucketName: !Join
136 | - ''
137 | - - 'sensitive-'
138 | - !Ref 'AWS::AccountId'
139 | OwnershipControls:
140 | Rules:
141 | - ObjectOwnership: 'BucketOwnerPreferred'
142 | VersioningConfiguration:
143 | Status: Enabled
144 | ```
145 |
146 | 4. Now, for the access logging. This one is a bit more cumbersome: we need to add the `LoggingConfiguration` element and, since the log bucket's name will be different for each workshop participant, reference it dynamically (just like the original code does for the `sensitive-` bucket). As noted [here](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-loggingconfig.html), we need to reference a `DestinationBucketName`, which is our `security-` bucket. This makes our template read as follows:
147 |
148 | !!! summary "Updated template"
149 |
150 | ```yaml
151 | AWSTemplateFormatVersion: 2010-09-09
152 | Resources:
153 | SensitiveBucket:
154 | Type: AWS::S3::Bucket
155 | Properties:
156 | BucketName: !Join
157 | - ''
158 | - - 'sensitive-'
159 | - !Ref 'AWS::AccountId'
160 | OwnershipControls:
161 | Rules:
162 | - ObjectOwnership: 'BucketOwnerPreferred'
163 | VersioningConfiguration:
164 | Status: Enabled
165 | LoggingConfiguration:
166 | DestinationBucketName: !Join
167 | - ''
168 | - - 'security-'
169 | - !Ref 'AWS::AccountId'
170 | ```
171 |
172 | 5. While we're at it, let's make sure that the block public access settings are in place just in case some defaults were to change in the future (and also to make some assessment tools happy). In this case, the `PublicAccessBlockConfiguration` must be included as noted [here](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-publicaccessblockconfiguration). We should include all four options (`BlockPublicAcls`, `BlockPublicPolicy`, `IgnorePublicAcls`, `RestrictPublicBuckets`) and set them to `true`. That will leave us with this:
173 |
174 | !!! summary "Updated template"
175 |
176 | ```yaml
177 | AWSTemplateFormatVersion: 2010-09-09
178 | Resources:
179 | SensitiveBucket:
180 | Type: AWS::S3::Bucket
181 | Properties:
182 | BucketName: !Join
183 | - ''
184 | - - 'sensitive-'
185 | - !Ref 'AWS::AccountId'
186 | OwnershipControls:
187 | Rules:
188 | - ObjectOwnership: 'BucketOwnerPreferred'
189 | VersioningConfiguration:
190 | Status: Enabled
191 | LoggingConfiguration:
192 | DestinationBucketName: !Join
193 | - ''
194 | - - 'security-'
195 | - !Ref 'AWS::AccountId'
196 | PublicAccessBlockConfiguration:
197 | BlockPublicAcls: true
198 | BlockPublicPolicy: true
199 | IgnorePublicAcls: true
200 | RestrictPublicBuckets: true
201 | ```
202 |
203 | 6. To generate this bucket, we must first create a new file with this YAML content. Use the following heredoc to write the YAML content to `/home/cloudshell-user/avoiding-data-disasters/MostSecure.yaml`.
204 |
205 | ```bash
206 | cat <<'EOF' > /home/cloudshell-user/avoiding-data-disasters/MostSecure.yaml
207 | AWSTemplateFormatVersion: 2010-09-09
208 | Resources:
209 | SensitiveBucket:
210 | Type: AWS::S3::Bucket
211 | Properties:
212 | BucketName: !Join
213 | - ''
214 | - - 'sensitive-'
215 | - !Ref 'AWS::AccountId'
216 | OwnershipControls:
217 | Rules:
218 | - ObjectOwnership: 'BucketOwnerPreferred'
219 | VersioningConfiguration:
220 | Status: Enabled
221 | LoggingConfiguration:
222 | DestinationBucketName: !Join
223 | - ''
224 | - - 'security-'
225 | - !Ref 'AWS::AccountId'
226 | PublicAccessBlockConfiguration:
227 | BlockPublicAcls: true
228 | BlockPublicPolicy: true
229 | IgnorePublicAcls: true
230 | RestrictPublicBuckets: true
231 | EOF
232 | ```
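
Before running the build script in the next step, you can optionally ask CloudFormation to syntax-check the new template (a sketch):

```bash
# Validate the template's syntax without creating any resources
aws cloudformation validate-template \
    --template-body file:///home/cloudshell-user/avoiding-data-disasters/MostSecure.yaml
```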
233 |
234 | 7. And now, the moment of truth! Run `build-final.sh` to deploy this new, much more secure S3 bucket.
235 |
236 | ```bash
237 | /home/cloudshell-user/avoiding-data-disasters/build-final.sh
238 | ```
239 |
240 | !!! summary "Sample result"
241 |
242 | ```bash
243 | Deploying CloudFormation stack one last time... Done
244 | Uploading sensitive data... Done
245 | ```
246 |
247 | ### Challenge 3: Assess New Template With checkov
248 |
249 | There was a lot of building, finding issues with an assessment tool, tearing down, adjusting code, and repeating this process throughout the workshop... but there's a better way to get in front of these issues - even before deploying ANYTHING. So, to "shift security left", analyze the original and final CloudFormation templates to see how an assessment tool can tell us what security issues would have arisen had we deployed this Infrastructure as Code (IaC).
250 |
251 | The security tool you can use is **Checkov**. This tool is not installed in **AWS CloudShell**, so you will need to get it up and running before you can assess the `TotallySecure.yaml` and `MostSecure.yaml` files.
252 |
253 | ??? cmd "Solution"
254 |
255 | 1. To begin, you must first install **Checkov**. Since **Checkov** is a tool written in Python, you can set up a virtual environment in your `/home/cloudshell-user/avoiding-data-disasters` directory and install it using `pip`, like so:
256 |
257 | ```bash
258 | cd /home/cloudshell-user/avoiding-data-disasters
259 | python3 -m venv .venv
260 | source .venv/bin/activate
261 | pip install checkov
262 | ```
263 |
264 | !!! summary "Sample result"
265 |
266 | ```bash
267 |
268 |
269 | 0.7.5 update-checker-0.18.0 uritools-4.0.1 urllib3-1.26.16 wcwidth-0.2.6 websocket-client-1.6.1 xmltodict-0.13.0 yarl-1.9.2 zipp-3.15.0
270 | WARNING: You are using pip version 22.0.4; however, version 23.1.2 is available.
271 | You should consider upgrading via the '/home/cloudshell-user/avoiding-data-disasters/.venv/bin/python3 -m pip install --upgrade pip' command.
272 | ```
273 |
274 | 2. To ensure that **Checkov** was installed properly and to see what options it has, you can run the following:
275 |
276 | ```bash
277 | checkov --help
278 | ```
279 |
280 | !!! summary "Sample result"
281 |
282 | ```bash
283 | usage: checkov [-h] [-v] [--support] [-d DIRECTORY] [--add-check]
284 | [-f FILE [FILE ...]] [--skip-path SKIP_PATH]
285 | [--external-checks-dir EXTERNAL_CHECKS_DIR]
286 | [--external-checks-git EXTERNAL_CHECKS_GIT] [-l]
287 | [-o {cli,csv,cyclonedx,cyclonedx_json,json,junitxml,github_failed_only,gitlab_sast,sarif,spdx}]
288 | [--output-file-path OUTPUT_FILE_PATH] [--output-bc-ids]
289 | [--include-all-checkov-policies] [--quiet] [--compact]
290 |
291 |
292 |
293 | --openai-api-key OPENAI_API_KEY
294 | Add an OpenAI API key to enhance finding guidelines by
295 | sending violated policies and resource code to OpenAI
296 | to request remediation guidance. This will use your
297 | OpenAI credits. Set your number of findings that will
298 | receive enhanced guidelines using
299 | CKV_OPENAI_MAX_FINDINGS [env var: CKV_OPENAI_API_KEY]
300 |
301 | Args that start with '--' can also be set in a config file (/home/cloudshell-
302 | user/avoiding-data-disasters/.checkov.yaml or /home/cloudshell-user/avoiding-
303 | data-disasters/.checkov.yml or /home/cloudshell-user/.checkov.yaml or
304 | /home/cloudshell-user/.checkov.yml or specified via --config-file). The config
305 | file uses YAML syntax and must represent a YAML 'mapping' (for details, see
306 | http://learn.getgrav.org/advanced/yaml). In general, command-line values
307 | override environment variables which override config file values which
308 | override defaults.
309 | ```
310 |
311 | 3. That is quite verbose. You may notice that this tool supports a large variety of IaC offerings, including CloudFormation. It is even smart enough, in most cases, to understand the IaC product it is testing, so we can just use the `--file` flag to target our original and most secure CloudFormation template files. Let's start with `TotallySecure.yaml`.
312 |
313 | ```bash
314 | checkov --file /home/cloudshell-user/avoiding-data-disasters/TotallySecure.yaml
315 | ```
316 |
317 | !!! summary "Sample result"
318 |
319 | ```bash
320 |
321 |
322 | Check: CKV_AWS_56: "Ensure S3 bucket has 'restrict_public_bucket' enabled"
323 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
324 | File: /TotallySecure.yaml:3-17
325 | Guide: https://docs.paloaltonetworks.com/content/techdocs/en_US/prisma/prisma-cloud/prisma-cloud-code-security-policy-reference/aws-policies/s3-policies/bc-aws-s3-22.html
326 |
327 | 3 | SensitiveBucket:
328 | 4 | Type: AWS::S3::Bucket
329 | 5 | Properties:
330 | 6 | BucketName: !Join
331 | 7 | - ''
332 | 8 | - - 'sensitive-'
333 | 9 | - !Ref 'AWS::AccountId'
334 | 10 | OwnershipControls:
335 | 11 | Rules:
336 | 12 | - ObjectOwnership: 'BucketOwnerPreferred'
337 | 13 | PublicAccessBlockConfiguration:
338 | 14 | BlockPublicAcls: false
339 | 15 | BlockPublicPolicy: false
340 | 16 | IgnorePublicAcls: false
341 | 17 | RestrictPublicBuckets: false
342 | ```
343 |
344 | 4. Once again, this tool is quite chatty, but it does show you which checks failed, a link to more detail, and the snippet of your code that is not compliant. If you want to see just the failed resources and the title of each check, you can pipe the results to `grep` as follows:
345 |
346 | ```bash
347 | checkov --file /home/cloudshell-user/avoiding-data-disasters/TotallySecure.yaml \
348 | | grep -B1 FAILED
349 | ```
350 |
351 | !!! summary "Sample result"
352 |
353 | ```bash
354 | Check: CKV_AWS_53: "Ensure S3 bucket has block public ACLS enabled"
355 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
356 | --
357 | Check: CKV_AWS_55: "Ensure S3 bucket has ignore public ACLs enabled"
358 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
359 | --
360 | Check: CKV_AWS_54: "Ensure S3 bucket has block public policy enabled"
361 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
362 | --
363 | Check: CKV_AWS_21: "Ensure the S3 bucket has versioning enabled"
364 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
365 | --
366 | Check: CKV_AWS_18: "Ensure the S3 bucket has access logging enabled"
367 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
368 | --
369 | Check: CKV_AWS_56: "Ensure S3 bucket has 'restrict_public_bucket' enabled"
370 | FAILED for resource: AWS::S3::Bucket.SensitiveBucket
371 | ```
372 |
373 | 5. It appears that **Checkov** discovered 6 findings with our original code. Now, let's see if there are any issues with the latest, most secure code.
374 |
375 | ```bash
376 | checkov --file /home/cloudshell-user/avoiding-data-disasters/MostSecure.yaml
377 | ```
378 |
379 | !!! summary "Sample result"
380 |
381 | ```bash
382 |
383 |
384 | Check: CKV_AWS_56: "Ensure S3 bucket has 'restrict_public_bucket' enabled"
385 | PASSED for resource: AWS::S3::Bucket.SensitiveBucket
386 | File: /MostSecure.yaml:3-24
387 | Guide: https://docs.paloaltonetworks.com/content/techdocs/en_US/prisma/prisma-cloud/prisma-cloud-code-security-policy-reference/aws-policies/s3-policies/bc-aws-s3-22.html
388 | Check: CKV_AWS_57: "Ensure the S3 bucket does not allow WRITE permissions to everyone"
389 | PASSED for resource: AWS::S3::Bucket.SensitiveBucket
390 | File: /MostSecure.yaml:3-24
391 | Guide: https://docs.paloaltonetworks.com/content/techdocs/en_US/prisma/prisma-cloud/prisma-cloud-code-security-policy-reference/aws-policies/s3-policies/s3-2-acl-write-permissions-everyone.html
392 | ```
393 |
394 | 6. You should have **no remaining findings**! This would have saved us a lot of time had we used this tool first!
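
As an aside, instead of piping to `grep` as in step 4, **Checkov**'s own `--quiet` and `--compact` flags (both visible in the help output earlier) trim the report to failed checks only:

```bash
# --quiet shows only failed checks; --compact suppresses the code snippets
checkov --quiet --compact --file /home/cloudshell-user/avoiding-data-disasters/TotallySecure.yaml
```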
395 |
396 | ## Conclusion
397 |
398 | This exercise really proved that our approach was appropriate in solving many security blunders, and we also discovered a method to "shift security left" using a tool like **Checkov**. Now, you have a variety of ways to detect and correct storage issues that have previously rocked many organizations.
399 |
--------------------------------------------------------------------------------
/workbook/docs/exercises/exercise6.md:
--------------------------------------------------------------------------------
1 | # Exercise 6: Tearing Down Workshop Resources
2 |
3 | **Estimated Time to Complete:** 5 minutes
4 |
5 | ## Objectives
6 |
7 | * Tear down workshop resources using `destroy.sh`
8 | * (Optional) Reset **CloudShell** home directory contents
9 |
10 | ## Challenges
11 |
12 | ### Challenge 1: Teardown Resources
13 |
14 | Log back into your **CloudShell** session and use `destroy.sh` to destroy the workshop resources.
15 |
16 | ??? cmd "Solution"
17 |
18 | 1. In your **CloudShell** session, run the following command to destroy all workbook resources:
19 |
20 | ```bash
21 | /home/cloudshell-user/avoiding-data-disasters/destroy.sh
22 | ```
23 |
24 | !!! summary "Expected result"
25 |
26 | ```bash
27 | Emptying sensitive-206757820151 bucket... Done
28 | Destroying CloudFormation stack... Done
29 | ```
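
If you'd like to confirm everything is gone, a quick sketch (the `security-` bucket may remain, as it was created outside of CloudFormation):

```bash
aws s3 ls   # the sensitive- bucket should no longer be listed
aws cloudformation describe-stacks --stack-name avoiding-data-disasters   # should now return an error
```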
30 |
31 | ### Challenge 2: (Optional) Reset CloudShell Home Directory
32 |
33 | Delete the contents of your **CloudShell** home directory.
34 |
35 | ??? cmd "Solution"
36 |
37 | 1. In your **CloudShell session**, click on the **Actions** dropdown (1) and choose **Delete AWS CloudShell home directory** (2).
38 |
39 | {: class="w300" }
40 |
41 | 2. When prompted, type `delete` in the text field (1) and click on the **Delete** button (2).
42 |
43 | {: class="w500" }
44 |
45 | 3. Your **CloudShell** session should restart, and your home directory will be reset to its default contents.
46 |
--------------------------------------------------------------------------------
/workbook/docs/img/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/1.png
--------------------------------------------------------------------------------
/workbook/docs/img/10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/10.png
--------------------------------------------------------------------------------
/workbook/docs/img/11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/11.png
--------------------------------------------------------------------------------
/workbook/docs/img/12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/12.png
--------------------------------------------------------------------------------
/workbook/docs/img/13.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/13.png
--------------------------------------------------------------------------------
/workbook/docs/img/14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/14.png
--------------------------------------------------------------------------------
/workbook/docs/img/15.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/15.png
--------------------------------------------------------------------------------
/workbook/docs/img/16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/16.png
--------------------------------------------------------------------------------
/workbook/docs/img/17.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/17.png
--------------------------------------------------------------------------------
/workbook/docs/img/18.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/18.png
--------------------------------------------------------------------------------
/workbook/docs/img/19.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/19.png
--------------------------------------------------------------------------------
/workbook/docs/img/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/2.png
--------------------------------------------------------------------------------
/workbook/docs/img/20.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/20.png
--------------------------------------------------------------------------------
/workbook/docs/img/21.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/21.png
--------------------------------------------------------------------------------
/workbook/docs/img/22.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/22.png
--------------------------------------------------------------------------------
/workbook/docs/img/23.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/23.png
--------------------------------------------------------------------------------
/workbook/docs/img/24.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/24.png
--------------------------------------------------------------------------------
/workbook/docs/img/25.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/25.png
--------------------------------------------------------------------------------
/workbook/docs/img/26.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/26.png
--------------------------------------------------------------------------------
/workbook/docs/img/27.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/27.png
--------------------------------------------------------------------------------
/workbook/docs/img/28.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/28.png
--------------------------------------------------------------------------------
/workbook/docs/img/29.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/29.png
--------------------------------------------------------------------------------
/workbook/docs/img/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/3.png
--------------------------------------------------------------------------------
/workbook/docs/img/30.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/30.png
--------------------------------------------------------------------------------
/workbook/docs/img/31.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/31.png
--------------------------------------------------------------------------------
/workbook/docs/img/32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/32.png
--------------------------------------------------------------------------------
/workbook/docs/img/33.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/33.png
--------------------------------------------------------------------------------
/workbook/docs/img/34.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/34.png
--------------------------------------------------------------------------------
/workbook/docs/img/35.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/35.png
--------------------------------------------------------------------------------
/workbook/docs/img/36.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/36.png
--------------------------------------------------------------------------------
/workbook/docs/img/37.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/37.png
--------------------------------------------------------------------------------
/workbook/docs/img/38.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/38.png
--------------------------------------------------------------------------------
/workbook/docs/img/39.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/39.png
--------------------------------------------------------------------------------
/workbook/docs/img/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/4.png
--------------------------------------------------------------------------------
/workbook/docs/img/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/5.png
--------------------------------------------------------------------------------
/workbook/docs/img/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/6.png
--------------------------------------------------------------------------------
/workbook/docs/img/7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/7.png
--------------------------------------------------------------------------------
/workbook/docs/img/8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/8.png
--------------------------------------------------------------------------------
/workbook/docs/img/9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/9.png
--------------------------------------------------------------------------------
/workbook/docs/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bluemountaincyber/avoiding-data-disasters/5188aadd75aa399596c5c9678a411ce736d5b4ef/workbook/docs/img/favicon.ico
--------------------------------------------------------------------------------
/workbook/docs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to Avoiding Data Disasters
2 |
3 | ## Learning Objectives
4 |
5 | - Discover all-too-common cloud storage security deficiencies present as either insecure vendor defaults or careless mistakes
6 | - Correct these issues using a variety of means (e.g., cloud management console, command line tools, and Infrastructure-as-Code)
7 |
8 | ## Pre-requisite Knowledge
9 |
10 | - None
11 |
12 | ## System Requirements
13 |
14 | - Laptop with a modern web browser
15 | - AWS account with root access or an IAM user with `AdministratorAccess` permissions
--------------------------------------------------------------------------------
/workbook/docs/js/jquery.fancybox.min.js:
--------------------------------------------------------------------------------
1 | // ==================================================
2 | // fancyBox v3.5.7
3 | //
4 | // Licensed GPLv3 for open source use
5 | // or fancyBox Commercial License for commercial use
6 | //
7 | // http://fancyapps.com/fancybox/
8 | // Copyright 2019 fancyApps
9 | //
10 | // ==================================================
11 | !function(t,e,n,o){"use strict";function i(t,e){var o,i,a,s=[],r=0;t&&t.isDefaultPrevented()||(t.preventDefault(),e=e||{},t&&t.data&&(e=h(t.data.options,e)),o=e.$target||n(t.currentTarget).trigger("blur"),(a=n.fancybox.getInstance())&&a.$trigger&&a.$trigger.is(o)||(e.selector?s=n(e.selector):(i=o.attr("data-fancybox")||"",i?(s=t.data?t.data.items:[],s=s.length?s.filter('[data-fancybox="'+i+'"]'):n('[data-fancybox="'+i+'"]')):s=[o]),r=n(s).index(o),r<0&&(r=0),a=n.fancybox.open(s,e,r),a.$trigger=o))}if(t.console=t.console||{info:function(t){}},n){if(n.fn.fancybox)return void console.info("fancyBox already initialized");var a={closeExisting:!1,loop:!1,gutter:50,keyboard:!0,preventCaptionOverlap:!0,arrows:!0,infobar:!0,smallBtn:"auto",toolbar:"auto",buttons:["zoom","slideShow","thumbs","close"],idleTime:3,protect:!1,modal:!1,image:{preload:!1},ajax:{settings:{data:{fancybox:!0}}},iframe:{tpl:'',preload:!0,css:{},attr:{scrolling:"auto"}},video:{tpl:'',format:"",autoStart:!0},defaultType:"image",animationEffect:"zoom",animationDuration:366,zoomOpacity:"auto",transitionEffect:"fade",transitionDuration:366,slideClass:"",baseClass:"",baseTpl:'