├── .gitignore
├── Makefile
├── README.md
├── infra
├── cloudwatch.tf
├── iam.tf
├── lambda-ingest-example.tf.example
├── lambda-ingest-urlscan.tf
├── lambda-scan.tf
├── lambda-upload.tf
├── s3-buckets.tf
├── sns.tf
└── variables.tf
├── lambda
├── ingest-example
│ ├── main.py
│ └── requirements.txt
├── ingest-urlscan
│ ├── main.py
│ └── requirements.txt
├── pack.sh
├── scan
│ ├── main.py
│ └── requirements.txt
├── upload
│ └── main.py
└── uploader.py
└── res
├── icon.png
├── notif.png
└── s3eker.png
/.gitignore:
--------------------------------------------------------------------------------
1 | # Mac
2 | .DS_Store
3 |
4 | # Terraform
5 | infra/.terraform
6 |
7 | # Lambda packages
8 | lambda/dist/
9 |
10 | # Binaries for programs and plugins
11 | *.exe
12 | *.exe~
13 | *.dll
14 | *.so
15 | *.dylib
16 |
17 | # Test binary, built with `go test -c`
18 | *.test
19 |
20 | # Output of the go coverage tool, specifically when used with LiteIDE
21 | *.out
22 |
23 | # Dependency directories (remove the comment below to include it)
24 | # vendor/
25 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Helper targets for packaging and smoke-testing s3eker.
# NOTE(review): the SNS topic ARN below contains a placeholder account id
# (xxxxxxxxxxxx) -- replace with your own account before running `make test`.

pack: ## Package the code into zip archive, upload to the S3 bucket, and update the functions.
	cd lambda && ./pack.sh

remove: ## Remove the test bucket from the list of previously scanned buckets.
	aws s3 rm s3://s3eker-buckets/s3eker-open.s3-website-us-east-1.amazonaws.com

test: remove ## Publish a test open bucket to the SNS topic.
	aws sns publish --topic-arn 'arn:aws:sns:us-east-1:xxxxxxxxxxxx:s3eker-upload' --message '{"bucket": "s3eker-open.s3-website-us-east-1.amazonaws.com"}'
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # s3eker
2 |
3 | s3eker is an extensible way to find open S3 buckets and notify via Slack. There are no limits on what can be used to ingest bucket names as long as it has a way to publish to the AWS SNS topic (REST API, CLI, SDK). Almost all of the infrastructure can be created using Terraform.
4 |
5 |
6 |
7 | ## Ingestion Example
8 |
9 | A Lambda function searches [urlscan.io](https://urlscan.io) every hour for websites that reach out to a `s3-website-us-east-1.amazonaws.com` subdomain. The theory is that if a bucket is being used as a static site, it may have more relaxed permissions.
10 |
11 | ## Getting Started
12 |
13 | See the [Configuration and Deployment](https://github.com/becksteadn/s3eker/wiki/Configuration-and-Deployment) section of the [Wiki](https://github.com/becksteadn/s3eker/wiki).
14 |
15 | ## Infrastructure Overview
16 |
17 | s3eker runs on AWS and can be spun up using Terraform with the exception of a Secrets Manager entry for the Slack webhook. No API key is needed for urlscan.io. Ingestion functions are run periodically using Cloudwatch events. They publish all found buckets to an SNS topic. This notifies the upload function which will check if the bucket has already been scanned. If it has not been scanned, the bucket name is uploaded to an S3 bucket, triggering the scan function.
18 |
19 |
--------------------------------------------------------------------------------
/infra/cloudwatch.tf:
--------------------------------------------------------------------------------
# Hourly schedule that drives the ingestion Lambdas. Referenced by the
# event targets defined alongside each ingest function.
resource "aws_cloudwatch_event_rule" "every-hour" {
  name                = "s3eker-fetch-interval"
  description         = "Time interval to trigger s3eker fetch Lambda."
  schedule_expression = "rate(1 hour)"
}
6 |
--------------------------------------------------------------------------------
/infra/iam.tf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scriptingislife/s3eker/071718a3fb98048f384dbfde263de4db71c92465/infra/iam.tf
--------------------------------------------------------------------------------
/infra/lambda-ingest-example.tf.example:
--------------------------------------------------------------------------------
1 | resource "aws_lambda_function" "ingest-example" {
2 | function_name = "s3eker-ingest-example"
3 | s3_bucket = var.lambda_bucket_name
4 | s3_key = "ingest-example/ingest-example.zip"
5 |
6 | memory_size = 128
7 | timeout = 240
8 |
9 | handler = "main.main"
10 | runtime = "python3.7"
11 | role = aws_iam_role.exec-ingest-example.arn
12 | }
13 |
14 | resource "aws_iam_role" "exec-ingest-example" {
15 | name = "s3eker-lambda-ingest-example"
16 | assume_role_policy = </dev/null
7 |
# Package each lambda/<function>/ directory, upload the zip to S3, and
# refresh the deployed function code.
# NOTE(review): $LAMBDA_BUCKET and $ORIG_DIR are set in the script header,
# which is outside this view -- confirm they are defined before this loop.
for D in */
do
  FUNCTION_NAME="${D%%/}" # Remove slash at end of dir name. dir/ to dir
  if [ "$FUNCTION_NAME" = "dist" ] || [ "$FUNCTION_NAME" = "ingest-example" ] # Ignore some directories
  then
    continue
  fi

  TARGET_DIR="./dist/$D" # Place to put code before zipping.

  echo
  echo "Packing $D"

  # Install dependencies in dist/FUNCTION_NAME/
  cp -r "$D" "$TARGET_DIR"

  # Every ingest-* function bundles the shared SNS publishing helper.
  if [[ "$FUNCTION_NAME" == ingest-* ]];
  then
    cp uploader.py "$TARGET_DIR"
  fi

  if [ -f "$FUNCTION_NAME/requirements.txt" ]; then
    pip3 install -r "$FUNCTION_NAME/requirements.txt" --target "$TARGET_DIR"
  fi

  cd "$TARGET_DIR"

  DIST_DIR=".."
  ZIP_FILENAME="$FUNCTION_NAME.zip"
  ZIP_FILE="$DIST_DIR/$ZIP_FILENAME"

  if zip -r "$ZIP_FILE" *; then
    echo "Successfully zipped $FUNCTION_NAME"
    aws s3 cp "$ZIP_FILE" "s3://$LAMBDA_BUCKET/$FUNCTION_NAME/$ZIP_FILENAME"

    # Update Lambda function with new code.
    # Fix (SC2086): quote $LAMBDA_BUCKET so the argument cannot word-split.
    aws lambda update-function-code --function-name "s3eker-$FUNCTION_NAME" --s3-bucket "$LAMBDA_BUCKET" --s3-key "$FUNCTION_NAME/$ZIP_FILENAME" > /dev/null
  fi

  # Fix (SC2086): quote both expansions so cleanup cannot word-split or glob.
  cd "$ORIG_DIR"
  rm -r "$TARGET_DIR"

done
--------------------------------------------------------------------------------
/lambda/scan/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import requests
3 | import boto3
4 | import json
5 | from botocore.exceptions import ClientError
6 | import base64
7 |
8 | logging.basicConfig()
9 | logging.getLogger().setLevel(logging.INFO)
10 |
def main(event, context):
    """Lambda handler for S3 object-created events from the tracking bucket.

    The object key is a bucket website hostname such as
    's3eker-open.s3-website-us-east-1.amazonaws.com'; the text before the
    first '.' is the bucket name to probe. If the bucket allows anonymous
    listing, a notification is posted to the Slack webhook stored in
    Secrets Manager.
    """
    s3_event = event["Records"][0]["s3"]
    s3_object = s3_event["object"]["key"]

    # 'bucket.s3-website-REGION.amazonaws.com' -> 'bucket'
    target_bucket = s3_object.split('.')[0]

    s3 = boto3.client("s3")
    try:
        s3.list_objects(Bucket=target_bucket)
        logging.info(f"Bucket {target_bucket} is open!")

        webhook = get_secret()
        # Fix: a timeout is required -- without one, a stalled Slack
        # connection hangs the Lambda until its own timeout is hit.
        response = requests.post(
            webhook,
            headers={'Content-type': 'application/json'},
            data=json.dumps({"text": f"Bucket `{target_bucket}` is open!"}),
            timeout=10,
        )
        if response.status_code != 200:
            logging.error(f"Slack returned status code {response.status_code}.")

    except ClientError as e:
        # AccessDenied means the bucket is not publicly listable -- expected
        # for most buckets, so it is logged rather than raised.
        if e.response['Error']['Code'] == "AccessDenied":
            logging.info(f"Permission denied for bucket {target_bucket}")
        else:
            raise
32 |
def get_secret():
    """Fetch the Slack webhook URL from AWS Secrets Manager.

    Returns:
        str: the SecretString value, or
        bytes: the base64-decoded SecretBinary value.

    Raises:
        ClientError: propagated unchanged on any Secrets Manager failure
            (decryption failure, throttling, bad parameters, missing secret).
    """
    secret_name = "s3ekerSlackWebhook"
    region_name = "us-east-1"

    # Create a Secrets Manager client.
    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name
    )

    try:
        get_secret_value_response = client.get_secret_value(
            SecretId=secret_name
        )
    except ClientError:
        # The AWS sample this came from enumerated DecryptionFailureException,
        # InternalServiceErrorException, InvalidParameterException,
        # InvalidRequestException and ResourceNotFoundException -- but every
        # branch simply re-raised, so a single bare raise is equivalent.
        raise
    else:
        # Success path deliberately attached to the try (try/except/else),
        # so a successful call always returns a value.
        # Depending on whether the secret is a string or binary, one of
        # these fields will be populated.
        if 'SecretString' in get_secret_value_response:
            return get_secret_value_response['SecretString']
        return base64.b64decode(get_secret_value_response['SecretBinary'])
81 |
82 |
if __name__ == "__main__":
    # Local smoke test: a hand-built S3 event pointing at the known-open
    # test bucket's website hostname.
    fake_event = {
        'Records': [
            {
                's3': {
                    'bucket': {'name': 'bloopy'},
                    'object': {
                        'key': 's3eker-open.s3-website-us-east-1.amazonaws.com'
                    },
                }
            }
        ]
    }
    main(fake_event, None)
--------------------------------------------------------------------------------
/lambda/scan/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/lambda/upload/main.py:
--------------------------------------------------------------------------------
1 | import json
2 | import requests
3 | import logging
4 | import boto3
5 | import botocore
6 |
7 | logging.basicConfig()
8 | logging.getLogger().setLevel(logging.INFO)
9 |
10 |
def main(event, context):
    """Lambda handler for SNS notifications of candidate buckets.

    Expects the SNS message body to be JSON of the form
    {"bucket": "<hostname>"}. If the hostname is not already recorded in
    the tracking bucket, an empty object named after it is created there,
    which in turn triggers the scan Lambda.
    """
    sns_event = event["Records"][0]["Sns"]
    sns_message = json.loads(sns_event["Message"])
    # Bug fix: the original concatenated the parsed dict onto a str
    # ("SNS Message is " + sns_message), raising TypeError on every invoke.
    # Lazy %s formatting stringifies the dict safely.
    logging.info("SNS Message is %s", sns_message)
    bucket = sns_message["bucket"]

    s3 = boto3.client('s3')
    bucket_name = "s3eker-buckets"

    logging.info("Checking if domain exists in bucket.")
    # https://stackoverflow.com/questions/33842944/check-if-a-key-exists-in-a-bucket-in-s3-using-boto3

    logging.info(f"Loading object {bucket}")
    response = s3.list_objects_v2(Bucket=bucket_name, Prefix=bucket)
    # Prefix match can return siblings; only an exact key match counts.
    found = any(obj['Key'] == bucket for obj in response.get('Contents', []))

    if not found:
        create_key(bucket_name, bucket)
32 |
def create_key(bucket, key):
    """Create an empty object named ``key`` in ``bucket``.

    Returns:
        bool: True on success; False when S3 rejects the put (the error
        is logged rather than raised).
    """
    logging.info(f"Creating key {key} in bucket {bucket}.\n")
    s3_client = boto3.client('s3')
    try:
        s3_client.put_object(Bucket=bucket, Key=key)
    except botocore.exceptions.ClientError as err:
        logging.error(err)
        return False
    else:
        return True
42 |
43 |
# NOTE(review): main(None, None) subscripts event["Records"] and will raise
# TypeError when run directly -- presumably a leftover stub; confirm intent.
if __name__ == "__main__":
    main(None, None)
--------------------------------------------------------------------------------
/lambda/uploader.py:
--------------------------------------------------------------------------------
1 | import json
2 | import boto3
3 | import logging
4 |
def upload(client, topic_arn, site):
    """Publish a newly-found site to the SNS topic.

    Uses SNS 'json' message structure: the 'default' payload is a JSON
    object {"bucket": site}; sms/email subscribers receive the bare
    hostname.
    """
    logging.info(f"Publishing {site} to SNS topic.\n")
    payload = {
        'default': json.dumps({'bucket': site}),
        'sms': site,
        'email': site,
    }
    client.publish(
        TargetArn=topic_arn,
        Message=json.dumps(payload),
        Subject="New site to check",
        MessageStructure='json',
    )
8 |
--------------------------------------------------------------------------------
/res/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scriptingislife/s3eker/071718a3fb98048f384dbfde263de4db71c92465/res/icon.png
--------------------------------------------------------------------------------
/res/notif.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scriptingislife/s3eker/071718a3fb98048f384dbfde263de4db71c92465/res/notif.png
--------------------------------------------------------------------------------
/res/s3eker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scriptingislife/s3eker/071718a3fb98048f384dbfde263de4db71c92465/res/s3eker.png
--------------------------------------------------------------------------------