{% block title %} {{ post['title'] }} {% endblock %}
5 | {{ post['created'] }}
6 |
{{ post['content'] }}
7 | {% endblock %}
8 |
--------------------------------------------------------------------------------
/samples/lambda-container-apigateway-flaskapp/flask-app-serverless/requirements.txt:
--------------------------------------------------------------------------------
1 | flask
2 | db-sqlite3
3 | Werkzeug
4 | serverless-wsgi>=2.0.2
5 | ushlex
--------------------------------------------------------------------------------
/samples/lambda-role-policy-apigateway-python/api-gateway.tf:
--------------------------------------------------------------------------------
# REST API container for the serverless application.
resource "aws_api_gateway_rest_api" "example" {
  name        = "Serverless"
  description = "Serverless Application using Terraform"
}

# Catch-all resource: path_part = "{proxy+}" lets API Gateway match any
# request path, enabling dynamic routing to the Lambda function.
resource "aws_api_gateway_resource" "proxy" {
  rest_api_id = aws_api_gateway_rest_api.example.id
  parent_id   = aws_api_gateway_rest_api.example.root_resource_id
  path_part   = "{proxy+}" # with proxy, this resource will match any request path
}

# Accept every HTTP method (GET, POST, DELETE, ...) on the proxy resource
# without requiring authorization.
resource "aws_api_gateway_method" "proxy" {
  rest_api_id   = aws_api_gateway_rest_api.example.id
  resource_id   = aws_api_gateway_resource.proxy.id
  http_method   = "ANY" # all incoming requests, whatever the method, match this resource
  authorization = "NONE"
}

# API Gateway -> Lambda connection.
# AWS_PROXY passes the full request (method, headers, body, path
# parameters, ...) straight through to the Lambda function.
resource "aws_api_gateway_integration" "lambda" {
  rest_api_id = aws_api_gateway_rest_api.example.id
  resource_id = aws_api_gateway_method.proxy.resource_id
  http_method = aws_api_gateway_method.proxy.http_method

  integration_http_method = "POST"
  type                    = "AWS_PROXY" # API Gateway calls into the API of another AWS service
  uri                     = aws_lambda_function.lambda_function.invoke_arn
}

# The {proxy+} resource cannot match the empty root path, so the built-in
# root resource of the REST API needs its own method and integration.
# This ensures requests to e.g. https://api.example.com/ also reach Lambda.
resource "aws_api_gateway_method" "proxy_root" {
  rest_api_id   = aws_api_gateway_rest_api.example.id
  resource_id   = aws_api_gateway_rest_api.example.root_resource_id
  http_method   = "ANY"
  authorization = "NONE"
}

resource "aws_api_gateway_integration" "lambda_root" {
  rest_api_id = aws_api_gateway_rest_api.example.id
  resource_id = aws_api_gateway_method.proxy_root.resource_id
  http_method = aws_api_gateway_method.proxy_root.http_method

  integration_http_method = "POST"
  type                    = "AWS_PROXY" # API Gateway calls into the API of another AWS service
  uri                     = aws_lambda_function.lambda_function.invoke_arn
}

# Deploy the API to the "test" stage; depends_on guarantees both Lambda
# integrations (proxy and root) exist before the deployment is created.
resource "aws_api_gateway_deployment" "example" {
  depends_on = [
    aws_api_gateway_integration.lambda,
    aws_api_gateway_integration.lambda_root,
  ]

  rest_api_id = aws_api_gateway_rest_api.example.id
  stage_name  = "test"
}

# Invocation URL of the deployed stage.
output "base_url" {
  value = aws_api_gateway_deployment.example.invoke_url
}
67 |
--------------------------------------------------------------------------------
/samples/lambda-role-policy-apigateway-python/code/main.py:
--------------------------------------------------------------------------------
def lambda_handler(event, context):
    """
    Lambda entry point serving a static HTML page via API Gateway.

    :param event: API Gateway proxy-integration event (unused here).
    :param context: Lambda runtime context (unused here).
    :return: Proxy-integration response dict with ``statusCode``,
             ``body`` and a ``Content-Type: text/html`` header so
             API Gateway renders the body as HTML.
    """
    # NOTE(review): the original page markup was garbled in extraction;
    # only the visible text is preserved here -- confirm against the repo.
    content = """
    <html>
    <body>
    <h1>Hello Website running on Lambda! Deployed via Terraform</h1>
    </body>
    </html>
    """
    response = {
        "statusCode": 200,
        "body": content,
        "headers": {"Content-Type": "text/html"},
    }
    return response
--------------------------------------------------------------------------------
/samples/lambda-role-policy-apigateway-python/lambda.tf:
--------------------------------------------------------------------------------
# Pin the AWS provider series and the minimum Terraform CLI version.
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 4.16"
    }
  }

  required_version = ">= 1.2.0"
}
10 |
11 | # Create IAM Role for lambda
12 | # The assume_role_policy defines who or what can assume this role. In this case, it allows lambda to assume the role, which is necessary for Lambda functions to execute with this role's permissions.
13 | # The policy allows STS (Security Token Service) to manage temporary credentials for the Lambda service.
14 | resource "aws_iam_role" "lambda_role" {
15 | name = "aws_lambda_role"
16 | assume_role_policy = < check only Python 3.7 compatible
31 | FI50,
32 | FI51,
33 | FI52,
34 | FI53,
35 | FI54,
36 | FI55,
37 | FI56,
38 | FI57,
39 | W503
40 |
41 | require-code = True
42 |
43 | [testenv]
44 | commands =
45 | pytest --cov=pipelines --cov-append {posargs}
46 | coverage report --fail-under=0
47 | deps = .[test]
48 | depends =
49 | {py36,py37,py38}: clean
50 |
51 | [testenv:flake8]
52 | skipdist = true
53 | skip_install = true
54 | deps = flake8
55 | commands = flake8
56 |
57 | [testenv:black-format]
58 | deps = black
59 | commands =
60 | black -l 100 ./
61 |
62 | [testenv:black-check]
63 | deps = black
64 | commands =
65 | black -l 100 --check ./
66 |
67 | [testenv:clean]
68 | skip_install = true
69 | deps = coverage
70 | commands = coverage erase
71 |
72 | [testenv:pydocstyle]
73 | deps = pydocstyle
74 | commands =
75 | pydocstyle pipelines
76 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/README.md:
--------------------------------------------------------------------------------
1 | # modeldeploy_pipeline
2 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/endpoint-config-template.yml:
--------------------------------------------------------------------------------
1 | Description:
2 | This template is built and deployed by the infrastructure pipeline in various stages (staging/production) as required.
3 | It specifies the resources that need to be created, like the SageMaker Endpoint. It can be extended to include resources like
4 | AutoScalingPolicy, API Gateway, etc., as required.
5 | Parameters:
6 | SageMakerProjectName:
7 | Type: String
8 | Description: Name of the project
9 | MinLength: 1
10 | MaxLength: 32
11 | AllowedPattern: ^[a-zA-Z](-*[a-zA-Z0-9])*
12 | ModelExecutionRoleArn:
13 | Type: String
14 | Description: Execution role used for deploying the model.
15 | ModelPackageName:
16 | Type: String
17 | Description: The trained Model Package Name
18 | StageName:
19 | Type: String
20 | Description:
21 | The name for a project pipeline stage, such as Staging or Prod, for
22 | which resources are provisioned and deployed.
23 | EndpointInstanceCount:
24 | Type: Number
25 | Description: Number of instances to launch for the endpoint.
26 | MinValue: 1
27 | EndpointInstanceType:
28 | Type: String
29 | Description: The ML compute instance type for the endpoint.
30 |
31 | Resources:
32 | Model:
33 | Type: AWS::SageMaker::Model
34 | Properties:
35 | PrimaryContainer:
36 | ModelPackageName: !Ref ModelPackageName
37 | ExecutionRoleArn: !Ref ModelExecutionRoleArn
38 |
39 | EndpointConfig:
40 | Type: AWS::SageMaker::EndpointConfig
41 | Properties:
42 | ProductionVariants:
43 | - InitialInstanceCount: !Ref EndpointInstanceCount
44 | InitialVariantWeight: 1.0
45 | InstanceType: !Ref EndpointInstanceType
46 | ModelName: !GetAtt Model.ModelName
47 | VariantName: AllTraffic
48 |
49 | Endpoint:
50 | Type: AWS::SageMaker::Endpoint
51 | Properties:
52 | EndpointName: !Sub ${SageMakerProjectName}-${StageName}
53 | EndpointConfigName: !GetAtt EndpointConfig.EndpointConfigName
54 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/fix_model_permission.py:
--------------------------------------------------------------------------------
import argparse
import boto3
import json
import os
import logging
from botocore.exceptions import ClientError

# Workaround script: downloads and re-uploads the model artifact stored
# in S3 so the new copy is owned by this account, fixing permission
# issues on the file created for the model.

s3_client = boto3.client('s3')
sm_client = boto3.client('sagemaker')

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--log-level", type=str, default=os.environ.get("LOGLEVEL", "INFO").upper())
    parser.add_argument("--prod-config-file", type=str, default="prod-config-export.json")

    args, _ = parser.parse_known_args()

    # Configure logging to output the line number and message.
    log_format = "%(levelname)s: [%(filename)s:%(lineno)s] %(message)s"
    logging.basicConfig(format=log_format, level=args.log_level)

    # Look up the name of the model package to deploy from the exported
    # production config (a list of CloudFormation-style parameter dicts).
    model_package_name = None
    with open(args.prod_config_file, 'r') as config_file:
        for entry in json.load(config_file):
            if entry.get('ParameterKey') == 'ModelPackageName':
                model_package_name = entry.get('ParameterValue')
    if model_package_name is None:
        raise Exception("Configuration file must include ModelPackageName parameter")

    # Describe the package to resolve the S3 URL of the model artifact.
    package_info = sm_client.describe_model_package(ModelPackageName=model_package_name)
    model_data_url = package_info['InferenceSpecification']['Containers'][0]['ModelDataUrl']
    _, _, bucket_name, key = model_data_url.split('/', 3)

    # Copy the file down and back up to override the object permissions.
    with open('/tmp/model.tar.gz', 'wb') as artifact:
        s3_client.download_fileobj(bucket_name, key, artifact)
    with open('/tmp/model.tar.gz', 'rb') as artifact:
        s3_client.upload_fileobj(artifact, bucket_name, key)
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/prod-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "Parameters": {
3 | "StageName": "prod-0306",
4 | "EndpointInstanceCount": "1",
5 | "EndpointInstanceType": "ml.m5.large"
6 | }
7 | }
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/setup.py:
--------------------------------------------------------------------------------
import argparse
import json
import logging
import os

import boto3
from botocore.exceptions import ClientError

logger = logging.getLogger(__name__)
sm_client = boto3.client("sagemaker")
org_client = boto3.client("organizations")


def _accounts_in_ou(ou_id):
    """Return the IDs of every account under the given OU.

    Uses the Organizations paginator: a bare list_accounts_for_parent call
    returns only the first page, silently missing accounts in large OUs.
    """
    paginator = org_client.get_paginator('list_accounts_for_parent')
    return [
        account['Id']
        for page in paginator.paginate(ParentId=ou_id)
        for account in page['Accounts']
    ]


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--log-level", type=str, default=os.environ.get("LOGLEVEL", "INFO").upper())
    parser.add_argument("--sagemaker-project-id", type=str, required=True)
    parser.add_argument("--sagemaker-project-name", type=str, required=True)
    parser.add_argument("--model-package-group-name", type=str, required=True)
    parser.add_argument("--organizational-unit-staging-id", type=str, required=True)
    parser.add_argument("--organizational-unit-prod-id", type=str, required=True)

    args, _ = parser.parse_known_args()

    # Configure logging to output the line number and message
    log_format = "%(levelname)s: [%(filename)s:%(lineno)s] %(message)s"
    logging.basicConfig(format=log_format, level=args.log_level)

    # Create the model package group if it does not already exist.
    model_package_group_arn = None
    try:
        # check if the model package group exists
        resp = sm_client.describe_model_package_group(
            ModelPackageGroupName=args.model_package_group_name)
        model_package_group_arn = resp['ModelPackageGroupArn']
    except ClientError as e:
        if e.response['Error']['Code'] == 'ValidationException':
            # it doesn't exist, lets create a new one
            resp = sm_client.create_model_package_group(
                ModelPackageGroupName=args.model_package_group_name,
                ModelPackageGroupDescription="Multi account model group",
                Tags=[
                    {'Key': 'sagemaker:project-name', 'Value': args.sagemaker_project_name},
                    {'Key': 'sagemaker:project-id', 'Value': args.sagemaker_project_id},
                ]
            )
            model_package_group_arn = resp['ModelPackageGroupArn']
        else:
            # re-raise unchanged (bare raise preserves the traceback)
            raise

    # Resolve all member accounts of the staging and prod OUs.
    staging_accounts = _accounts_in_ou(args.organizational_unit_staging_id)
    prod_accounts = _accounts_in_ou(args.organizational_unit_prod_id)

    # Attach a resource policy to the group allowing those accounts to
    # create models from any package version in this group.
    sm_client.put_model_package_group_policy(
        ModelPackageGroupName=args.model_package_group_name,
        ResourcePolicy=json.dumps({
            'Version': '2012-10-17',
            'Statement': [{
                'Sid': 'Stmt1527884065456',
                'Effect': 'Allow',
                'Principal': {'AWS': ['arn:aws:iam::%s:root' % i for i in staging_accounts + prod_accounts] },
                'Action': 'sagemaker:CreateModel',
                'Resource': '%s/*' % model_package_group_arn.replace('model-package-group', 'model-package')
            }]
        })
    )
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/staging-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "Parameters": {
3 | "StageName": "staging-0306",
4 | "EndpointInstanceCount": "1",
5 | "EndpointInstanceType": "ml.m5.large"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/test/test.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import json
3 | import logging
4 | import os
5 |
6 | import boto3
7 | from botocore.exceptions import ClientError
8 |
9 | logger = logging.getLogger(__name__)
10 | sm_client = boto3.client("sagemaker")
11 |
12 |
def invoke_endpoint(endpoint_name):
    """
    Invoke the endpoint and validate the response.

    Placeholder: add custom invocation logic here. Returns a result dict
    containing the endpoint name and a success flag.
    """
    return {"endpoint_name": endpoint_name, "success": True}
18 |
19 |
def test_endpoint(endpoint_name):
    """
    Describe the endpoint and ensure it is InService, then invoke it.

    Raises an exception when the endpoint is not InService or when a
    SageMaker API call fails.
    """
    error_message = None
    try:
        # The endpoint must be fully deployed before it can be invoked.
        description = sm_client.describe_endpoint(EndpointName=endpoint_name)
        status = description["EndpointStatus"]
        if status != "InService":
            error_message = f"SageMaker endpoint: {endpoint_name} status: {status} not InService"
            logger.error(error_message)
            raise Exception(error_message)

        # Report whether data capture is enabled on the endpoint's config.
        endpoint_config_name = description["EndpointConfigName"]
        config = sm_client.describe_endpoint_config(EndpointConfigName=endpoint_config_name)
        if "DataCaptureConfig" in config and config["DataCaptureConfig"]["EnableCapture"]:
            logger.info(f"data capture enabled for endpoint config {endpoint_config_name}")

        # Hand off to the invocation helper.
        return invoke_endpoint(endpoint_name)
    except ClientError as e:
        error_message = e.response["Error"]["Message"]
        logger.error(error_message)
        raise Exception(error_message)
46 |
47 |
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--log-level", type=str, default=os.environ.get("LOGLEVEL", "INFO").upper())
    parser.add_argument("--import-build-config", type=str, required=True)
    parser.add_argument("--export-test-results", type=str, required=True)
    args, _ = parser.parse_known_args()

    # Configure logging to output the line number and message
    logging.basicConfig(
        format="%(levelname)s: [%(filename)s:%(lineno)s] %(message)s",
        level=args.log_level,
    )

    # Load the build config exported by the build stage.
    with open(args.import_build_config, "r") as config_file:
        config = json.load(config_file)

    # The endpoint name is derived from project name and stage name.
    params = config["Parameters"]
    endpoint_name = "{}-{}".format(params["SageMakerProjectName"], params["StageName"])
    results = test_endpoint(endpoint_name)

    # Print the results and write them to the export file.
    logger.debug(json.dumps(results, indent=4))
    with open(args.export_test_results, "w") as results_file:
        json.dump(results, results_file, indent=4)
73 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/test/test_buildspec.yml:
--------------------------------------------------------------------------------
1 | version: 0.2
2 |
3 | phases:
4 | install:
5 | runtime-versions:
6 | python: 3.8
7 | build:
8 | commands:
9 | # Call the test python code
10 | - python test/test.py --import-build-config $CODEBUILD_SRC_DIR_BuildArtifact/staging-config-export.json --export-test-results ${EXPORT_TEST_RESULTS}
11 | # Show the test results file
12 | - cat ${EXPORT_TEST_RESULTS}
13 |
14 | artifacts:
15 | files:
16 | - ${EXPORT_TEST_RESULTS}
17 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/test/test_buildspec_singleaccount.yml:
--------------------------------------------------------------------------------
1 | version: 0.2
2 |
3 | phases:
4 | install:
5 | runtime-versions:
6 | python: 3.8
7 | build:
8 | commands:
9 | # Call the test python code
10 | - python test/test.py --import-build-config $CODEBUILD_SRC_DIR_BuildArtifact/staging-config-export.json --export-test-results ${EXPORT_TEST_RESULTS}
11 | # Show the test results file
12 | - cat ${EXPORT_TEST_RESULTS}
13 |
14 | artifacts:
15 | files:
16 | - ${EXPORT_TEST_RESULTS}
17 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/modeldeploy_pipeline/test/test_singleaccount.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import json
3 | import logging
4 | import os
5 |
6 | import boto3
7 | from botocore.exceptions import ClientError
8 |
9 | logger = logging.getLogger(__name__)
10 | sm_client = boto3.client("sagemaker")
11 |
12 |
def invoke_endpoint(endpoint_name):
    """
    Invoke the endpoint and validate the response.

    Placeholder: add custom invocation logic here. Returns a result dict
    containing the endpoint name and a success flag.
    """
    return {"endpoint_name": endpoint_name, "success": True}
18 |
19 |
def test_endpoint(endpoint_name):
    """
    Describe the endpoint and ensure it is InService, then invoke it.

    Raises an exception when the endpoint is not InService or when a
    SageMaker API call fails.
    """
    error_message = None
    try:
        # The endpoint must be fully deployed before it can be invoked.
        description = sm_client.describe_endpoint(EndpointName=endpoint_name)
        status = description["EndpointStatus"]
        if status != "InService":
            error_message = f"SageMaker endpoint: {endpoint_name} status: {status} not InService"
            logger.error(error_message)
            raise Exception(error_message)

        # Report whether data capture is enabled on the endpoint's config.
        endpoint_config_name = description["EndpointConfigName"]
        config = sm_client.describe_endpoint_config(EndpointConfigName=endpoint_config_name)
        if "DataCaptureConfig" in config and config["DataCaptureConfig"]["EnableCapture"]:
            logger.info(f"data capture enabled for endpoint config {endpoint_config_name}")

        # Hand off to the invocation helper.
        return invoke_endpoint(endpoint_name)
    except ClientError as e:
        error_message = e.response["Error"]["Message"]
        logger.error(error_message)
        raise Exception(error_message)
46 |
47 |
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--log-level", type=str, default=os.environ.get("LOGLEVEL", "INFO").upper())
    parser.add_argument("--import-build-config", type=str, required=True)
    parser.add_argument("--export-test-results", type=str, required=True)
    args, _ = parser.parse_known_args()

    # Configure logging to output the line number and message
    logging.basicConfig(
        format="%(levelname)s: [%(filename)s:%(lineno)s] %(message)s",
        level=args.log_level,
    )

    # Load the build config exported by the build stage.
    with open(args.import_build_config, "r") as config_file:
        config = json.load(config_file)

    # The endpoint name is derived from project name and stage name.
    params = config["Parameters"]
    endpoint_name = "{}-{}".format(params["SageMakerProjectName"], params["StageName"])
    results = test_endpoint(endpoint_name)

    # Print the results and write them to the export file.
    logger.debug(json.dumps(results, indent=4))
    with open(args.export_test_results, "w") as results_file:
        json.dump(results, results_file, indent=4)
73 |
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/terraform/events.tf:
--------------------------------------------------------------------------------
1 | resource "aws_cloudwatch_event_rule" "sm_model_registry_rule" {
2 | name = "sm-model-registry-event-rule"
3 | description = "Capture new model registry"
4 |
5 | event_pattern = <"
2 | env = "dev"
3 | project_name = "" #"aws-ml-11052023"
4 | project_id = "" #"04052023"
5 | region = "us-east-1"
6 | repository_owner = ""
7 | build_repository_name = "modelbuild_pipeline"
8 | deploy_repository_name = "modeldeploy_pipeline"
9 | artifacts_bucket_name = "" # "artifact-ml-11052023" #join("-", [var.project_name, var.project_id, var.env])
10 | github_token = "" # to pull modelbuild and modeldeploy
--------------------------------------------------------------------------------
/samples/mlops-sagemaker-github-codepipeline-codebuild-codedeploy/terraform/variables.tf:
--------------------------------------------------------------------------------
variable "repository_branch" {
  description = "Repository branch to connect to"
  default     = ""
}
variable "env" {
  description = "Deployment environment"
  default     = "dev"
}
variable "project_name" {
  description = "Project name"
  default     = ""
}
variable "project_id" {
  description = "Project ID"
  default     = ""
}
variable "region" {
  description = "AWS region"
  default     = "us-east-1"
}

variable "repository_owner" {
  description = "GitHub repository owner"
  default     = ""
}

# The two repository-name variables previously shared the same ambiguous
# description; they are distinguished here (defaults unchanged).
variable "build_repository_name" {
  description = "GitHub repository name for the model build pipeline"
  default     = "modelbuild_pipeline"
}

variable "deploy_repository_name" {
  description = "GitHub repository name for the model deploy pipeline"
  default     = "modeldeploy_pipeline"
}

variable "artifacts_bucket_name" {
  description = "S3 Bucket for storing artifacts"
  default     = ""
}

variable "github_token" {
  description = "GitHub token"
  default     = ""
}
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/cloudfront.tf:
--------------------------------------------------------------------------------
locals {
  s3_origin_id = "s3-my-website2023"
}

# Origin access identity through which CloudFront reads the bucket.
resource "aws_cloudfront_origin_access_identity" "origin_access_identity" {
  comment = "s3-my-website2023"
}

resource "aws_cloudfront_distribution" "s3_distribution" {
  # Serve objects from the S3 website bucket via the OAI above.
  origin {
    domain_name = aws_s3_bucket.mybucket.bucket_regional_domain_name
    origin_id   = local.s3_origin_id

    s3_origin_config {
      origin_access_identity = aws_cloudfront_origin_access_identity.origin_access_identity.cloudfront_access_identity_path
    }
  }

  enabled             = true
  is_ipv6_enabled     = true
  comment             = "my-cloudfront"
  default_root_object = "index.html"

  # Configure logging here if required
  #logging_config {
  #  include_cookies = false
  #  bucket          = "mylogs.s3.amazonaws.com"
  #  prefix          = "myprefix"
  #}

  # If you have a domain configured, use it here
  #aliases = ["mywebsite.example.com", "s3-static-web-dev.example.com"]

  # NOTE(review): "allow-all" permits plain HTTP on the default path;
  # consider "redirect-to-https" as the ordered behaviors below use.
  default_cache_behavior {
    allowed_methods  = ["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"]
    cached_methods   = ["GET", "HEAD"]
    target_origin_id = local.s3_origin_id

    forwarded_values {
      query_string = false

      cookies {
        forward = "none"
      }
    }

    viewer_protocol_policy = "allow-all"
    min_ttl                = 0
    default_ttl            = 3600
    max_ttl                = 86400
  }

  # Cache behavior with precedence 0: long-lived immutable content.
  ordered_cache_behavior {
    path_pattern     = "/content/immutable/*"
    allowed_methods  = ["GET", "HEAD", "OPTIONS"]
    cached_methods   = ["GET", "HEAD", "OPTIONS"]
    target_origin_id = local.s3_origin_id

    forwarded_values {
      query_string = false
      headers      = ["Origin"]

      cookies {
        forward = "none"
      }
    }

    min_ttl                = 0
    default_ttl            = 86400
    max_ttl                = 31536000
    compress               = true
    viewer_protocol_policy = "redirect-to-https"
  }

  # Cache behavior with precedence 1: remaining content paths.
  ordered_cache_behavior {
    path_pattern     = "/content/*"
    allowed_methods  = ["GET", "HEAD", "OPTIONS"]
    cached_methods   = ["GET", "HEAD"]
    target_origin_id = local.s3_origin_id

    forwarded_values {
      query_string = false

      cookies {
        forward = "none"
      }
    }

    min_ttl                = 0
    default_ttl            = 3600
    max_ttl                = 86400
    compress               = true
    viewer_protocol_policy = "redirect-to-https"
  }

  price_class = "PriceClass_200"

  restrictions {
    geo_restriction {
      restriction_type = "whitelist"
      locations        = ["US", "CA", "GB", "DE", "IN", "IR"]
    }
  }

  tags = {
    Environment = "development"
    Name        = "my-tag"
  }

  # Default CloudFront certificate (no custom domain/alias configured).
  viewer_certificate {
    cloudfront_default_certificate = true
  }
}

# To get the CloudFront URL if domain/alias is not configured
output "cloudfront_domain_name" {
  value = aws_cloudfront_distribution.s3_distribution.domain_name
}
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/favicon.ico
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/img/portfolio/cabin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/img/portfolio/cabin.png
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/img/portfolio/cake.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/img/portfolio/cake.png
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/img/portfolio/circus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/img/portfolio/circus.png
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/img/portfolio/game.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/img/portfolio/game.png
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/img/portfolio/safe.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/img/portfolio/safe.png
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/assets/img/portfolio/submarine.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/omerbsezer/Fast-Terraform/5ff4bbac8abd87430a6b9184339071a1c0ec6158/samples/s3-cloudfront-static-website/website/assets/img/portfolio/submarine.png
--------------------------------------------------------------------------------
/samples/s3-cloudfront-static-website/website/js/scripts.js:
--------------------------------------------------------------------------------
1 | /*!
2 | * Start Bootstrap - Freelancer v7.0.7 (https://startbootstrap.com/theme/freelancer)
3 | * Copyright 2013-2023 Start Bootstrap
4 | * Licensed under MIT (https://github.com/StartBootstrap/startbootstrap-freelancer/blob/master/LICENSE)
5 | */
6 | //
7 | // Scripts
8 | //
9 |
window.addEventListener('DOMContentLoaded', event => {

    // Toggle the "navbar-shrink" class on #mainNav based on scroll position.
    const navbarShrink = function () {
        const navbarCollapsible = document.body.querySelector('#mainNav');
        if (!navbarCollapsible) {
            return;
        }
        if (window.scrollY === 0) {
            navbarCollapsible.classList.remove('navbar-shrink');
        } else {
            navbarCollapsible.classList.add('navbar-shrink');
        }
    };

    // Apply the correct navbar state on initial load.
    navbarShrink();

    // Re-evaluate the navbar state whenever the page is scrolled.
    document.addEventListener('scroll', navbarShrink);

    // Activate Bootstrap scrollspy on the main nav element.
    const mainNav = document.body.querySelector('#mainNav');
    if (mainNav) {
        new bootstrap.ScrollSpy(document.body, {
            target: '#mainNav',
            rootMargin: '0px 0px -40%',
        });
    }

    // Collapse the responsive navbar when a nav link is clicked while
    // the toggler is visible (i.e. on small screens).
    const navbarToggler = document.body.querySelector('.navbar-toggler');
    const responsiveNavItems = Array.prototype.slice.call(
        document.querySelectorAll('#navbarResponsive .nav-link')
    );
    responsiveNavItems.forEach(function (responsiveNavItem) {
        responsiveNavItem.addEventListener('click', () => {
            if (window.getComputedStyle(navbarToggler).display !== 'none') {
                navbarToggler.click();
            }
        });
    });

});
55 |
--------------------------------------------------------------------------------