├── Week8 ├── 5_Deploy_EKS.md ├── 7_AWS_Fargate.md ├── 4_EKS_Cluster.md ├── 6_Azure_ACI.md ├── Notes_Week8.md ├── project3_nginx.png ├── project3_services.png ├── Project2 │ └── azaks.ps1 ├── Project1 │ └── nginx.yml ├── infrastructure │ ├── variables.tf │ └── main.tf ├── Project3 │ └── nginx_aks.yml ├── 3_Deploy_AKS.md ├── 2_AKS_Cluster.md └── 1_Minikube_Env.md ├── Week9 ├── 5_AWS_Cloudwatch.md ├── 1_Azure_IaaS.md ├── Notes_Week9.md ├── 3_Azure_Monitor.md ├── 2_Serverless.md ├── 4_Azure_logs_metrics.md └── 6_Prometheus.md ├── Week2 ├── Project3 │ ├── linting.ps1 │ └── New-ResourceGroup.ps1 ├── pester_result.png ├── portal_bucket_live.png ├── python_test_result.png ├── newresourcegroupcreated.png ├── 1_dev_environment.md ├── 3_Reusable_PowerShell_code.md ├── Project5 │ ├── PowerShell │ │ └── New-ResourceGroup-test.ps1 │ └── Python │ │ └── s3_bucket_test.py ├── Project2 │ ├── s3bucket.py │ └── s3bucket_region.py ├── 4_Linting_PowerShell_Python.md ├── 5_Testing_PowerShell_Python.md ├── 2_Reusable_clean_Python.md ├── 6_Documenting_code.md └── Notes_week2.md ├── Week5 ├── new_page.png ├── new_push.png ├── dashboard.png ├── action_done.png ├── apprunning_1.png ├── project6_repo.png ├── devops_starter_1.png ├── devops_starter_2.png ├── devops_starter_3.png ├── devops_starter_4.png ├── devops_starter_5.png ├── project2_workflows.png ├── project5_artifact.png ├── project1_app_running.png ├── project6_codecommit.png ├── project3_build_success.png ├── project4_predeployment.png ├── project6_codecommit_main.png ├── project2_unit_test_results.png ├── 5_Artifacts_Packages_CICD.md ├── 4_Setting_Up_Continuous_Monitoring.md ├── 6_Working_AWS_CodeDeploy.md ├── Notes_week5.md ├── 1_Deploy_Web_App_GitHubActions.md ├── 2_CI_GitHubActions.md └── 3_Terraform_GitHubActions.md ├── Week1 ├── devops-tools.png ├── helloworldaction1.png ├── helloworldaction2.png ├── helloworldaction3.png ├── first_github_project.png ├── myfirstgithubaction.png ├── 2_SettingUp_Azure.md ├── 
1_GitHub_Getting_Started.md ├── 4_GitHub_Projects.md ├── 3_SettingUp_AWS.md ├── 5_First_GitHub_Action.md └── Notes_week1.md ├── Week3 ├── ec2_running.png ├── go_test_pass.png ├── project1_rg.png ├── public_ip_ec2.png ├── bicep_rg_portal.png ├── Project2 │ ├── modules │ │ └── ec2 │ │ │ ├── variables.tf │ │ │ ├── outputs.tf │ │ │ └── main.tf │ └── main.tf ├── Project3 │ └── Testing │ │ └── terraform-aws-webserver │ │ ├── examples │ │ └── webserver │ │ │ ├── outputs.tf │ │ │ ├── variables.tf │ │ │ └── main.tf │ │ ├── variables.tf │ │ ├── outputs.tf │ │ ├── test │ │ └── webserver_test.go │ │ └── main.tf ├── Project4 │ ├── main.bicep │ └── main.json ├── Notes_week3.md ├── 2_Terraform_module_vnet_aws.md ├── 3_Testing_IaC.md ├── 4_Intro_Azure_Bicep.md ├── 1_ARM_template_VM_Azure.md └── Project1 │ └── template.json ├── Week4 ├── contributing1.png ├── contributing2.png ├── sourcecontrolsync.png ├── sourcecontrolview1.png ├── sourcecontrolview2.png ├── sourcecontrolview3.png ├── Project2 │ ├── mergeconflict1.png │ ├── mergeconflict2.png │ ├── mergeconflictfinal.png │ └── Managing_Merge_Conflicts.md ├── Project1 │ └── cloudskills_git_project.md ├── 3_Contributing_Open_Source.md ├── Project3 │ └── My_First_Contribution.md ├── 1_VSCode_Git_GitHub.md ├── 2_Working_Push_Conflicts.md └── Notes_week4.md ├── Week7 ├── project1_jobs.png ├── project1_alerts.png ├── project4_iam_roles.png ├── project4_iam_users.png ├── project3_powershell.png ├── project2_checkov_action.png ├── project2_checkov_error.png ├── project3_create_secret.png ├── project3_access_policies.png ├── project3_when_to_use_msi.png ├── project4_iam_create_role.png ├── project4_iam_group_policy.png ├── project1_setup_code_scanning.png ├── project3_add_access_policy.png ├── project3_add_role_assignment.png ├── project3_system_assigned_id.png ├── Project4 │ └── iamroles-aws.sh ├── Project5 │ └── rbac-rules-azure.sh ├── Notes_Week7.md ├── 4_IAM_Roles.md ├── 1_Securing_Code.md ├── 5_RBAC_Policies.md ├── 
2_Continuous_Security.md └── 3_Security_Authentication.md ├── Week6 ├── project3_webapp.png ├── project5_lambda.png ├── project5_signin.png ├── project6_failures.png ├── project1_addfunction.png ├── project1_azureportal.png ├── project1_functionlog.png ├── project1_newfunction.png ├── project1_output_get.png ├── project1_output_post.png ├── project2_azureportal.png ├── project6_aws_monitor.png ├── project2_webapprunning.png ├── project3_azure_portal.png ├── project5_authentication.png ├── project5_editsettings.png ├── project6_live_metrics.png ├── project2_deploymentcenter.png ├── project3_workflow_success.png ├── project5_security_center.png ├── project6_monitoring_logs.png ├── project6_webapp_overview.png ├── project2_deploymentsuccess.png ├── project4_lambda_aws_console.png ├── project4_serverless_create.png ├── project4_serverless_deploy.png ├── project5_lambda_permissions.png ├── project6_application_insights.png ├── project6_diagnose_solve_problems.png ├── Project2 │ ├── variables.tf │ └── main.tf ├── Project4 │ ├── .gitignore │ ├── handler.py │ └── serverless.yml ├── Project1 │ └── createfunctionapp.ps1 ├── Notes_week6.md ├── 4_Lambda_Function.md ├── 1_Azure_Function.md ├── 6_Monitoring_Serverless.md ├── 3_Azure_WebApp_CICD_GitHubActions.md ├── 5_Security_Serverless.md └── 2_Azure_WebApp.md ├── .github └── workflows │ ├── learn-github-actions.yml │ └── hello.yml ├── .vscode └── settings.json ├── .gitignore ├── Books └── DevOps_Handbook.md └── README.md /Week8/5_Deploy_EKS.md: -------------------------------------------------------------------------------- 1 | # Deploying to EKS 2 | -------------------------------------------------------------------------------- /Week8/7_AWS_Fargate.md: -------------------------------------------------------------------------------- 1 | # Using AWS Fargate 2 | -------------------------------------------------------------------------------- /Week9/5_AWS_Cloudwatch.md: 
-------------------------------------------------------------------------------- 1 | # AWS Cloudwatch 2 | -------------------------------------------------------------------------------- /Week8/4_EKS_Cluster.md: -------------------------------------------------------------------------------- 1 | # Creating an EKS Cluster 2 | -------------------------------------------------------------------------------- /Week9/1_Azure_IaaS.md: -------------------------------------------------------------------------------- 1 | # Monitoring Azure IaaS 2 | -------------------------------------------------------------------------------- /Week9/Notes_Week9.md: -------------------------------------------------------------------------------- 1 | # Monitoring and Logging 2 | -------------------------------------------------------------------------------- /Week2/Project3/linting.ps1: -------------------------------------------------------------------------------- 1 | Invoke-ScriptAnalyzer -Path . -------------------------------------------------------------------------------- /Week8/6_Azure_ACI.md: -------------------------------------------------------------------------------- 1 | # Using Azure Container Instances 2 | -------------------------------------------------------------------------------- /Week8/Notes_Week8.md: -------------------------------------------------------------------------------- 1 | # Containerization and Kubernetes 2 | -------------------------------------------------------------------------------- /Week9/3_Azure_Monitor.md: -------------------------------------------------------------------------------- 1 | # Azure Monitor and Alerting 2 | -------------------------------------------------------------------------------- /Week9/2_Serverless.md: -------------------------------------------------------------------------------- 1 | # Monitoring Serverless Platforms 2 | -------------------------------------------------------------------------------- 
/Week9/4_Azure_logs_metrics.md: -------------------------------------------------------------------------------- 1 | # Azure Logging and Metrics 2 | -------------------------------------------------------------------------------- /Week9/6_Prometheus.md: -------------------------------------------------------------------------------- 1 | # Container Monitoring with Prometheus 2 | -------------------------------------------------------------------------------- /Week5/new_page.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/new_page.png -------------------------------------------------------------------------------- /Week5/new_push.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/new_push.png -------------------------------------------------------------------------------- /Week5/dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/dashboard.png -------------------------------------------------------------------------------- /Week1/devops-tools.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week1/devops-tools.png -------------------------------------------------------------------------------- /Week2/pester_result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week2/pester_result.png -------------------------------------------------------------------------------- /Week3/ec2_running.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week3/ec2_running.png -------------------------------------------------------------------------------- /Week3/go_test_pass.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week3/go_test_pass.png -------------------------------------------------------------------------------- /Week3/project1_rg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week3/project1_rg.png -------------------------------------------------------------------------------- /Week3/public_ip_ec2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week3/public_ip_ec2.png -------------------------------------------------------------------------------- /Week4/contributing1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/contributing1.png -------------------------------------------------------------------------------- /Week4/contributing2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/contributing2.png -------------------------------------------------------------------------------- /Week5/action_done.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/action_done.png -------------------------------------------------------------------------------- /Week5/apprunning_1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/apprunning_1.png -------------------------------------------------------------------------------- /Week5/project6_repo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project6_repo.png -------------------------------------------------------------------------------- /Week7/project1_jobs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project1_jobs.png -------------------------------------------------------------------------------- /Week3/bicep_rg_portal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week3/bicep_rg_portal.png -------------------------------------------------------------------------------- /Week6/project3_webapp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project3_webapp.png -------------------------------------------------------------------------------- /Week6/project5_lambda.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project5_lambda.png -------------------------------------------------------------------------------- /Week6/project5_signin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project5_signin.png -------------------------------------------------------------------------------- /Week7/project1_alerts.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project1_alerts.png -------------------------------------------------------------------------------- /Week8/project3_nginx.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week8/project3_nginx.png -------------------------------------------------------------------------------- /Week1/helloworldaction1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week1/helloworldaction1.png -------------------------------------------------------------------------------- /Week1/helloworldaction2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week1/helloworldaction2.png -------------------------------------------------------------------------------- /Week1/helloworldaction3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week1/helloworldaction3.png -------------------------------------------------------------------------------- /Week2/portal_bucket_live.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week2/portal_bucket_live.png -------------------------------------------------------------------------------- /Week2/python_test_result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week2/python_test_result.png -------------------------------------------------------------------------------- /Week4/sourcecontrolsync.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/sourcecontrolsync.png -------------------------------------------------------------------------------- /Week4/sourcecontrolview1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/sourcecontrolview1.png -------------------------------------------------------------------------------- /Week4/sourcecontrolview2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/sourcecontrolview2.png -------------------------------------------------------------------------------- /Week4/sourcecontrolview3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/sourcecontrolview3.png -------------------------------------------------------------------------------- /Week5/devops_starter_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/devops_starter_1.png -------------------------------------------------------------------------------- /Week5/devops_starter_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/devops_starter_2.png -------------------------------------------------------------------------------- /Week5/devops_starter_3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/devops_starter_3.png 
-------------------------------------------------------------------------------- /Week5/devops_starter_4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/devops_starter_4.png -------------------------------------------------------------------------------- /Week5/devops_starter_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/devops_starter_5.png -------------------------------------------------------------------------------- /Week5/project2_workflows.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project2_workflows.png -------------------------------------------------------------------------------- /Week5/project5_artifact.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project5_artifact.png -------------------------------------------------------------------------------- /Week6/project6_failures.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_failures.png -------------------------------------------------------------------------------- /Week7/project4_iam_roles.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project4_iam_roles.png -------------------------------------------------------------------------------- /Week7/project4_iam_users.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project4_iam_users.png -------------------------------------------------------------------------------- /Week8/project3_services.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week8/project3_services.png -------------------------------------------------------------------------------- /Week1/first_github_project.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week1/first_github_project.png -------------------------------------------------------------------------------- /Week1/myfirstgithubaction.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week1/myfirstgithubaction.png -------------------------------------------------------------------------------- /Week5/project1_app_running.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project1_app_running.png -------------------------------------------------------------------------------- /Week5/project6_codecommit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project6_codecommit.png -------------------------------------------------------------------------------- /Week6/project1_addfunction.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project1_addfunction.png -------------------------------------------------------------------------------- /Week6/project1_azureportal.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project1_azureportal.png -------------------------------------------------------------------------------- /Week6/project1_functionlog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project1_functionlog.png -------------------------------------------------------------------------------- /Week6/project1_newfunction.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project1_newfunction.png -------------------------------------------------------------------------------- /Week6/project1_output_get.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project1_output_get.png -------------------------------------------------------------------------------- /Week6/project1_output_post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project1_output_post.png -------------------------------------------------------------------------------- /Week6/project2_azureportal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project2_azureportal.png -------------------------------------------------------------------------------- /Week6/project6_aws_monitor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_aws_monitor.png 
-------------------------------------------------------------------------------- /Week7/project3_powershell.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_powershell.png -------------------------------------------------------------------------------- /Week2/newresourcegroupcreated.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week2/newresourcegroupcreated.png -------------------------------------------------------------------------------- /Week4/Project2/mergeconflict1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/Project2/mergeconflict1.png -------------------------------------------------------------------------------- /Week4/Project2/mergeconflict2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/Project2/mergeconflict2.png -------------------------------------------------------------------------------- /Week5/project3_build_success.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project3_build_success.png -------------------------------------------------------------------------------- /Week5/project4_predeployment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project4_predeployment.png -------------------------------------------------------------------------------- /Week6/project2_webapprunning.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project2_webapprunning.png -------------------------------------------------------------------------------- /Week6/project3_azure_portal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project3_azure_portal.png -------------------------------------------------------------------------------- /Week6/project5_authentication.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project5_authentication.png -------------------------------------------------------------------------------- /Week6/project5_editsettings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project5_editsettings.png -------------------------------------------------------------------------------- /Week6/project6_live_metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_live_metrics.png -------------------------------------------------------------------------------- /Week7/project2_checkov_action.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project2_checkov_action.png -------------------------------------------------------------------------------- /Week7/project2_checkov_error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project2_checkov_error.png 
-------------------------------------------------------------------------------- /Week7/project3_create_secret.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_create_secret.png -------------------------------------------------------------------------------- /Week5/project6_codecommit_main.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project6_codecommit_main.png -------------------------------------------------------------------------------- /Week6/project2_deploymentcenter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project2_deploymentcenter.png -------------------------------------------------------------------------------- /Week6/project3_workflow_success.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project3_workflow_success.png -------------------------------------------------------------------------------- /Week6/project5_security_center.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project5_security_center.png -------------------------------------------------------------------------------- /Week6/project6_monitoring_logs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_monitoring_logs.png -------------------------------------------------------------------------------- /Week6/project6_webapp_overview.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_webapp_overview.png -------------------------------------------------------------------------------- /Week7/project3_access_policies.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_access_policies.png -------------------------------------------------------------------------------- /Week7/project3_when_to_use_msi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_when_to_use_msi.png -------------------------------------------------------------------------------- /Week7/project4_iam_create_role.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project4_iam_create_role.png -------------------------------------------------------------------------------- /Week7/project4_iam_group_policy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project4_iam_group_policy.png -------------------------------------------------------------------------------- /Week4/Project2/mergeconflictfinal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week4/Project2/mergeconflictfinal.png -------------------------------------------------------------------------------- /Week5/project2_unit_test_results.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week5/project2_unit_test_results.png -------------------------------------------------------------------------------- /Week6/project2_deploymentsuccess.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project2_deploymentsuccess.png -------------------------------------------------------------------------------- /Week6/project4_lambda_aws_console.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project4_lambda_aws_console.png -------------------------------------------------------------------------------- /Week6/project4_serverless_create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project4_serverless_create.png -------------------------------------------------------------------------------- /Week6/project4_serverless_deploy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project4_serverless_deploy.png -------------------------------------------------------------------------------- /Week6/project5_lambda_permissions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project5_lambda_permissions.png -------------------------------------------------------------------------------- /Week7/project1_setup_code_scanning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project1_setup_code_scanning.png 
-------------------------------------------------------------------------------- /Week7/project3_add_access_policy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_add_access_policy.png -------------------------------------------------------------------------------- /Week7/project3_add_role_assignment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_add_role_assignment.png -------------------------------------------------------------------------------- /Week7/project3_system_assigned_id.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week7/project3_system_assigned_id.png -------------------------------------------------------------------------------- /Week3/Project2/modules/ec2/variables.tf: -------------------------------------------------------------------------------- 1 | variable "servername" { 2 | type = string 3 | } 4 | 5 | variable "size" { 6 | type = string 7 | } -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/examples/webserver/outputs.tf: -------------------------------------------------------------------------------- 1 | output "public_ip" { 2 | value = module.webserver.public_ip 3 | } -------------------------------------------------------------------------------- /Week6/project6_application_insights.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_application_insights.png -------------------------------------------------------------------------------- /Week6/project6_diagnose_solve_problems.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/rmiravalles/CloudNativeBootcamp/HEAD/Week6/project6_diagnose_solve_problems.png -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/variables.tf: -------------------------------------------------------------------------------- 1 | variable "servername" { 2 | type = string 3 | } 4 | 5 | variable "size" { 6 | type = string 7 | } -------------------------------------------------------------------------------- /Week4/Project1/cloudskills_git_project.md: -------------------------------------------------------------------------------- 1 | # Cloudskills Bootcamp Week 4 Project 1 2 | 3 | - This is just a test file to experiment with the VS Code and Git integration. 4 | -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/examples/webserver/variables.tf: -------------------------------------------------------------------------------- 1 | variable "servername" { 2 | type = string 3 | } 4 | 5 | variable "region" { 6 | type = string 7 | } -------------------------------------------------------------------------------- /Week1/2_SettingUp_Azure.md: -------------------------------------------------------------------------------- 1 | # Setting up your first Azure account 2 | 3 | The goal of this project is to create an Azure account. 4 | 5 | Since I already an Azure account, I can consider this one **completed**. 6 | -------------------------------------------------------------------------------- /Week1/1_GitHub_Getting_Started.md: -------------------------------------------------------------------------------- 1 | # Project 1: Getting Started with GitHub 2 | 3 | In this project, we'll take our first steps in GitHub. 4 | 5 | Since I already have a GitHub account, I'll consider this project **completed**. 
6 | -------------------------------------------------------------------------------- /Week3/Project2/modules/ec2/outputs.tf: -------------------------------------------------------------------------------- 1 | # all the output data appears here. this helps in the organization of your files 2 | output "public_ip" { 3 | value = aws_instance.web.public_ip # this is the value we are outputting 4 | 5 | } -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/outputs.tf: -------------------------------------------------------------------------------- 1 | # all the output data appears here. this helps in the organization of your files 2 | output "public_ip" { 3 | value = aws_instance.web.public_ip # this is the value we are outputting 4 | 5 | } -------------------------------------------------------------------------------- /Week6/Project2/variables.tf: -------------------------------------------------------------------------------- 1 | variable "app_service_plan_name" { 2 | type = string 3 | } 4 | 5 | variable "location" { 6 | type = string 7 | } 8 | 9 | variable "resource_group_name" { 10 | type = string 11 | } 12 | 13 | variable "app_service_name" { 14 | type = string 15 | } -------------------------------------------------------------------------------- /Week7/Project4/iamroles-aws.sh: -------------------------------------------------------------------------------- 1 | # Create an IAM user 2 | 3 | aws iam create-user --user-name Chase 4 | 5 | # Create an IAM group 6 | 7 | aws iam create-group --group-name Cloudskills 8 | 9 | # Add user to group 10 | 11 | aws iam add-user-to-group --user-name Chase --group-name Cloudskills -------------------------------------------------------------------------------- /Week6/Project4/.gitignore: -------------------------------------------------------------------------------- 1 | # Distribution / packaging 2 | .Python 3 | env/ 4 | build/ 5 | develop-eggs/ 6 | 
dist/ 7 | downloads/ 8 | eggs/ 9 | .eggs/ 10 | lib/ 11 | lib64/ 12 | parts/ 13 | sdist/ 14 | var/ 15 | *.egg-info/ 16 | .installed.cfg 17 | *.egg 18 | 19 | # Serverless directories 20 | .serverless -------------------------------------------------------------------------------- /.github/workflows/learn-github-actions.yml: -------------------------------------------------------------------------------- 1 | name: learn-github-actions 2 | on: [push] 3 | jobs: 4 | check-bats-version: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v2 8 | - uses: actions/setup-node@v1 9 | - run: npm install -g bats 10 | - run: bats -v -------------------------------------------------------------------------------- /Week7/Project5/rbac-rules-azure.sh: -------------------------------------------------------------------------------- 1 | # create a service principal 2 | 3 | az ad sp create-for-rbac -n "AzureDevOps" --role contributor --scopes /subscriptions/xxxxxxxxxx 4 | 5 | # create a service principal for SDK/programmatic access 6 | 7 | az ad sp create-for-rbac -n "AzureDevOps" --role contributor --scopes /subscriptions/xxxxxxxxxx --sdk-auth -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "cSpell.ignoreWords": [ 3 | "biceptestrg", 4 | "containerd", 5 | "cslab", 6 | "levan", 7 | "rdpallow", 8 | "rg", 9 | "unstage" 10 | ], 11 | "python.linting.pylintEnabled": true, 12 | "python.linting.enabled": true, 13 | "cSpell.words": [ 14 | "Checkov" 15 | ] 16 | } -------------------------------------------------------------------------------- /Week8/Project2/azaks.ps1: -------------------------------------------------------------------------------- 1 | [CmdletBinding()] 2 | param ( 3 | $name = 'cloudskillsaks66', 4 | $rgName = 'azaksrg', 5 | $nodeCount = 1, 6 | $kubernetesVersion = '1.19.6' 7 | ) 8 | 9 | az aks create 
--generate-ssh-keys ` 10 | --name $name ` 11 | --resource-group $rgName ` 12 | --node-count $nodeCount ` 13 | --kubernetes-version $kubernetesVersion -------------------------------------------------------------------------------- /Week2/1_dev_environment.md: -------------------------------------------------------------------------------- 1 | # Setting up a Dev Environment from Start to Finish 2 | 3 | - Michael Levan guides us through the installation and configuration of VS Code 4 | - For these labs, we will be using Python and PowerShell, so we'll need to install some extensions 5 | - I have VS Code installed already and the extensions he mentions, so I'll mark this one **completed**. 6 | - I still don't have access to GitHub Codespaces. I'm on the waiting list. 7 | -------------------------------------------------------------------------------- /Week3/Project4/main.bicep: -------------------------------------------------------------------------------- 1 | param name string = 'storagethx2893' 2 | param location string = 'eastus' 3 | 4 | var storageSku = 'Standard_LRS' 5 | 6 | resource storageaccount 'Microsoft.Storage/storageAccounts@2020-08-01-preview' = { 7 | name: name 8 | location: location 9 | kind: 'Storage' 10 | sku: { 11 | name:storageSku 12 | } 13 | properties: { 14 | allowBlobPublicAccess:false 15 | 16 | } 17 | } 18 | 19 | output id string = storageaccount.id -------------------------------------------------------------------------------- /Week1/4_GitHub_Projects.md: -------------------------------------------------------------------------------- 1 | # Setting Up Projects in GitHub 2 | 3 | In this project, we'll set up a new project in GitHub. 4 | 5 | I created a project called Cloud Native Bootcamp and populated it with cards relating to this week's projects. One card for each project. Task **completed** 6 | 7 | Further reading: [Managing Project Boards](https://docs.github.com/en/github/managing-your-work-on-github/managing-project-boards) 8 | 9 |

10 | ![my_first_project](first_github_project.png) 11 | -------------------------------------------------------------------------------- /Week8/Project1/nginx.yml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: nginx-deployment 5 | spec: 6 | selector: 7 | matchLabels: 8 | app: nginx 9 | replicas: 2 10 | template: 11 | metadata: 12 | labels: 13 | app: nginx 14 | spec: 15 | containers: 16 | - name: nginx 17 | image: nginx:latest 18 | resources: 19 | limits: 20 | memory: "128Mi" 21 | cpu: "500m" 22 | ports: 23 | - containerPort: 80 24 | -------------------------------------------------------------------------------- /Week7/Notes_Week7.md: -------------------------------------------------------------------------------- 1 | # Security in Azure and AWS 2 | 3 | In this lab, we'll learn how to create users and groups, and how to define roles in AWS. 4 | 5 | ## The Project 6 | 7 | - To create a user in AWS using the CLI, we run the following command from our terminal: `aws iam create-user --user-name username`. 8 | - To create a group, we run the following command: `aws iam create-group --group-name groupname`. 9 | - To add a user to a group, this is the command: `aws iam add-user-to-group --user-name username --group-name groupname`. 
10 | -------------------------------------------------------------------------------- /Week8/infrastructure/variables.tf: -------------------------------------------------------------------------------- 1 | variable "resource_group_name" { 2 | type = string 3 | description = "the name of the resource group" 4 | } 5 | 6 | variable "location" { 7 | type = string 8 | description = "the location of the resource" 9 | } 10 | 11 | variable "username" { 12 | type = string 13 | description = "the admin username of the vm" 14 | sensitive = true 15 | } 16 | 17 | variable "password" { 18 | type = string 19 | description = "the password of the vm" 20 | sensitive = true 21 | } -------------------------------------------------------------------------------- /Week3/Project2/main.tf: -------------------------------------------------------------------------------- 1 | # this is the main file, which will call the modules 2 | terraform { 3 | required_providers { 4 | aws = { 5 | source = "hashicorp/aws" 6 | version = "3.7" 7 | } 8 | } 9 | } 10 | 11 | provider "aws" { 12 | region = "us-east-1" 13 | } 14 | 15 | module "webserver" { 16 | source = "./modules/ec2" # this specifies the path of the module being called upon 17 | servername = "terraformdemo" # here we are specifying the variables defined in the variables.tf file 18 | size = "t3.micro" 19 | } -------------------------------------------------------------------------------- /Week5/5_Artifacts_Packages_CICD.md: -------------------------------------------------------------------------------- 1 | # Artifacts and Packages in CI/CD 2 | 3 | - An artifact is a package of code. 4 | 5 | ## The Project 6 | 7 | - For this project, we're gonna create an artifact in Azure DevOps from the previous build we used for [Project 4](4_Setting_Up_Continuous_Monitoring.md). 8 | - In Azure DevOps, we can create an artifact from a build we already built inside Azure DevOps, or from code stored in Azure Repos, GitHub or TFVC. 
9 | - I built an artifact from the resulting build of the Parts Unlimited web app. 10 | ![Build Artifact](project5_artifact.png). 11 | -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/examples/webserver/main.tf: -------------------------------------------------------------------------------- 1 | # this is the main file, which will call the modules 2 | terraform { 3 | required_providers { 4 | aws = { 5 | source = "hashicorp/aws" 6 | version = "3.7" 7 | } 8 | } 9 | } 10 | 11 | provider "aws" { 12 | region = var.region 13 | } 14 | 15 | module "webserver" { 16 | source = "../../" # this specifies the path of the module being called upon 17 | servername = var.servername # here we are specifying the variables defined in the variables.tf file 18 | size = "t3.micro" 19 | } -------------------------------------------------------------------------------- /Week2/3_Reusable_PowerShell_code.md: -------------------------------------------------------------------------------- 1 | # Reusable and Clean PowerShell Code 2 | 3 | - When writing a script, try to make sure you give the user of the script the option to pass in parameters (date, location, mandatory parameters, etc.). 4 | - Make the script reusable by using a function. 5 | - Run the script named [New-ResourceGroup.ps1](Project3/New-ResourceGroup.ps1) 6 | - Then, from the command line, run `New-ResourceGroup -rgName cloudskillsrg -location westeurope` (remember these two parameters are mandatory). 
7 | - The result can be seen below 8 | 9 | ![resource created](newresourcegroupcreated.png) 10 | -------------------------------------------------------------------------------- /Week6/Project4/handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | def hello(event, context): 5 | body = { 6 | "message": "This is Cloudskills broadcasting on all frequencies!", 7 | "input": event 8 | } 9 | 10 | response = { 11 | "statusCode": 200, 12 | "body": json.dumps(body) 13 | } 14 | 15 | return response 16 | 17 | # Use this code if you don't use the http event with the LAMBDA-PROXY 18 | # integration 19 | """ 20 | return { 21 | "message": "Go Serverless v1.0! Your function executed successfully!", 22 | "event": event 23 | } 24 | """ 25 | -------------------------------------------------------------------------------- /Week2/Project5/PowerShell/New-ResourceGroup-test.ps1: -------------------------------------------------------------------------------- 1 | # to run the test, we'll use Pester, a PowerShell testing framework 2 | # Invoke-Pester .\New-ResourceGroup-test.ps1 3 | 4 | Describe "New-ResourceGroup" { 5 | $location = 'westeurope' 6 | $name = 'cloudskillsbootcamp' 7 | 8 | # the It line is just for the person reading the test to know 9 | # what value should be expected 10 | 11 | It "Name should be cloudskillsbootcamp" { 12 | $name | Should Be 'cloudskillsbootcamp' 13 | } 14 | 15 | It "location should be westeurope" { 16 | $location | Should Be 'westeurope' 17 | } 18 | } -------------------------------------------------------------------------------- /Week2/Project2/s3bucket.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import boto3 3 | 4 | try: # in case an error occurs 5 | def main(): 6 | create_s3bucket(bucket_name) 7 | 8 | except Exception as e: 9 | print(e) 10 | 11 | def create_s3bucket(bucket_name): 12 | s3_bucket = boto3.client( 13 | 's3', 14 | 
#region-name='us-east-1' 15 | ) 16 | 17 | bucket = s3_bucket.create_bucket( 18 | Bucket=bucket_name, 19 | ACL='private', # ACL stands for Access Control List 20 | ) 21 | 22 | print(bucket) # this will print the output of creating the S3 bucket 23 | 24 | BUCKET_NAME = sys.argv[1] # argv allows us to pass an argument at runtime 25 | 26 | if __name__ == '__main__': 27 | main() 28 | -------------------------------------------------------------------------------- /Week1/3_SettingUp_AWS.md: -------------------------------------------------------------------------------- 1 | # Setting up your first AWS environment 2 | 3 | In this project, I'll create an AWS account. 4 | 5 | I didn't have one, so I followed the steps on the video and created my own account. I am a complete beginner, and I haven't had any contact with AWS until now. I have to focus on that! 6 | 7 | Next steps: 8 | 9 | 1. Read the [AWS Fundamentals](https://aws.amazon.com/getting-started/fundamentals-core-concepts/) documentation 10 | 1. Watch the [AWS Certified Cloud Practitioner training video](https://www.youtube.com/watch?v=3hLmDS179YE&ab&ab_channel=freeCodeCamp.org) 11 | 1. 
Try a few [hands-on tutorials](https://aws.amazon.com/getting-started/hands-on/?awsf.getting-started-content-type=content-type%23hands-on&?e=gs2020&p=gsrc) 12 | -------------------------------------------------------------------------------- /Week2/Project3/New-ResourceGroup.ps1: -------------------------------------------------------------------------------- 1 | function New-ResourceGroup { 2 | [cmdletbinding(SupportsShouldProcess)] # the SupportShouldProcess argument adds Confirm and WhatIf parameters to the function 3 | param ( 4 | [Parameter(Mandatory)] # this indicates that the parameter is mandatory and must be presented at runtime 5 | [string]$rgName, 6 | 7 | [Parameter(Mandatory)] 8 | [string]$location 9 | ) 10 | 11 | # this is splatting, a method of passing a collection of parameter values to a command as a single unit 12 | $params = @{ 13 | 'Name' = $rgName 14 | 'Location' = $location 15 | } 16 | if($PSCmdlet.ShouldProcess('location')){ 17 | New-AzResourceGroup @params 18 | } 19 | } -------------------------------------------------------------------------------- /Week6/Project4/serverless.yml: -------------------------------------------------------------------------------- 1 | # Welcome to Serverless! 2 | # 3 | # This file is the main config file for your service. 4 | # It's very minimal at this point and uses default values. 5 | # You can always add more config options for more control. 6 | # We've included some commented out config examples here. 7 | # Just uncomment any of them to get that config option. 8 | # 9 | # For full config options, check the docs: 10 | # docs.serverless.com 11 | # 12 | # Happy Coding! 
13 | 14 | service: project4-lambda 15 | 16 | frameworkVersion: '2' 17 | 18 | provider: 19 | name: aws 20 | runtime: python3.8 21 | lambdaHashingVersion: 20201221 22 | 23 | functions: 24 | hello: 25 | handler: handler.hello 26 | events: 27 | - http: 28 | path: / 29 | method: get -------------------------------------------------------------------------------- /Week8/Project3/nginx_aks.yml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: nginx-deployment 5 | spec: 6 | selector: 7 | matchLabels: 8 | app: nginx-deployment 9 | replicas: 2 10 | template: 11 | metadata: 12 | labels: 13 | app: nginx-deployment 14 | spec: 15 | containers: 16 | - name: nginx 17 | image: nginx:latest 18 | resources: 19 | limits: 20 | memory: "128Mi" 21 | cpu: "500m" 22 | ports: 23 | - containerPort: 80 24 | 25 | --- 26 | # This is a service 27 | apiVersion: v1 28 | kind: Service 29 | metadata: 30 | name: nginx-service 31 | spec: 32 | type: LoadBalancer 33 | ports: 34 | - port: 80 35 | selector: 36 | app: nginx-deployment 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /Week4/3_Contributing_Open_Source.md: -------------------------------------------------------------------------------- 1 | # Contributing to an Open Source Project 2 | 3 | There are several reasons you should consider contributing to an open source project. Some of them are: 4 | 5 | - Increase your skills and competencies. 6 | - Increase your visibility. 7 | - You can do something for others. 8 | 9 | ## Resources 10 | 11 | - [First Contributions](https://github.com/firstcontributions/first-contributions) is an interesting GitHub repo to try out your first contribution. 
12 | - [The Definitive Guide to Contributing to Open Source](https://www.freecodecamp.org/news/the-definitive-guide-to-contributing-to-open-source-900d5f9f2282/) 13 | 14 | ## My Project 15 | 16 | For this project, please see [My First Contribution](Project3/My_First_Contribution.md). In this document, I describe the steps to contribute to a project on GitHub. 17 | -------------------------------------------------------------------------------- /Week8/3_Deploy_AKS.md: -------------------------------------------------------------------------------- 1 | # Deploying to AKS 2 | 3 | In this project we'll deploy an NGINX application to the Kubernetes cluster we created in the previous exercise. 4 | 5 | ## The Project 6 | 7 | - We'll use [this Kubernetes manifest file](Project3/nginx_aks.yml) to deploy our service. 8 | - To create the resource from the manifest file, we run `kubectl create -f nginx_aks.yml`. 9 | - The `-f` flag stands for filename. 10 | - To see our deployments, we run `kubectl get deployments`. 11 | - To see our running services, we type `kubectl get service`. 12 | - To see our services running from the Azure portal, we go to our AKS resource, then to **Services and ingresses**, from the left menu. 13 | ![Services and ingresses](project3_services.png) 14 | - To visit our application, we click on the service name. In this page, we can find the external IP address. 15 | ![Nginx](project3_nginx.png) 16 | -------------------------------------------------------------------------------- /Week8/2_AKS_Cluster.md: -------------------------------------------------------------------------------- 1 | # Creating an AKS Cluster 2 | 3 | For this project, we'll create a Kubernetes cluster in [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/) using Azure CLI. 4 | 5 | ## The Project 6 | 7 | - To deploy our resources, we'll use [this PowerShell script](Project2/azaks.ps1). 
8 | - We'll use the `az aks create` Azure CLI command to create our Kubernetes cluster. 9 | - There's a parameters block in the script, so it can be reused. 10 | 11 | - To pull down the AKS configuration so we can interact with it from our local computer, we run this command: 12 | 13 | `az aks get-credentials --name cloudskillsaks66 --resource-group azaksrg` 14 | 15 | - This command will return a result that says the AKS cluster has been merged into our local context. 16 | - To get the nodes: `kube ctl get nodes` 17 | - With this command, we see that our context is pointing to the AKS cluster running in Azure. 18 | -------------------------------------------------------------------------------- /Week3/Notes_week3.md: -------------------------------------------------------------------------------- 1 | # Infrastructure as Code 2 | 3 | ## Core practices of IaC 4 | 5 | - Everything defined as code 6 | - Reusability 7 | - Consistency 8 | - Transparency 9 | - **Validation and testing** (just as important as testing code) 10 | - Small and simple parts that you can change independently 11 | - You want your code to be as simple as possible 12 | 13 | ## Configuration management vs infrastructure provisioning 14 | 15 | - Configuration management: Terraform 16 | - Infrastructure provisioning: Chef, Puppet, Ansible 17 | - With the growth of microservices and serverless 18 | 19 | ## Where do I start? 
20 | 21 | - Automate one thing at a time and start slow 22 | - DevOps culture is about making small changes, one at a time, and improve upon them 23 | - Don't try to do all at once 24 | - Automate the most tedious processes first, like server builds 25 | - **If you try to do everything at once, you'll most likely have to go back and redo it** 26 | -------------------------------------------------------------------------------- /Week6/Project2/main.tf: -------------------------------------------------------------------------------- 1 | provider "azurerm" { 2 | features {} 3 | } 4 | 5 | resource "azurerm_resource_group" "azurerg" { 6 | name = var.resource_group_name 7 | location = var.location 8 | } 9 | 10 | resource "azurerm_app_service_plan" "cloudskills-sp" { 11 | name = var.app_service_plan_name 12 | location = azurerm_resource_group.azurerg.location 13 | resource_group_name = azurerm_resource_group.azurerg.name 14 | kind = "Linux" 15 | reserved = true 16 | 17 | sku { 18 | tier = "Standard" 19 | size = "S1" 20 | } 21 | } 22 | 23 | resource "azurerm_app_service" "cloudskills-ap" { 24 | name = var.app_service_name 25 | location = azurerm_resource_group.azurerg.location 26 | resource_group_name = azurerm_resource_group.azurerg.name 27 | app_service_plan_id = azurerm_app_service_plan.cloudskills-sp.id 28 | 29 | site_config { 30 | linux_fx_version = "NODE|12-lts" 31 | } 32 | } -------------------------------------------------------------------------------- /Week2/Project5/Python/s3_bucket_test.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | # the unittest.TestCase argument tells Python this is a test, not application level code 4 | 5 | class TestS3(unittest.TestCase): 6 | def test_bucket_name_value(self): 7 | bucket = 'cloudskills808' 8 | 9 | self.assertEqual(bucket,'cloudskills808') # the value on the right is going to be compared to the value on the left and assert whether or not they are equal 10 | 11 
| def test_region_value(self): 12 | region = 'us-east-1' 13 | 14 | self.assertEqual(region,'us-east-1') 15 | 16 | def test_bucket_name_is_string(self): 17 | bucket = 'cloudskills808' 18 | 19 | self.assertTrue(type(bucket),str) # here, we are asserting whether or not the argument is true (if it's a string) 20 | 21 | def test_region_is_string(self): 22 | region = 'us-east-1' 23 | 24 | self.assertTrue(type(region),str) 25 | 26 | if __name__ == '__main__': 27 | unittest.main() # this is a function in the unittest library -------------------------------------------------------------------------------- /Week2/4_Linting_PowerShell_Python.md: -------------------------------------------------------------------------------- 1 | # Linting in PowerShell and Python 2 | 3 | - Linting is the process of running a program that will analyze code for potential errors. 4 | - For Python, we will use **Pylint** 5 | - To install Pylint, run `pip install pylint` 6 | - In the video, Michael types `pylint s3bucket.py` to run Pylint, but in my case I got an error. 7 | - I ran `python -m pylint s3bucket.py` and it worked. 8 | - For PowerShell, we are going to use the **PSScriptAnalyzer** module. 9 | - Michael mentions in the video that this module comes preinstalled in Windows 10, but in my case I had to install it. 10 | - To install the module, I ran `Install-Module -Name PSScriptAnalyzer` 11 | - We then create a new PowerShell script called [linting.ps1](Project3/linting.ps1), which will execute the Script Analyzer. 12 | - The *linting.ps1* script contains the following command: `Invoke-ScriptAnalyzer -Path .` 13 | - the . (dot) as argument for the *Path* parameter tells Script Analyzer to look in the same folder where the Script Analyzer script resides. 
14 | -------------------------------------------------------------------------------- /Week3/Project4/main.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "name": { 6 | "type": "string", 7 | "defaultValue": "storagethx2893" 8 | }, 9 | "location": { 10 | "type": "string", 11 | "defaultValue": "eastus" 12 | } 13 | }, 14 | "functions": [], 15 | "variables": { 16 | "storageSku": "Standard_LRS" 17 | }, 18 | "resources": [ 19 | { 20 | "type": "Microsoft.Storage/storageAccounts", 21 | "apiVersion": "2020-08-01-preview", 22 | "name": "[parameters('name')]", 23 | "location": "[parameters('location')]", 24 | "kind": "Storage", 25 | "sku": { 26 | "name": "[variables('storageSku')]" 27 | }, 28 | "properties": { 29 | "allowBlobPublicAccess": false 30 | } 31 | } 32 | ], 33 | "outputs": { 34 | "id": { 35 | "type": "string", 36 | "value": "[resourceId('Microsoft.Storage/storageAccounts', parameters('name'))]" 37 | } 38 | } 39 | } -------------------------------------------------------------------------------- /Week6/Project1/createfunctionapp.ps1: -------------------------------------------------------------------------------- 1 | function Create-FunctionApp { 2 | param ( 3 | [Parameter(Mandatory)] 4 | [string]$RGName, 5 | 6 | [Parameter(Mandatory)] 7 | [string]$name, 8 | 9 | [Parameter(Mandatory)] 10 | [string]$storageAccountName 11 | ) 12 | 13 | az group create --name $RGName --location westeurope 14 | 15 | az storage account create --name $storageAccountName ` 16 | --resource-group $RGName 17 | 18 | $plan = az functionapp plan create -g $RGName ` 19 | -n $($name + 'plan') ` 20 | --min-instances 1 ` 21 | --max-burst 5 ` 22 | --sku EP1 23 | $plan 24 | 25 | az functionapp create -g $RGName ` 26 | -n $name ` 27 | -p $($name + 'plan') ` 28 | --runtime powershell ` 29 | -s 
$storageAccountName ` 30 | --functions-version 2 31 | 32 | 33 | } -------------------------------------------------------------------------------- /Week8/1_Minikube_Env.md: -------------------------------------------------------------------------------- 1 | # Creating a Minikube Environment 2 | 3 | - [minikube](https://minikube.sigs.k8s.io/docs/) is a tool that lets you run Kubernetes locally. minikube runs a single-node Kubernetes cluster on your PC so that you can try out Kubernetes, or for daily development work. 4 | - To install minikube, follow [these instructions](https://minikube.sigs.k8s.io/docs/start/). 5 | - I'm on a Windows 10 machine, and I installed minikube using Chocolatey, by running `choco install minikube`. 6 | - After minikube is successfully installed, we run `minikube start` to start our cluster. 7 | - To verify we have a running cluster, we type `kubectl get node`. 8 | - To deploy our application, we'll use a Kubernetes manifest, which is a file written in YAML that is used to create, modify and delete Kubernetes resources. 9 | - [This is our Kubernetes manifest](Project1/nginx.yml) 10 | - Here, we'll deploy an NGINX server running in 2 pods. 11 | - To create the deployment, from the directory where the manifest is, we run `kubectl create -f nginx.yml`. 12 | - To get the deployments, we run `kubectl get deployments`. 13 | - To get the pods, we run `kubectl get pods`. 14 | -------------------------------------------------------------------------------- /.github/workflows/hello.yml: -------------------------------------------------------------------------------- 1 | # This is a basic workflow to help you get started with Actions 2 | 3 | name: CI 4 | 5 | # Controls when the action will run. 
6 | on: 7 | # Triggers the workflow on push or pull request events but only for the main branch 8 | push: 9 | branches: [ main ] 10 | pull_request: 11 | branches: [ main ] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 17 | jobs: 18 | # This workflow contains a single job called "build" 19 | build: 20 | # The type of runner that the job will run on 21 | runs-on: ubuntu-latest 22 | 23 | # Steps represent a sequence of tasks that will be executed as part of the job 24 | steps: 25 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 26 | - uses: actions/checkout@v2 27 | 28 | # Runs a single command using the runners shell 29 | - name: Run a one-line script 30 | run: echo Hello, world! 31 | 32 | # Runs a set of commands using the runners shell 33 | - name: Run a multi-line script 34 | run: | 35 | echo Add other actions to build, 36 | echo test, and deploy your project. 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Local .terraform directories 2 | Week3/Project2/.terraform/ 3 | 4 | # .tfstate files 5 | Week3/Project2/terraform.tfstate 6 | Week3/Project2/terraform.tfstate.backup 7 | *.tfstate 8 | *.tfstate.* 9 | 10 | # Crash log files 11 | crash.log 12 | 13 | # Exclude all .tfvars files, which are likely to contain sentitive data, such as 14 | # password, private keys, and other secrets. These should not be part of version 15 | # control as they are data points which are potentially sensitive and subject 16 | # to change depending on the environment. 
17 | # 18 | *.tfvars 19 | 20 | # Ignore override files as they are usually used to override resources locally and so 21 | # are not checked in 22 | override.tf 23 | override.tf.json 24 | *_override.tf 25 | *_override.tf.json 26 | 27 | # Include override files you do wish to add to version control using negated pattern 28 | # 29 | # !example_override.tf 30 | 31 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan 32 | # example: *tfplan* 33 | 34 | # Ignore CLI configuration files 35 | .terraformrc 36 | terraform.rc 37 | 38 | # Ignore this too 39 | Week3/Project2/.terraform.lock.hcl 40 | 41 | # Ignore also these 42 | Week3/Project3/Testing/terraform-aws-webserver/examples/webserver/.terraform/providers/registry.terraform.io/hashicorp/aws/3.7.0/windows_amd64/terraform-provider-aws_v3.7.0_x5.exe -------------------------------------------------------------------------------- /Week2/5_Testing_PowerShell_Python.md: -------------------------------------------------------------------------------- 1 | # Testing in PowerShell and Python 2 | 3 | - For these labs, we'll use unit testing. 4 | - Unit testing is a type of software testing where individual units, or components, of a software are tested. The purpose is to validate that each unit of the software code performs as expected. Unit Testing is done during the development (coding phase) of an application by the developers. Unit Tests isolate a section of code and verify its correctness. A unit may be an individual function, method, procedure, module, or object. 5 | 6 | ## PowerShell 7 | 8 | - For the PowerShell code, we'll use Pester, a test framework. 9 | - For more information on Pester: [What Is Pester and Why Whould I Care?](https://devblogs.microsoft.com/scripting/what-is-pester-and-why-should-i-care/#:~:text=Pester%20is%20a%20test%20framework,tests%20and%20report%20the%20results.) 10 | - A book on Pester: [The Pester Book](https://leanpub.com/pesterbook) by Adam Bertram. 
11 | - The result: 12 | ![Pester Result](pester_result.png) 13 | 14 | ## Python 15 | 16 | - For the Python code, we will use the **unittest** library. 17 | - More information on testing in Python: [Getting Started with Testing in Python](https://realpython.com/python-testing/) 18 | - The code can be found [here](Project5/Python/s3_bucket_test.py) 19 | - The result: 20 | ![Python unittest result](python_test_result.png) 21 | -------------------------------------------------------------------------------- /Week5/4_Setting_Up_Continuous_Monitoring.md: -------------------------------------------------------------------------------- 1 | # Setting Up Continuous Monitoring 2 | 3 | ## Definition 4 | 5 | The objective of a continuous monitoring program is to determine if the complete set of planned, required, and deployed security controls within an information system or inherited by the system continue to be effective over time in light of the inevitable changes that occur. 6 | 7 | ## The Project 8 | 9 | - For this project I'll be using an existing CI/CD pipeline, from a previous lab. 10 | - I built a pipeline in Azure DevOps following the instructions from the [Configuring CI/CD Pipelines as Code with YAML in Azure DevOps](https://azuredevopslabs.com/labs/azuredevops/yaml/) hands-on lab. 11 | - Under *Pipelines*, and then *Releases*, we're gonna click on **New release pipeline**. 12 | - I selected the **Azure App Service deployment** for template, since that's what I was doing with the pipeline. 13 | - After saving the stage, I went to **Pre-deployment conditions** (the set of icons on the left side of the stage rectangle) and added the [Query Azure Monitor alerts](https://docs.microsoft.com/en-us/azure/devops/pipelines/tasks/utility/azure-monitor?view=azure-devops) gate to the condition. 
14 | ![gate](project4_predeployment.png) 15 | - More on gates: [Release deployment control using gates](https://docs.microsoft.com/en-us/azure/devops/pipelines/release/approvals/gates?view=azure-devops). 16 | -------------------------------------------------------------------------------- /Week5/6_Working_AWS_CodeDeploy.md: -------------------------------------------------------------------------------- 1 | # Working with AWS CodeDeploy 2 | 3 | - AWS has a number of different processes for CI/CD. 4 | - [CodeCommit](https://aws.amazon.com/codecommit/) is a fully-managed source control service that hosts secure Git-based repositories. 5 | - [CodeArtifact](https://aws.amazon.com/codeartifact/) is a fully managed artifact repository service that makes it easy to securely store, publish, and share software packages. 6 | - [CodeBuild](https://aws.amazon.com/codebuild/) is a fully managed continuous integration service that compiles source code, run tests, and produces software packages that are ready to deploy. This is pretty much the CI process. 7 | - [CodeDeploy](https://aws.amazon.com/codedeploy/) is a deployment service that automates application deployments to Amazon EC2 instances, on-premises instances, serverless Lambda functions, or Amazon ECS services. 8 | - [CodePipeline](https://aws.amazon.com/codepipeline/) is a fully managed continuous delivery service that helps you automate your release pipelines for fast and reliable application and infrastructure updates. 9 | 10 | ## The Project 11 | 12 | - For this project, we'll create a new repository in CodeCommit, and we'll store some code in there. 13 | - CodeCommit works much like GitHub. We can create files directly from the repository page, we can clone the repo, etc. 
14 | 15 | ![CodeCommit](project6_codecommit_main.png) 16 | -------------------------------------------------------------------------------- /Week5/Notes_week5.md: -------------------------------------------------------------------------------- 1 | # Continuous Integration, Continuous Deploy, and Continuous Delivery 2 | 3 | ## What is CI/CD? 4 | 5 | - CI/CD is automating the process of committing something to code and rolling out these new changes as fast, stable and secure as possible. 6 | - CI/CD is very important for the enterprise, in order for them to be competitive. 7 | - **Continuous Integration**: Automating the process of merging code into master. 8 | - **Continuous Delivery**: Extending CI by deploying new merged code into the environment for testing, QA, and production. 9 | - **Continuous Deployment**: Extending continuous delivery by removing human intervention from the CI/CD process. 10 | 11 | ## CI/CD tools 12 | 13 | - Azure DevOps 14 | - GitHub Actions 15 | - AWS Code Deploy 16 | - Jenkins 17 | - CircleCI 18 | - TravisCI 19 | 20 | ## CI/CD uses 21 | 22 | - CI/CD is not just for applications. 23 | - It can be used for infrastructure, and database administration. 24 | - We can use CI/CD processes for packages containing software to be installed. 25 | - **If you can codify it, you can build a CI/CD process for it** 26 | 27 | ## Tips and Tricks 28 | 29 | - Use pipeline templates for common code. 30 | - For CI, speed is key. 31 | - Caching strategy to re-use data from previous builds. 32 | - Run jobs in parallel where it makes sense. 33 | - Try to run tests in parallel if you can. 
34 | - **Secure your pipelines.** 35 | - **Secrets** 36 | - **Permissions** 37 | -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/test/webserver_test.go: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import ( 4 | "fmt" 5 | "testing" 6 | "time" 7 | 8 | http_helper "github.com/gruntwork-io/terratest/modules/http-helper" 9 | "github.com/gruntwork-io/terratest/modules/terraform" 10 | ) 11 | 12 | func TestTerraformWebServerExample(t *testing.T) { 13 | 14 | // the values to pass into the Terraform CLI 15 | terraformOptions := terraform.WithDefaultRetryableErrors(t, &terraform.Options{ 16 | 17 | // the path where the example Terraform code is located 18 | TerraformDir: "../examples/webserver", 19 | 20 | // variables to pass to the Terraform code using -var options 21 | Vars: map[string]interface{}{ 22 | "region": "us-west-2", 23 | "servername": "testwebserver", 24 | }, 25 | }) 26 | 27 | // run a Terraform init and apply with the Terraform options 28 | terraform.InitAndApply(t, terraformOptions) 29 | 30 | // run a Terraform destroy at the end of the test 31 | defer terraform.Destroy(t, terraformOptions) 32 | 33 | publicIp := terraform.Output(t, terraformOptions, "public_ip") 34 | 35 | url := fmt.Sprintf("http://%s:8080", publicIp) 36 | 37 | // this will run an HTTP get request, validate we are getting a 200 response with the "My first Terraform module!" content 38 | // Terratest will run it for 30 times every 5 seconds 39 | http_helper.HttpGetWithRetry(t, url, nil, 200, "My first Terraform module!", 30, 5*time.Second) 40 | 41 | } 42 | -------------------------------------------------------------------------------- /Week6/Notes_week6.md: -------------------------------------------------------------------------------- 1 | # Serverless in Azure and AWS 2 | 3 | - There is always a server, but we aren't managing them. 
4 | - Before serverless, the only option was to run apps on a virtual machine. Then, you would still have to worry about the infrastructure. 5 | - With serverless, you simply deploy your code. 6 | - No more managing infrastructure. 7 | - Focus on the code. 8 | - Smaller footprint. 9 | - Serverless is not just web frontend apps. 10 | - Backend can also work serverless. 11 | - **Scalability** 12 | - Auto-scaling 13 | - Much better performance 14 | 15 | ## Azure Functions 16 | 17 | - Azure Functions are part of the serverless backend family. 18 | - Azure Functions allow you to upload any type of backend code. 19 | 20 | ## Azure Web Apps 21 | 22 | - Frontend code. 23 | - Supports many languages. 24 | - Can run pure code or in a container. 25 | 26 | ## AWS Elastic Bean Stalk 27 | 28 | - Frontend code. 29 | - Supports multiple languages. 30 | - Here, you can also run containers. 31 | 32 | ## Serverless Monitoring 33 | 34 | - In Azure: **Azure Monitor** and **Application Insights**. 35 | - In AWS: **Cloudwatch**. 36 | 37 | ## Serverless Pros and Cons 38 | 39 | Pros | Cons 40 | ------------ | ------------- 41 | No infrastructure to manage | Data isn't stored 42 | Highly scalable | Vendor lock-in 43 | Pay for what you use | Debugging may be different for sysadmins 44 | 45 | ## Serverless Kubernetes 46 | 47 | - GKE Autopilot from GCP. 48 | - No nodes need to be managed. 49 | - Billed per pod. 50 | -------------------------------------------------------------------------------- /Week7/4_IAM_Roles.md: -------------------------------------------------------------------------------- 1 | # Creating IAM Roles, Users and Groups 2 | 3 | In this lab we'll learn how to create users and groups, and how to add users to groups in AWS. We'll also assign roles. 4 | 5 | ## The Project 6 | 7 | - To create a new user using the CLI, we run the following command: `aws iam create-user --user-name User`. 8 | - To create a new group using the CLI, we run `aws iam create-group --group-name Group`. 
9 | - To add a user to a group: `aws iam add-user-to-group --user-name User --group-name Group`. 10 | - If we go to the console, then to IAM, and click on Users, we'll see our newly created user. Mine is called Chase. 11 | 12 | ![IAM users](project4_iam_users.png) 13 | 14 | - A newly created group has no permissions attached to it. To attach permissions to a group, we go to **Attach Policy** and choose from a list. We can attach up to 10 managed policies. 15 | 16 | ![IAM policy](project4_iam_group_policy.png) 17 | 18 | ### Roles 19 | 20 | - An IAM role is an IAM identity that you can create in your account that has specific permissions. An IAM role is similar to an IAM user, in that it is an AWS identity with permission policies that determine what the identity can and cannot do in AWS. 21 | - To create a role, we go to roles, then **Create role** and from there, we can choose from a number of common use cases. 22 | - For this lab, we'll choose EC2, and attach the **AmazonEC2FullAccess** built-in policy. 23 | - From now on, all EC2 instances with this role attached will have full access to EC2. 24 | 25 | ![IAM roles](project4_iam_roles.png) 26 | -------------------------------------------------------------------------------- /Week4/Project3/My_First_Contribution.md: -------------------------------------------------------------------------------- 1 | # My First Contribution 2 | 3 | For my first contribution to an open source project, I decided to take the [First Contributions](https://github.com/firstcontributions/first-contributions) tutorial. In this tutorial, we add our own name to a file and then open a pull request, to merge the change to the main repo. 4 | 5 | 1. The first step is to fork the repo to our own GitHub account. 6 | 2. We then clone the repo to our local machine, using the `git clone https://github.com/suvo-oko/first-contributions.git`. 7 | 3. We move into the newly created *first-contributions* directory by typing `cd first-contributions`. 8 | 4. 
We now create a new branch with the command `git checkout -b add-rodrigo-miravalles`. The *git checkout* switches branches and the *-b* flag creates a new branch. From now on, we're going to be working in this branch. 9 | 5. We are going to add our name to the *Contributors.md* file and save it. 10 | 6. We now add the file to the staging area with the command `git add Contributors.md`. 11 | 7. To commit the changes, we run `git commit -m "Add Rodrigo Miravalles to Contributors list"`. 12 | 8. To push the changes to the remote repo, we run `git push origin add-rodrigo-miravalles`, where *add-rodrigo-miravalles* is the name of the working branch. 13 | 9. To submit the changes for review, we go to our GitHub repo and click on the button **Compare and pull request**. 14 | 10. We add a title and a message and click on **Create pull request**. 15 | 16 | ![Contributing 1](../contributing1.png) 17 | ![Contributing 2](../contributing2.png) 18 | -------------------------------------------------------------------------------- /Week4/Project2/Managing_Merge_Conflicts.md: -------------------------------------------------------------------------------- 1 | # Managing Merge Conflicts 2 | 3 | For this project, I took the [Managing Merge Conflicts](https://lab.github.com/githubtraining/managing-merge-conflicts) learning module. Here, I'll describe the steps I took to resolve the merge conflicts presented in the lesson. 4 | 5 | ## Step 1 6 | 7 | - In this scenario, two colleagues are working in their own branches, made changes to the same lines of the same file, and opened pull requests. One pull request was merged to the main branch, but the second pull request has a conflict. 8 | - GitHub detects this conflict and warns the maintainer with the message **This branch has conflicts that must be resolved**. 9 | - Git highlights the content that is in conflict by placing the lines to be verified between the `<<<<<<< update-config` and `>>>>>>> main` markers. 
The two different sections are separated by the `=======` marker. 10 | ![merge conflict 1](mergeconflict1.png) 11 | - Once the conflict is resolved, we can go ahead and merge the pull request and delete the branch. 12 | 13 | ## Step 2 14 | 15 | - There will be cases where there are several conflicts in a single file, or different conflicting files. 16 | - As the maintainer of the repo, we need to verify the files and resolve all conflicts, to be able to commit the merge. 17 | ![merge conflict 2](mergeconflict2.png) 18 | 19 | ## Step 3 20 | 21 | - In this step, we'll create our own conflict, by altering the contents of a file. 22 | - After editing the file and committing the change, we see the the message that the branch has conflicts that must be resolved. We edit the file by removing the conflicting part and click on the **Mark as resolved** button. 23 | ![merge conflict final](mergeconflictfinal.png) 24 | -------------------------------------------------------------------------------- /Week2/Project2/s3bucket_region.py: -------------------------------------------------------------------------------- 1 | # kindly supplied by Robert Stojan from the group 2 | import sys 3 | import boto3 4 | 5 | try: 6 | def main(): 7 | create_s3bucket(bucket_name, region=None) 8 | 9 | except Exception as e: 10 | print(e) 11 | 12 | def create_s3bucket(bucket_name, region=None): 13 | """Create an S3 bucket in a specified region 14 | ​ 15 | If a region is not specified, the bucket is created in the S3 default 16 | region (us-east-1). 
17 | ​ 18 | :param bucket_name: Bucket to create 19 | :param region: String region to create bucket in, e.g., 'us-west-2' 20 | :print: Prints the bucket name that was created 21 | """ 22 | 23 | # Sets the region if passed as the second argument 24 | region = sys.argv[2] 25 | 26 | # If a region isn't specified, use us-east-1 (Virginia) 27 | if region is None: 28 | s3_bucket=boto3.client( 29 | 's3', 30 | ) 31 | bucket = s3_bucket.create_bucket( 32 | Bucket=bucket_name, 33 | ACL='private', 34 | )​ 35 | 36 | # If a region is specified, pass that in to the client and CreateBucketConfiguration 37 | else: 38 | s3_bucket=boto3.client( 39 | 's3', 40 | region_name=region 41 | ) 42 | ​ 43 | # Formatting the LocationConstraint key-value pair the API is expecting for CreateBucketConfiguration 44 | location = {'LocationConstraint': region} 45 | ​ 46 | bucket = s3_bucket.create_bucket( 47 | Bucket=bucket_name, 48 | ACL='private', 49 | CreateBucketConfiguration=location 50 | ) 51 | ​ 52 | print(bucket) 53 | ​ 54 | bucket_name = sys.argv[1] 55 | ​ 56 | if __name__ == '__main__': 57 | main() 58 | -------------------------------------------------------------------------------- /Week2/2_Reusable_clean_Python.md: -------------------------------------------------------------------------------- 1 | # Reusable and Clean Python Code 2 | 3 | ## Introduction 4 | 5 | - We'll use the AWS Boto3 library to deploy a resource to AWS. 6 | - We should strive to write clean and reusable code. 7 | 8 | ## Install Boto3 9 | 10 | - Boto3 is the name of the Python SDK for AWS. It allows you to directly create, update and delete AWS resources from your Python scripts. 11 | - [Python, Boto3 and AWS S3 Desmystified](https://realpython.com/python-boto3-aws-s3/) 12 | - How to install Boto3: From the command line, run `pip install boto3`. 13 | - The function, when run, will create an S3 bucket in AWS. 14 | - To authenticate to AWS: 15 | - From the command line, run `aws configure`. 
16 | - [Configuring the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) 17 | - I was prompted to enter the following information, obtained from my AWS Console: *Access Key ID*, *Secret Access Key*, *AWS Region*, and *Output Format*. 18 | - For the default region, I chose *eu-west-3*, but this resulted in an error when I ran the script. 19 | - Apparently, the *CreateBucket* operation isn't allowed in all regions. The S3 Bucket will default to *us-east-1* if you don't specify the region. 20 | - I ran `aws configure` again and changed the default region to *us-east-1*. After this change, the script worked and the bucket was created correctly. 21 | - The final code can be found [here](Project2/s3bucket.py). 22 | - There's the option, as pointed out in the forum, to specify the region in the script itself. 23 | - To run the code and create the S3 Bucket: from the command line, run `python s3bucket.py cloudskills808`, where *cloudskills808* is the name of the bucket to be created. 24 | 25 | This is the final result, with the S3 Bucket already up and running in AWS. 26 | 27 | ![AWS Portal](portal_bucket_live.png) 28 | -------------------------------------------------------------------------------- /Week3/Project2/modules/ec2/main.tf: -------------------------------------------------------------------------------- 1 | # this file is meant to be a module, so it needs to referenced in another Terraform configuration file 2 | # below is a security group resource that will allow incoming communication through port 8080 3 | resource "aws_security_group" "allow_8080" { 4 | name = "allow_8080" 5 | description = "Allow port 8080 inbound traffic" 6 | 7 | ingress { 8 | description = "8080 from VPC" 9 | from_port = 8080 10 | to_port = 8080 11 | protocol = "tcp" 12 | cidr_blocks = ["0.0.0.0/0"] # the 0.0.0.0/0 means any IP address is allowed. 
13 | } 14 | } 15 | 16 | data "aws_ami" "ubuntu" { 17 | most_recent = true 18 | 19 | filter { 20 | name = "name" 21 | values = ["ubuntu/images/hvm-ssd/ubuntu-focal-20.04-amd64-server-*"] 22 | } 23 | 24 | filter { 25 | name = "virtualization-type" 26 | values = ["hvm"] 27 | } 28 | 29 | owners = ["099720109477"] # Canonical 30 | } 31 | 32 | resource "aws_instance" "web" { 33 | ami = data.aws_ami.ubuntu.id # this value references the data source block. this is called interpolation 34 | instance_type = var.size # this will allow the resource to take variable inputs 35 | 36 | # this is to assign the security group to the aws instance. this argument takes in lists[] 37 | vpc_security_group_ids = [aws_security_group.allow_8080.id] 38 | 39 | # user data to provide when launching the instance. in this case, when the instance launches, it will run the script below 40 | user_data = <<-EOF 41 | #!/bin/bash 42 | echo "My first Terraform module!" > index.html 43 | nohup busybox httpd -f -p 8080 & 44 | EOF 45 | 46 | tags = { 47 | Name = var.servername # here too, this will reference the variable named servername. this will make the code reusable 48 | } 49 | } -------------------------------------------------------------------------------- /Week3/Project3/Testing/terraform-aws-webserver/main.tf: -------------------------------------------------------------------------------- 1 | # this file is meant to be a module, so it needs to referenced in another Terraform configuration file 2 | # below is a security group resource that will allow incoming communication through port 8080 3 | resource "aws_security_group" "allow_8080" { 4 | name = "allow_8080" 5 | description = "Allow port 8080 inbound traffic" 6 | 7 | ingress { 8 | description = "8080 from VPC" 9 | from_port = 8080 10 | to_port = 8080 11 | protocol = "tcp" 12 | cidr_blocks = ["0.0.0.0/0"] # the 0.0.0.0/0 means any IP address is allowed. 
13 | } 14 | } 15 | 16 | data "aws_ami" "ubuntu" { 17 | most_recent = true 18 | 19 | filter { 20 | name = "name" 21 | values = ["ubuntu/images/hvm-ssd/ubuntu-focal-20.04-amd64-server-*"] 22 | } 23 | 24 | filter { 25 | name = "virtualization-type" 26 | values = ["hvm"] 27 | } 28 | 29 | owners = ["099720109477"] # Canonical 30 | } 31 | 32 | resource "aws_instance" "web" { 33 | ami = data.aws_ami.ubuntu.id # this value references the data source block. this is called interpolation 34 | instance_type = var.size # this will allow the resource to take variable inputs 35 | 36 | # this is to assign the security group to the aws instance. this argument takes in lists[] 37 | vpc_security_group_ids = [aws_security_group.allow_8080.id] 38 | 39 | # user data to provide when launching the instance. in this case, when the instance launches, it will run the script below 40 | user_data = <<-EOF 41 | #!/bin/bash 42 | echo "My first Terraform module!" > index.html 43 | nohup busybox httpd -f -p 8080 & 44 | EOF 45 | 46 | tags = { 47 | Name = var.servername # here too, this will reference the variable named servername. this will make the code reusable 48 | } 49 | } -------------------------------------------------------------------------------- /Week6/4_Lambda_Function.md: -------------------------------------------------------------------------------- 1 | # Create a Lambda Function 2 | 3 | [AWS Lambda](https://aws.amazon.com/lambda/) is the most popular serverless offering from AWS. Like other serverless compute services, AWS Lambda lets you run code without provisioning or managing servers, creating workload-aware cluster scaling logic, maintaining event integrations, or managing runtimes. 4 | 5 | For this project we'll be using the [Serverless](https://www.serverless.com/) framework to deploy a Lambda function. Serverless is an open-source web framework that allows you to easily deploy your code to different *function as a service* providers. 
Originally, the framework was designed to work with AWS Lambda. Now it supports other providers, such as Azure, with Azure Functions. 6 | 7 | ## The Project 8 | 9 | - The first step is to install Serverless. We can do it either with NPM or Chocolatey. 10 | - I installed Serverless using Chocolatey by running `choco install serverless`. 11 | - If you want to install it with NPM, first you need to install NPM, which you can get [from here](https://www.npmjs.com/get-npm). 12 | - Then, from the terminal, run `npm install -g serverless`. 13 | - After Serverless is correctly installed, I ran `serverless create --template aws-python3` to create the boilerplate files. Make sure you are in the right directory. This command will create a Lambda function template with Boto3. 14 | - This command will create 2 files in the directory you ran it: [handler.py](Project4/handler.py) and [serverless.yml](Project4/serverless.yml). 15 | - The command to deploy the function is `serverless deploy`. 16 | - This is the newly created function: 17 | ![Lambda Function Console](project4_lambda_aws_console.png) 18 | 19 | ## Conclusion 20 | 21 | - This was my first contact with the Serverless framework, and it seems like a very powerful tool. I'll have to explore it further, and try it with Azure Functions. 22 | -------------------------------------------------------------------------------- /Week6/1_Azure_Function.md: -------------------------------------------------------------------------------- 1 | # Build an Azure Function App 2 | 3 | In this project, we'll create an Azure Function using PowerShell. 4 | 5 | ## The Project 6 | 7 | - First, we'll create our PowerShell file, that contains a function that will run commands to create our Azure Function. 8 | - This is my PowerShell file: [createfunctionapp.ps1](Project1/createfunctionapp.ps1). 9 | - In this file, I added the `az group create` command to create the resource group. 
10 | - When the file is complete, we select all its content (ctrl+A), right-click and select **Run selection** (or F8 in VS Code). This will load the function into memory. 11 | - Now, from the terminal, I ran `Create-FunctionApp -RGName 'cloudskillsfunctionsrg' -name 'cloudskillsfunctions' -storageAccountName 'cloudskillsfunctions6500'` to create the resources in Azure using our PowerShell function. `RGName`, `name`, and `storageAccountName` are the mandatory parameters, as defined in the `param` block of the function. These parameters are to be passed at runtime. 12 | - After this action is completed, we can see the resources in the Azure Portal. 13 | ![Project1 Azure Portal](project1_azureportal.png) 14 | - Now, from the Azure Portal, we're gonna go ahead and create our function. 15 | - In our Function resource, we go to the **Functions** section, and click on the first item, **Functions**. We then click on **+Add** to add a new function. We'll use the **HTTP Trigger** template. This function will run whenever it receives an HTTP request. 16 | - This is the **Add function** window: 17 | ![add function](project1_addfunction.png) 18 | - To test our function, we'll go to the newly created function and, under the **Developer** section, we'll go to **Code + Test**. Here, we can see our function's code, which is a PowerShell script. 19 | - On the upper menu, we click on **Test/Run** to test our function. 20 | - This is the result of the test. 21 | ![test result](project1_output_post.png) 22 | -------------------------------------------------------------------------------- /Week4/1_VSCode_Git_GitHub.md: -------------------------------------------------------------------------------- 1 | # VS Code with Git and GitHub 2 | 3 | - VS Code provides tight source-control integration with Git by default. 4 | - VS Code provides many visual cues, helpful prompts, and shortcuts to common Git features, available from the Command Palette, the Status Bar, and the Source Control view. 
5 | - From the Source Control view, you can access many Git features, like clone, push, pull, and commit. 6 | 7 | ## The Lab 8 | 9 | For this lab, we'll use VS Code to commit and push code to a GitHub repository. 10 | 11 | 1. Create a new file inside Week1 folder, called [cloudskills_git_project.md](Project1/cloudskills_git_project.md). 12 | 2. Save the file, commit it and push it to the GitHub repository. 13 | 3. I'll go ahead and do the same with this file right here, and the other ones still pending to stage and commit (see pictures). 14 | 4. When you click on the Version Control icon on the left, under **Changes** you can see the files that changed but still haven't been staged. You can click on the plus (+) sign next to the filename to stage it (it's the equivalent of `git add`). The files will go to the **Staged Changes**. Once in the Staged Changes, you can click on the minus (-) sign to unstage the file. It will move back to the Changes section (the equivalent of `git reset`). 15 | 5. To commit the changes, click on the tick icon, on the top menu of the Source Control view. 16 | 6. To push the changes to the remote repository, click on the synchronize changes icon (two clockwise arrows) in the Status Bar. This action will execute a pull and a push. To the right of the synchronize changes icon, you can see two arrows, one pointing downward (the pull) and one pointing upward (the push). In this case, there's nothing to pull and there's one change to be pushed. 
17 | 18 | ![Source Control View 1](sourcecontrolview1.png) 19 | ![Source Control View 2](sourcecontrolview2.png) 20 | ![Source Control View 3](sourcecontrolview3.png) 21 | ![Source Control Sync Changes](sourcecontrolsync.png) 22 | -------------------------------------------------------------------------------- /Week6/6_Monitoring_Serverless.md: -------------------------------------------------------------------------------- 1 | # Monitoring Serverless Apps in Azure and AWS 2 | 3 | Azure and AWS offer a number of monitoring features for your serverless apps. Michael Levan shows us some of them in this lecture. 4 | 5 | ## Azure 6 | 7 | - Right off the bat, in the overview pane of our App Service, we can see some information regarding our App's performance. 8 | - When we scroll down, we can see 5 graphics: **Http 5xx**, **Data In**, **Data Out**, **Requests**, and **Response Time**. 9 | - As users interact with our App, we can see how these graphics are updated. 10 | ![App Service Overview](project6_webapp_overview.png) 11 | - If we want a more detailed monitoring of our App, we can enable [Application Insights](https://docs.microsoft.com/en-us/azure/azure-monitor/app/app-insights-overview). 12 | - Application Insights, a feature of Azure Monitor, is an extensible Application Performance Management (APM) service that can be used to monitor your live applications. It will automatically detect performance anomalies, and includes powerful analytics tools to help you diagnose issues and to understand what users actually do with your app. 13 | - We can enable Application Insights for our App by going to the Application Insights section. 14 | - Once enabled, this is what we'll see. 15 | ![Application Insights](project6_application_insights.png) 16 | - Application Insights gives us the possibility to monitor our App's performance in real time, by going to the **Live metrics** section. 
17 | - In the App Service resource, we can access a few very useful monitoring tools, under the **Monitoring** section. 18 | - An especially useful tool is the [App Service Diagnostics](https://docs.microsoft.com/en-us/azure/app-service/overview-diagnostics), which can be accessed by clicking on **Diagnose and Solve Problems** on the menu on the left. 19 | 20 | ## AWS 21 | 22 | - In AWS, we can monitor our Lambda Function's performance by going to the **Monitor** section. We'll see graphics similar to the ones presented in Azure Monitor. 23 | ![AWS Monitor](project6_aws_monitor.png) 24 | -------------------------------------------------------------------------------- /Week7/1_Securing_Code.md: -------------------------------------------------------------------------------- 1 | # Securing Code 2 | 3 | For this lab, we'll see how we can secure our code using [GitHub's Code Scanning](https://docs.github.com/en/code-security/secure-coding) feature. 4 | 5 | ## Code Scanning 6 | 7 | - Code scanning analyzes the code in a GitHub repository to find security vulnerabilities and coding errors. 8 | - You can use code scanning to find, triage, and prioritize fixes for existing problems in your code. 9 | - You can schedule scans for specific days and times, or trigger scans when a specific event occurs in the repository, such as a push. 10 | - If code scanning finds a potential vulnerability or error in your code, GitHub displays an alert in the repository. 11 | - This feature is free just for public repositories. 12 | 13 | ## The Project 14 | 15 | - For this lab, we'll be using the [ci-pythonapp](https://github.com/suvo-oko/ci-pythonapp) repository, the same we used for a previous project. 16 | - To set up Code Scanning, we go to the **Security** tab of our repo, and there, on the lower right corner, we click on **Set up code scanning**. 17 | - In this page, we can choose from different security analysis workflows. 
We'll choose **CodeQL Analysis**, the default GitHub security analysis tool, by clicking on **Set up this workflow**. 18 | ![codeql set up](project1_setup_code_scanning.png) 19 | - This will set up a new GitHub Actions workflow template. In this case, it generated a file called **codeql-analysis.yml**. 20 | - GitHub automatically detects the languages in our repository, and generates the workflow accordingly. 21 | 22 | ```yaml 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | language: [ 'javascript', 'python' ] 27 | ``` 28 | 29 | - When new code is pushed to the main branch, or whenever a contributor opens a pull request, the workflow will be triggered, and two jobs will run in parallel, one for JavaScript, and one for Python. 30 | - When the workflow finishes, we can see the results of the analysis by clicking on the **Security** tab of our repo. 31 | - We can click on each individual alert to see a breakdown of the cause of the alert. 32 | 33 | ![alerts](project1_alerts.png) -------------------------------------------------------------------------------- /Week7/5_RBAC_Policies.md: -------------------------------------------------------------------------------- 1 | # Creating RBAC Rules and Azure Policies 2 | 3 | ## Service Principals 4 | 5 | - An Azure service principal is an identity created for use with applications, hosted services, and automated tools to access Azure resources. This access is restricted by the roles assigned to the service principal, giving you control over which resources can be accessed and at which level. 
6 | - **For security reasons, it's always recommended to use service principals with automated tools rather than allowing them to log in with a user identity.** 7 | - To create a Service Principal and configure its access to Azure resources using Azure CLI, type the following command: 8 | 9 | `az ad sp create-for-rbac -n "AzureDevOps" --role contributor --scopes /subscriptions/xxxxxxxxxx` 10 | 11 | - Here, we'll create a Service Principal with a custom name (AzureDevOps) with the contributor role. 12 | - This role assignment will apply to the subscription. We can get more granular, and specify a resource group, or even a single resource. 13 | - If we want our apps to have access to Azure resources, we need to add the `sdk-auth` argument to our command. 14 | 15 | `az ad sp create-for-rbac -n "AzureDevOps" --role contributor --scopes /subscriptions/xxxxxxxxxx --sdk-auth` 16 | 17 | - This command will generate an output that is compatible with Azure SDK auth file. 18 | - This is used when you want to do anything from a programmatic perspective. 19 | 20 | ## Azure Active Directory 21 | 22 | - Azure Active Directory (Azure AD) is Microsoft's enterprise cloud-based identity and access management (IAM) solution. 23 | 24 | ## Azure Policy 25 | 26 | - Azure Policy helps to enforce organizational standards and to assess compliance at-scale. 27 | - Common use cases for Azure Policy include implementing governance for resource consistency, regulatory compliance, security, cost, and management. 28 | - Azure Policy evaluates resources in Azure by comparing the properties of those resources to business rules. These business rules, described in JSON format, are known as policy definitions. 
29 | - [Azure Policy documentation](https://docs.microsoft.com/en-us/azure/governance/policy/) 30 | -------------------------------------------------------------------------------- /Week8/infrastructure/main.tf: -------------------------------------------------------------------------------- 1 | provider "azurerm" { 2 | features {} 3 | } 4 | 5 | resource "azurerm_resource_group" "azurerg" { 6 | name = var.resource_group_name 7 | location = var.location 8 | } 9 | 10 | resource "azurerm_virtual_network" "azvnet" { 11 | name = "cloudskills_vnet" 12 | address_space = ["10.0.0.0/22"] 13 | location = azurerm_resource_group.azurerg.location 14 | resource_group_name = azurerm_resource_group.azurerg.name 15 | } 16 | 17 | resource "azurerm_subnet" "internal" { 18 | name = "internal" 19 | resource_group_name = azurerm_resource_group.azurerg.name 20 | virtual_network_name = azurerm_virtual_network.azvnet.name 21 | address_prefixes = ["10.0.2.0/24"] 22 | } 23 | 24 | resource "azurerm_network_interface" "main" { 25 | count = 2 26 | name = "UbuntuNIC-${count.index}" 27 | resource_group_name = azurerm_resource_group.azurerg.name 28 | location = azurerm_resource_group.azurerg.location 29 | 30 | ip_configuration { 31 | name = "internal" 32 | subnet_id = azurerm_subnet.internal.id 33 | private_ip_address_allocation = "Dynamic" 34 | } 35 | } 36 | 37 | resource "azurerm_linux_virtual_machine" "LinuxVM" { 38 | count = 2 39 | name = "UbuntuVM-${count.index}" 40 | resource_group_name = azurerm_resource_group.azurerg.name 41 | location = azurerm_resource_group.azurerg.location 42 | size = "Standard_D2S_v3" 43 | admin_username = var.username 44 | admin_password = var.password 45 | disable_password_authentication = false 46 | network_interface_ids = [ 47 | azurerm_network_interface.main[count.index].id, 48 | ] 49 | 50 | source_image_reference { 51 | publisher = "Canonical" 52 | offer = "UbuntuServer" 53 | sku = "18.04-LTS" 54 | version = "latest" 55 | } 56 | 57 | os_disk { 58 | caching = 
"ReadWrite" 59 | storage_account_type = "Standard_LRS" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /Week4/2_Working_Push_Conflicts.md: -------------------------------------------------------------------------------- 1 | # Working Through Push Conflicts 2 | 3 | - When working with a local copy of the remote repository, if you don't pull down changes frequently, you're not going to have the latest image of the repo, with all the latest updates made by other contributors. 4 | - At this point, if you try to push changes to the remote repository, you're going to get an error that says your updates were rejected because the remote contains work that you do not have locally. 5 | - To check the status of your local working branch relative to the remote one, run `git status`. 6 | - To integrate the remote changes into your local branch, run `git pull`. 7 | 8 | ## Merge Conflicts 9 | 10 | - Merge conflicts happen when you merge branches that have competing commits, and Git needs your help to decide which changes to incorporate in the final merge. 11 | - Often, merge conflicts happen when people make different changes to the same line of the same file, or when one person edits a file and another person deletes the same file. 12 | - You must resolve all merge conflicts before you can merge a pull request on GitHub. 13 | - To resolve a merge conflict, you must manually edit the conflicted file to select the changes that you want to keep in the final merge. 14 | - There are two ways to resolve a merge conflict: 15 | - Using the GitHub conflict editor. This is indicated if the merge conflict is caused by competing line changes, such as when people make different changes to the same line of the same file on different branches in your Git repository. 16 | - For all other types of merge conflicts, you must resolve it in a local clone of the repository, and then use the command line to push the changes. 
17 | - To resolve a conflict, GitHub performs what is known as a reverse merge. This means that the changes from the main branch are merged into your current working branch. This allows you to test the resolved code on your branch before you merge it into main. The main branch should be treated as production ready, bug-free code. 18 | - [Resolving a merge conflict on GitHub](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/resolving-a-merge-conflict-on-github). 19 | - [Resolving a merge conflict using the command line](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/resolving-a-merge-conflict-using-the-command-line). 20 | - [Managing merge conflicts](https://lab.github.com/githubtraining/managing-merge-conflicts) learning module from [GitHub Learning Lab](https://lab.github.com/). 21 | 22 | ## The Project 23 | 24 | - To go deeper in the subject, I took the *Managing Merge Conflicts* learning module from GitHub Learning Lab. See above for the link to the module. [Here's the exercise](Project2/Managing_Merge_Conflicts.md). 25 | -------------------------------------------------------------------------------- /Week3/2_Terraform_module_vnet_aws.md: -------------------------------------------------------------------------------- 1 | # Building a Terraform Module To Create a Virtual Network in AWS 2 | 3 | ## Terraform 4 | 5 | - [Terraform](https://www.terraform.io/) is an infrastructure-as-code tool, much like ARM templates. 6 | - Terraform is platform agnostic, meaning it can be used with most of the major cloud platforms. 7 | - Terraform is written in Go. 8 | - To run Terraform, the location of the Terraform executable file must be added to the system's Path variable. 9 | - The Terraform extension for VS Code should also be installed. 10 | - Providers are the tools used by Terraform to talk to the different platforms. 
For this lab, Terraform will use the [AWS provider](https://registry.terraform.io/providers/hashicorp/aws/latest/docs). 11 | 12 | ## The lab 13 | 14 | - For this lab, we'll deploy an AWS EC2 Instance using Terraform. 15 | - We'll use a module, where all the configuration data will reside. Modules can be reused for different deployments. 16 | - This is the finished module, named [main.tf](Project2/modules/ec2/main.tf). 17 | - We'll also create a [variables.tf](Project2/modules/ec2/variables.tf) file, which will hold our variables, that will serve as parameters for our Terraform module, allowing aspects of the module to be customized without altering the module's own source code, and allowing modules to be shared between different configurations. 18 | - The last step is to create the main Terraform file. This file will call upon the Terraform module (see above), that contains the actual configuration. This file is also called [main.tf](Project2/main.tf). 19 | - After all the files are created and saved, we'll connect to AWS by running `aws configure` from the command line. 20 | - We then run `terraform init`. This command will initialize our Terraform directory and download the provider (AWS in this case). This command will create the **.terraform** folder, that contains the AWS provider executable file. 21 | - The second step is to run `terraform plan`. It will display an execution plan. This will show the resources that are going to be created, without actually deploying them to the live environment. 22 | - The last step is to run `terraform apply`. This command will also display the execution plan. To perform the actions shown in the execution plan, we need to type **yes**. 23 | - I ran all these commands successfully, but I couldn't see the EC2 instance in my AWS console. This was because my region was defined as *us-east-2*, and the instance was deployed to the *us-east-1* region. I had to change it in the upper right corner of the console. 
24 | - Here are the final results. The EC2 instance running in the console, and the page displaying **My first Terraform module!** 25 | ![ecs aws console](ec2_running.png) 26 | ![My first Terraform module!](public_ip_ec2.png) 27 | - After that, I tore down the resources, by running `terraform destroy`. 28 | -------------------------------------------------------------------------------- /Week4/Notes_week4.md: -------------------------------------------------------------------------------- 1 | # GitHub and Source Control 2 | 3 | - No need to store the code locally 4 | - A means to track and manage changes 5 | - Source of truth 6 | - A way to protect the code 7 | 8 | ## Source Control vs Version Control 9 | 10 | - Version control manages the versions of the code. 11 | - Source control stores the code in a repository. 12 | - Version control allows for comparisons and diffs. 13 | 14 | ## Source control best practices 15 | 16 | - **DON'T** commit passwords and secrets. 17 | - Be very careful when deleting information from the code stored in source control. The commit history keeps all the changes made to the files. Even if the file is deleted from the repo, it still lives in the commit history. 18 | - Write good commit messages, short and descriptive. 19 | - Follow branching best practices. Ensure that you're committing code to the correct branch, and avoid committing code directly to the main branch. 20 | - **Commit often**. 21 | - Test before you commit :exclamation: :exclamation: 22 | 23 | ## Centralized Version Control 24 | 25 | - One centralized server. 26 | - Everyone uses the code stored in this one server. 27 | - Need Internet connection. 28 | - Single point of failure. 29 | 30 | ## Distributed Source Control 31 | 32 | - Every developer clones a copy of a repository and has the full history of the project in his or her own local machine. 33 | - Much faster. 
34 | 35 | Strengths | Best Used For 36 | ------------ | ------------- 37 | Cross platform support | Small and modular code bases 38 | An open source friendly code review model via pull requests | Evolving through open source 39 | Complete offline support | Highly distributed teams 40 | Portable history | Teams working across platforms 41 | An enthusiastic growing user base | Greenfield code bases 42 | 43 | ## Concepts 44 | 45 | - **Fork**: A point-in-time copy of an entire GitHub repository from the source. This repository won't automatically keep up to date with the original repository but will allow you full access to the forked repository. This is a required step to creating pull requests to a repository you don't own. **A fork doesn't exist in Git. It is a concept that only exists in GitHub.** 46 | - **Clone**: An entire copy of a remote repository (including forked repositories). 47 | - **Branch**: A branch is a pointer to a specific commit. The default branch in Git is called *master* or *main*. As you start making commits, you’re given a master branch that points to the last commit you made. Every time you commit, the master branch pointer moves forward automatically. Creating a new branch creates a new pointer. 48 | - **Push Command**: The push command updates your remote repository with your local changes. 49 | - **Pull Request**: Pull requests let you tell others about changes you've pushed to a branch in a repository on GitHub. Once a pull request is opened, you can discuss and review the potential changes with collaborators and add follow-up commits before your changes are merged into the base branch. 50 | -------------------------------------------------------------------------------- /Week1/5_First_GitHub_Action.md: -------------------------------------------------------------------------------- 1 | # Setting up your first GitHub Action 2 | 3 | ## Overview 4 | 5 | - GitHub Actions is a platform to automate workflows. 
6 | - GitHub Actions can be used to implement continuous integration (CI) for code that is maintained in GitHub repositories. 7 | - CI is the practice of using automation to build and test software every time a developer commits changes to version control. 8 | - GitHub Actions help you automate tasks within your software development lifecycle 9 | - GitHub Actions are *event-driven*. 10 | - An event automatically triggers a *workflow*, which contains a *job*. 11 | - A job uses *steps* to control the order in which *actions* are run. 12 | - There are two types of Actions: container actions and JavaScript actions. 13 | - With container actions, the environment is part of the action's code. These actions can only be run in a Linux environment that GitHub hosts. Container actions support many different languages. 14 | - JavaScript actions don't include the environment in the code, which means you'll have to specify the environment to execute these actions. Actions can run in a VM in the cloud or on-premises. 15 | - GitHub Actions uses YAML syntax to define the events, jobs, and steps. These YAML files are stored in your code repository, in a directory called `.github/workflows`. 16 | 17 | ## Components of GitHub Actions 18 | 19 | ### Workflows 20 | 21 | - Automated procedure that you add to your repository. 22 | - Made up of one or more jobs. 23 | - Can be scheduled or triggered by an event. 24 | 25 | ### Events 26 | 27 | - A specific activity that triggers a workflow. 28 | - An event is something that occurs on your repository. When this event occurs, it's going to trigger the jobs in the workflow. 29 | - Some examples of events: a new pull request, the creation of a new branch, an issue or a tag, when a user forks a repository, when a new collaborator is added, etc. 30 | 31 | ### Jobs 32 | 33 | - A set of steps that execute on the same runner. 
34 | 35 | ### Steps 36 | 37 | - An individual task that can run commands in a job 38 | - A step can be either an action or a shell command 39 | - Each step in a job executes on the same runner 40 | 41 | ### Actions 42 | 43 | - Standalone commands that are combined into *steps* to create a *job* 44 | - The smallest portable building block of a workflow 45 | - To use an action in a workflow, you must include it as a step 46 | 47 | ### Runners 48 | 49 | - A container environment that has the *GitHub Actions runner application* installed. 50 | - A runner listens for available jobs, runs one job at a time, and reports the progress, logs, and results back to GitHub. 51 | - By default, GitHub runs this container for you. 52 | 53 | ## My first workflow 54 | 55 |

56 | ![Learn GitHub Actions](myfirstgithubaction.png) 57 |

58 | ![Learn GitHub Actions 2](helloworldaction1.png) 59 |

60 | ![Learn GitHub Actions 3](helloworldaction2.png) 61 |

62 | ![Learn GitHub Actions 4](helloworldaction3.png) 63 | -------------------------------------------------------------------------------- /Week3/3_Testing_IaC.md: -------------------------------------------------------------------------------- 1 | # Testing Infrastructure As Code 2 | 3 | - [Gruntwork](https://gruntwork.io/) develop a testing tool to test Terraform code called [Terratest](https://terratest.gruntwork.io/). 4 | - Terratest is a Go library. 5 | - To install Go, go to the [Go website](https://golang.org/). 6 | 7 | ## The lab 8 | 9 | - For this lab, we're going to test the Terraform files we used in the previous project to deploy the EC2 instance. 10 | - The Terraform community has standardized the naming scheme for folders containing Terraform modules, as follows: **terraform-provider-module**. 11 | - In our case, the folder is called **terraform-aws-webserver**. 12 | - In order to use Terraform modules that are in Git repositories, the modules need to follow these naming conventions. 13 | - We need to make a few changes to the *main.tf* file to make it more dynamic and reusable. 14 | - We'll replace the *region* and *servername* parameters with variables, so it can be used with different values for region and servername. 15 | - We'll change the *source* parameter to point to the location of the files in this file structure. 16 | - We then create a *variables.tf* file to declare the variables. This is my [variables.tf](Project3/Testing/terraform-aws-webserver/examples/webserver/variables.tf) file. 17 | - We now create a file called *outputs.tf*, to validate that the website is created. The test will do so by verifying the public IP address. 18 | - This is my [outputs.tf](Project3/Testing/terraform-aws-webserver/examples/webserver/outputs.tf) file. 19 | - This is the [Go file](Project3/Testing/terraform-aws-webserver/test/webserver_test.go), used for the test. 20 | - This test will run a `terraform init`, followed by a `terraform apply`. 
After both are finished, the test will end with a `terraform destroy`. 21 | - The test will also verify if the deployed webserver is running correctly. 22 | - We now need to download the libraries used in the code, with the command `go get -t -v`. 23 | - I encountered an error in this step. The command couldn't find the main module. 24 | - I fixed this error by running `go env -w GO111MODULE=auto`, with the help of Andrew Dawson, from the community. 25 | - [Daniel Komaz](https://github.com/danielkomaz/cloudskillsbootcamp/tree/main/Week-3-Infrastructure-As-Code), from the community, presented another solution. This looks better than the one described above. 26 | 1. Move to the terraform-aws-webserver root folder. 27 | 2. Initialize the Go modules: `go mod init terraform-aws-webserver` 28 | 3. Download module dependencies: `go mod tidy` 29 | 4. Move into the test directory: `cd test` 30 | - Before running the test, make sure to authenticate to AWS by running `aws configure`. 31 | - The command to run the test is `go test -v webserver_test.go`. 32 | - The test passed, and this is the result. 33 | ![Terratest result](go_test_pass.png) 34 | 35 | ## Conclusion 36 | 37 | This was the most challenging project until now. I found the Go language challenging. It took me some time to grasp the concept of modules and the GOPATH. 38 | -------------------------------------------------------------------------------- /Week3/4_Intro_Azure_Bicep.md: -------------------------------------------------------------------------------- 1 | # Introduction to Azure Bicep :muscle: 2 | 3 | - Azure Bicep is a declarative Domain Specific Language (DSL) for deploying Azure resources. 4 | - It uses a more friendly and easier syntax than the JSON syntax of ARM templates. 5 | - Azure Bicep works as an abstraction layer built on top of ARM Templates. 
Anything that can be done with Azure ARM Templates can be done with Azure Bicep as it provides a “transparent abstraction” over ARM (Azure Resource Manager). 6 | - Azure Bicep is a compiled/transpiled language. This means that the Azure Bicep code is converted into ARM Template code. Then, the resulting ARM Template code is used to deploy the Azure resources. 7 | - [Azure Bicep GitHub page](https://github.com/Azure/bicep). 8 | - [Azure Bicep VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-bicep). 9 | 10 | ## The Lab 11 | 12 | For this lab, we'll deploy a storage account using Bicep. 13 | 14 | - First steps: 15 | - Install the [Bicep CLI](https://github.com/Azure/bicep/blob/main/docs/installing.md). 16 | - Install the [Bicep VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-bicep). 17 | - We then create a new file called **main.bicep**. 18 | - From the terminal we run the command `bicep build main.bicep`. 19 | - This action will create a new file called **main.json**, which is the skeleton of an ARM template. 20 | - We then start editing the **main.bicep** file with the code to create a storage account. 21 | - Once the file is saved, we run `bicep build main.bicep` again and the data will be compiled into the **main.json** file. 22 | - This is my [resulting bicep file](Project4/main.bicep). 23 | - And this is the [ARM template JSON file](Project4/main.json). 24 | - The **main.json**, which is the ARM template, is going to be the file we will use to deploy the resources to Azure. 25 | - First, we connect to our Azure account by running `az login`. 26 | - We then have to create a resource group or use an existing one. I created a new one called biceptestrg, with the following command: `az group create --name biceptestrg --location eastus`. 
27 | - To deploy the resource, I used the following command: `az deployment group create --resource-group biceptestrg --template-file .\main.json -p name="storagethx2893"`. 28 | - This is the resource group with the storage account, as seen in the portal: 29 | ![Bicep test](bicep_rg_portal.png) 30 | - Afterwards, I ran `az group delete --name biceptestrg` to delete the resource group. 31 | 32 | ## Conclusion 33 | 34 | Bicep is still in its infancy, and still being developed. As of the time when this lab was taken, just over 3,800 users had installed the VS Code extension. However, I believe this tool will grow and be popular. Mostly because of its ease of use, and the excellent VS Code extension, with an intellisense feature that is very good. It makes our lives a lot easier. With a few lines of code, we can deploy complex infrastructures. It still has limitations, but from what I understand, Microsoft is investing heavily in this tool. 35 | 36 | 37 | -------------------------------------------------------------------------------- /Week7/2_Continuous_Security.md: -------------------------------------------------------------------------------- 1 | # Implementing Continuous Security 2 | 3 | In this project, we'll integrate continuous security into an IaC CI pipeline. A security check will be performed for any new branch that is created in the repo. This will enforce security as the code is written. 4 | 5 | ## The Project 6 | 7 | - We'll use the [tf-sec-ops](https://github.com/suvo-oko/tf-sec-ops) repo, which was forked from the [Cloudskills](https://github.com/cloudskills) GitHub account. This repo contains Terraform configuration files with instructions to deploy resources to Azure. 8 | - We'll create a new GitHub Actions workflow file from scratch, and we'll use [Checkov](https://github.com/bridgecrewio/checkov), available from the marketplace. Checkov is a static code analysis tool for infrastructure-as-code. 
It scans cloud infrastructure provisioned using Terraform, Terraform plan, Cloudformation, Kubernetes, Serverless or ARM Templates and detects security and compliance misconfigurations. 9 | - This is our workflow: 10 | 11 | ```yaml 12 | name: CI 13 | 14 | on: 15 | push: 16 | branches: 17 | - 'feature/*' 18 | workflow_dispatch: 19 | 20 | jobs: 21 | build: 22 | runs-on: ubuntu-latest 23 | 24 | steps: 25 | - uses: actions/checkout@v2 26 | 27 | - name: Checkov Github Action 28 | uses: bridgecrewio/checkov-action@v12 29 | ``` 30 | 31 | - When selecting the Checkov GitHub Action, make sure you choose version 12. 32 | - This workflow has a manual trigger, indicated by **workflow_dispatch**. 33 | - When we're done with the file, we'll move to the Actions tab and click on **Run this workflow**. 34 | - The workflow will complete successfully! We can see that Checkov found no errors. 35 | ![checkov 1](project2_checkov_action.png) 36 | - We'll now clone the repository and make some changes to the Terraform code. 37 | - In our main configuration file, we'll change the inbound rules for our network security group, allowing any IP address into our subnet. This security flaw should be detected by Checkov. Let's test it! 38 | 39 | ```terraform 40 | resource "azurerm_network_security_group" "nsg" { 41 | name = "nsg-rdpallow-001" 42 | location = azurerm_resource_group.rg.location 43 | resource_group_name = azurerm_resource_group.rg.name 44 | 45 | security_rule { 46 | name = "RDP-Inbound" 47 | priority = 100 48 | direction = "Inbound" 49 | access = "Allow" 50 | protocol = "Tcp" 51 | source_port_range = "*" 52 | destination_port_range = "3389" 53 | source_address_prefix = "*" # this is the security hole 54 | destination_address_prefix = "*" 55 | } 56 | } 57 | ``` 58 | 59 | - After committing and pushing the code back to our repo, the workflow starts, and we can see that Checkov caught the error. 
60 | ![checkov caught error](project2_checkov_error.png) 61 | - With this security monitoring feature, we can check security issues earlier in the process when writing the code, rather than later when infrastructure is already in production. 62 | -------------------------------------------------------------------------------- /Week5/1_Deploy_Web_App_GitHubActions.md: -------------------------------------------------------------------------------- 1 | # Deploying a Web App via GitHub Actions 2 | 3 | ## What is GitHub Actions? 4 | 5 | - See my [First GitHub Action](../Week1/5_First_GitHub_Action.md) document from Week 1 for a quick overview of GitHub Actions. 6 | 7 | ## The Project 8 | 9 | - For this project, we'll use Azure [DevOps Starter](https://docs.microsoft.com/en-us/azure/devops-project/) feature. 10 | - DevOps Starter is an easy and quick way to get started with CI/CD in Azure. 11 | - DevOps Starter automates the setup of an entire continuous integration (CI) and continuous delivery (CD) for your application to Azure. 12 | - DevOps Starter does all the work for the initial configuration of a DevOps pipeline including everything from setting up the initial Git repository, configuring the CI/CD pipeline, creating an Application Insights resource for monitoring, and providing a single view of the entire solution with the creation of a DevOps Starter dashboard in the Azure portal. 13 | - You can choose between using Azure DevOps or GitHub Actions. For this project, we'll use GitHub Actions. 14 | - In the video, Luke deploys a .Net application. I followed his instructions, but then I tried this lab a second time, and I deployed a Node.js app. This is the one depicted here. 15 | 16 | ### Creating the DevOps Starter resource 17 | 18 | - In the Azure Portal, we'll click on *Create a resource* and search for DevOps Starter. We click on *Create*. 19 | - The first step is to choose the language of our application. As mentioned above, I chose *Node.js*. 
20 | ![Step 1](devops_starter_1.png) 21 | - We then select the framework. 22 | ![Step 2](devops_starter_2.png) 23 | - The third step is to select the service in which the app will be deployed. In my case, I had the option to choose between *Windows Web App*, *Kubernetes Service* or *Web App for Containers*. I chose *Windows Web App*. 24 | ![Step 3](devops_starter_3.png) 25 | - We need to authorize Azure to access our GitHub account. The next step is to create the repo and give our app a name. 26 | ![Step 4](devops_starter_4.png) ![Step 5](devops_starter_5.png) 27 | - Once the resource is created, we'll have access to a nice dashboard, where we can see important information about our app. 28 | ![Dashboard](dashboard.png) 29 | - Our app is up and running! 30 | ![App running](apprunning_1.png) 31 | 32 | ### CI/CD Pipeline 33 | 34 | - We'll now see the CI/CD pipeline in action. 35 | - Whenever new code is pushed to the repository's master branch, it will trigger the GitHub Action. 36 | - The Action will build the artifact (the finished and packaged application), run tests, deploy the app to Azure App Service, and run functional tests. 37 | - The DevOps Starter creation process created a new GitHub repository in my account. To test the CI/CD workflow, I cloned the repo to my local machine, made a change to the file, and pushed the new code back to the master branch. 38 | - This new push to the repo appears in the DevOps Starter dashboard. 39 | ![New push](new_push.png) 40 | - In the GitHub repository, in the Actions tab, we can see the action concluded successfully. 41 | ![Action succeded](action_done.png) 42 | - And the new app is running! 43 | ![new app](new_page.png) 44 | -------------------------------------------------------------------------------- /Week2/6_Documenting_code.md: -------------------------------------------------------------------------------- 1 | # Documenting Code 2 | 3 | - We will use **markdown** to write documentation. 
4 | - More information and resources: 5 | - [Mastering Markdown](https://guides.github.com/features/mastering-markdown/) 6 | - [Complete list of github markdown emoji markup](https://gist.github.com/rxaviers/7360908) 7 | - [Technical writing guidelines](https://www.digitalocean.com/community/tutorials/digitalocean-s-technical-writing-guidelines) 8 | - [Technical writing for the Cloudskills community](https://github.com/CloudSkills/technical-writing-for-cloudskills-community) 9 | 10 | # Title 11 | 12 | Here you will write an introduction to your repository and its contents, and a short synopsis. 13 | 14 | ## Status 15 | 16 | Here you will describe the current status of the repository. This section should be updated regularly. 17 | 18 | ## PowerShell Code 19 | 20 | The PowerShell code found in **Week 2 Scripting Like a Developer** is designed to create a new Resource Group in Azure. 21 | 22 | ### How to use the PowerShell code 23 | 24 | In this section, we describe how the code is used. 25 | 26 | In our specific PowerShell case: 27 | 28 | - The *New-ResourceGroup* function is found under the *Project3* directory and can be used as a reusable function. 29 | - The user can pass in parameters at runtime. 30 | - The script can be reused any time for any environment. 
31 | 32 | ### Example 33 | 34 | ``` 35 | function New-ResourceGroup { 36 | [cmdletbinding(SupportsShouldProcess)] # the SupportShouldProcess argument adds Confirm and WhatIf parameters to the function 37 | param ( 38 | [Parameter(Mandatory)] # this indicates that the parameter is mandatory and must be presented at runtime 39 | [string]$rgName, 40 | 41 | [Parameter(Mandatory)] 42 | [string]$location 43 | ) 44 | 45 | # this is splatting, a method of passing a collection of parameter values to a command as a single unit 46 | $params = @{ 47 | 'Name' = $rgName 48 | 'Location' = $location 49 | } 50 | if($PSCmdlet.ShouldProcess('location')){ 51 | New-AzResourceGroup @params 52 | } 53 | } 54 | ``` 55 | 56 | ## Python Code 57 | 58 | The Python code found in **Week 2 Scripting Like a Developer** is designed to create an S3 Bucket in AWS. 59 | 60 | ### How to use the Python code 61 | 62 | The **s3bucket.py** script is designed to be re-used at any point for any environment. There are no hard-coded values. 63 | 64 | ### Example 65 | 66 | ``` 67 | import sys 68 | import boto3 69 | 70 | try: # in case an error occurs 71 | def main(): 72 | create_s3bucket(bucket_name) 73 | 74 | except Exception as e: 75 | print(e) 76 | 77 | def create_s3bucket(bucket_name): 78 | s3_bucket = boto3.client( 79 | 's3', 80 | #region-name='us-east-1' 81 | ) 82 | 83 | bucket = s3_bucket.create_bucket( 84 | Bucket=bucket_name, 85 | ACL='private', # ACL stands for Access Control List 86 | ) 87 | 88 | print(bucket) # this will print the output of creating the S3 bucket 89 | 90 | BUCKET_NAME = sys.argv[1] # argv allows us to pass an argument at runtime 91 | 92 | if __name__ == '__main__': 93 | main() 94 | 95 | ``` 96 | 97 | ## Testing 98 | 99 | Both the PowerShell and Python code have unit tests available, to ensure that the desired outcomes, including values and types, are accurate. 
100 | 101 | ## Contributors 102 | 103 | - Michael Levan 104 | - Robert Stojan 105 | - Rodrigo Miravalles 106 | -------------------------------------------------------------------------------- /Week2/Notes_week2.md: -------------------------------------------------------------------------------- 1 | # Scripting Like a Developer 2 | 3 | Core developer theory for DevOps and how to code like a developer. 4 | 5 | ## Developer Theory 6 | 7 | ### Mutable vs. Immutable 8 | 9 | Mutable | Immutable 10 | ------------ | ------------- 11 | Can be modified after its creation | Cannot be modified after its creation 12 | No new object is formed when changes are made to an existing object | Whenever an existing object is changed, a new object is formed 13 | Provides methods to change the content of an object | Does not provide methods for changing the content of an object 14 | Ansible | Terraform 15 | 16 | ### Declarative vs. Imperative 17 | 18 | Imperative | Declarative 19 | ------------ | ------------- 20 | Focuses on how to execute | Focuses on what to execute 21 | Defines control flow as statements that change a program state | Defines program logic but not detailed control flow 22 | Explicit instructions | Describes the outcome 23 | Java, C#, C++, Python | Terraform, CloudFormation, Go, Python 24 | 25 | ### OOP, Functional and Procedural 26 | 27 | #### Object Oriented Programming (OOP) 28 | 29 | A programming paradigm based on the concept of *objects*, which may contain data, in the form of fields, often known as attributes; and code, in the form of procedures, often known as methods. 30 | 31 | #### Functional Programming 32 | 33 | A programming paradigm that treats computation as the evaluation of mathematical functions and avoids changing-state and mutable data. 34 | 35 | #### Procedural Programming 36 | 37 | A programming paradigm, derived from structured programming, based upon the concept of the procedure call. 
Procedures, also known as routines, subroutines, or functions, simply contain a series of computational steps to be carried out. In procedural programming, we issue instructions to a computer in an ordered sequence. 38 | 39 | ```python 40 | >>> i = 3 41 | >>> j = i + 1 42 | >>> i + j 43 | >>> 7 44 | ``` 45 | 46 | ### Idempotence 47 | 48 | Idempotence is the property of certain operations in mathematics and computer science whereby they can be applied multiple times without changing the result beyond the initial application. 49 | 50 | ## Testing Code 51 | 52 | Testing code is crucial, whether you are writing automation, scripts or application code. 53 | 54 | ### Unit Tests 55 | 56 | Unit testing refers to tests that verify the functionality of a specific section of code, usually at the function level. In an object-oriented environment, this is usually at the class level, and the minimal unit tests include the constructors and destructors. 57 | 58 | For Python we use [unittest](https://docs.python.org/3/library/unittest.html) library and for PowerShell we use the [Pester](https://pester.dev/) framework. 59 | 60 | ### Mock Tests 61 | 62 | A mock test is an approach to unit test and it allows you to make assertions. It allows you to replace parts of your system under test with mock objects and make assertions about how they have been used. 63 | 64 | In Python, we use the [unittest.mock](https://docs.python.org/3/library/unittest.mock.html) library. 65 | 66 | ### Regression Tests 67 | 68 | Regression testing focuses on finding defects after a major code change has occurred. Specifically, it seeks to uncover software regressions, as degraded or lost features, including old bugs that have come back. Such regressions occur whenever software functionality that was previously working correctly, stops working as intended. 
Typically, regressions occur as an unintended consequence of program changes, when the newly developed part of the software collides with the previously existing code. 69 | -------------------------------------------------------------------------------- /Week7/3_Security_Authentication.md: -------------------------------------------------------------------------------- 1 | # Security Authentication in Code 2 | 3 | In this lab, we'll use Azure's [Managed Service Identity](https://azure.microsoft.com/en-us/blog/keep-credentials-out-of-code-introducing-azure-ad-managed-service-identity/) feature to securely authenticate code. Managed identities for Azure resources are automatically managed by Azure and enable you to authenticate to services that support Azure AD authentication without needing to insert credentials into your code. Ideally, credentials should never appear in your code. 4 | 5 | Our task for the lab is to have a virtual machine access secure information stored in Key Vault using a managed identity. 6 | 7 | Check my notes for [Security for Serverless Apps](Week6/../../Week6/5_Security_Serverless.md) for more on managed identities. 8 | 9 | This image from Microsoft Docs illustrates very well the uses of managed identities. 10 | 11 | ![managed identities](project3_when_to_use_msi.png) 12 | 13 | ## The Project 14 | 15 | - We'll first create a VM in the Azure portal. We'll use the **Windows Server 2019 Datacenter Gen 1** image. 16 | - Once the VM is deployed, we'll create a Key Vault in the same resource group as the VM. 17 | - Once the Key Vault is ready, we'll add a new secret, by clicking on **Secrets** on the left menu, and then on **+ Generate/Import**. 18 | 19 | ![secret](project3_create_secret.png) 20 | - We'll now assign a managed service identity to the VM we created above, which will be used to access the secret stored in the Key Vault. 21 | - Back to the VM, we'll go to **Identity**, and we'll activate the **System assigned identity**. 
This identity is tied to this specific resource. Only this resource can use the identity, and once the resource is destroyed, the identity ceases to exist. 22 | - We'll now add a role assignment to this identity. For our lab, the scope for this role assignment will be **Key Vault** and the role will be the one of **Reader**. 23 | 24 | ![role assignment](project3_add_role_assignment.png) 25 | - Back to the VM's main page, we'll go to **Access policies**, under the **Settings** sections. Here we'll add a new access policy with the **Get** and **List** secret permissions, and we'll select the VM as the principal. By doing this, we'll allow this managed service identity to retrieve secrets from our Key Vault. 26 | 27 | ![add access policy](project3_add_access_policy.png) 28 | - We'll RDP into our VM using the credentials we set up in the first step, and once inside, we'll open a PowerShell session and install the AZ PowerShell module. This module will allow us to communicate with Azure via PowerShell. 29 | - The command to install the AZ module is `Install-Module AZ`. 30 | - We'll now run a few PowerShell commands: 31 | 1. `Add-AzAccount -Identity` : this will connect to Azure using our Managed Service Identity. Just as a side-note, in the Microsoft documentation of Azure PowerShell, this command is listed as `Connect-AzAccount -Identity`. 32 | 2. `$password = Get-AzKeyVaultSecret -VaultName cloudskillsvault2017 -Name topsecretpw` : this will pull down the secret from the Key Vault. We set this to a variable so it can be used later in the script. 33 | 3. `$password | Get-Member` : the `Get-Member` cmdlet gets the properties and methods of an object. This cmdlet's output shows the `SecretValue` property, which is the one that contains the secret. 34 | 4. `$password.SecretValue` : this is the cmdlet we can use in our PowerShell scripts when we need to pass secrets. 
35 | 36 | ![PowerShell](project3_powershell.png) 37 | 38 | ## Extras 39 | 40 | - In the future, I want to learn more about [how to authenticate and authorize Python apps on Azure](https://docs.microsoft.com/en-us/azure/developer/python/azure-sdk-authenticate). 41 | - A [tutorial on how to use a VM system assigned managed identity to access Azure Storage](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/tutorial-vm-windows-access-storage). 42 | -------------------------------------------------------------------------------- /Books/DevOps_Handbook.md: -------------------------------------------------------------------------------- 1 | # The DevOps Handbook 2 | 3 | ## Part 1 4 | 5 | ### Introduction 6 | 7 | - DevOps and its resulting technical, architectural, and cultural practices represent a convergence of many philosophical and management movements. 8 | - DevOps is the outcome of applying the most trusted principles from the domains of physical manufacturing and leadership to the IT value stream. 9 | - While the foundation of DevOps can be seen as being derived from Lean, the Theory of Constraints, and the Toyota Kata movement, many also view DevOps as the logical continuation of the Agile software journey. 10 | 11 | #### The Lean Movement 12 | 13 | - Major tenets: 14 | - The manufacturing lead time required to convert raw materials into finished goods is the best predictor of quality, customer satisfaction, and employee happiness. 15 | - One of the best predictors of short lead time is small batch sizes of work. 16 | - Systems thinking: 17 | - Constancy of purpose 18 | - Scientific thinking 19 | - flow and pull (versus push) 20 | - assuring quality at the source 21 | - leading with humility 22 | - respecting every individual 23 | 24 | #### The Agile Manifesto 25 | 26 | - A lightweight set of values and principles applied to software development processes. 
27 | - Key principles: 28 | - Deliver working software frequently 29 | - Small size batches 30 | - Incremental releases 31 | - Small, self-motivated teams 32 | - High-trust management model 33 | 34 | #### The Continuous Delivery Movement 35 | 36 | - Built upon the development discipline of continuous build, test, and integration. 37 | - The **Deployment Pipeline** ensures that code and infrastructure are always in a deployable state, and that all code checked in to trunk can be safely deployed into production. 38 | 39 | #### The Toyota Kata 40 | 41 | - Toyota Kata is a management book written by Mike Rother 42 | - **The Improvement Kata** 43 | - Constant cycle of establishing desired future states 44 | - Weekly target outcomes 45 | - Continual improvement of daily work 46 | 47 | #### Infrastructure as Code 48 | 49 | - To automate and treat the work of Operations like application code 50 | - To apply modern development practices to the entire development stream 51 | 52 | ### Chapter 1 Agile, Continuous Delivery, and the Three Ways 53 | 54 | #### Value Stream 55 | 56 | - One of the fundamental concepts in Lean. 57 | - The sequence of activities required to design, produce, and deliver a good or service to a customer, including the dual flows of information and materials. 58 | - In manufacturing operations, the value stream is often easy to see and observe. 59 | - The same principles and patterns can be equally applied to technology work. 60 | - **The DevOps definition of the technology value stream**: the process required to convert a business hypothesis into a technology-enabled service that delivers value to the customer. 61 | 62 | #### Deployment Lead Time 63 | 64 | - A subset of the value stream described above. 65 | - It begins when an engineer checks a change in to version control and ends when that change is successfully running in production, providing value to the customer and generating useful feedback and telemetry. 
66 | - The first phase of work, which includes Design and Development, is highly variable and uncertain. 67 | - The second, which includes Testing and Operations, strives to be predictable and mechanistic, with the goal of achieving work outputs with minimized variability. 68 | - Our goal is to have Testing and Operations happening simultaneously with Design/Development, enabling fast flow and high quality. 69 | 70 | #### Lead Time vs. Processing Time 71 | 72 | - The two measures commonly used to measure performance in value streams. 73 | - **Lead Time** starts when the request is made and ends when it is fulfilled. 74 | - **Process Time** starts only when we begin work on the customer request (it omits the time that the work is in queue, waiting to be processed). 75 | - Because lead time is what the customer experiences, we typically focus our process improvement attention there instead of on process time. 76 | -------------------------------------------------------------------------------- /Week3/1_ARM_template_VM_Azure.md: -------------------------------------------------------------------------------- 1 | # Building an ARM Template for Virtual Machine Deployment in Azure 2 | 3 | In this lab, Luke Orellana guides us on how to deploy an Ubuntu virtual machine to Azure using ARM templates. 4 | 5 | - Luke instructs us, as a first step, to install the [Azure Resource Manager (ARM) Tools](https://marketplace.visualstudio.com/items?itemName=msazurermtools.azurerm-vscode-tools) extension for VS Code. 6 | - There is, however, a very powerful pack that comes with 11 extensions for Azure, including the ARM Tools. It is called [Azure Tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.vscode-node-azure-pack). This is a highly recommended extension, as it comes with most of the major Azure tools you are going to need. 7 | - We then need to install the Azure CLI. The Azure CLI is a set of commands used to create and manage Azure resources. 
8 | - The current version of the AzureCLI, as of this writing, is **2.19.1**. 9 | - For instructions on how to install the Azure CLI, go to [this page](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli). 10 | 11 | ## Azure Resource Manager (ARM) 12 | 13 | - ARM is the deployment and management service for Azure. 14 | - When a user sends a request from any of the Azure tools, APIs, or SDKs, Azure Resource Manager receives the request, authenticates and authorizes it. 15 | - All requests are handled through the same API. 16 | 17 | ### ARM Templates 18 | 19 | - ARM templates are JavaScript Object Notation (JSON) files that define the infrastructure and configuration for your deployments. 20 | - It uses a *declarative syntax* (it outlines what resources will look like, without describing its control flow). 21 | - In an ARM template, you specify the resources and the properties for those resources, and *Azure Resource Manager* uses this information to deploy the resources in an organized and consistent manner. 22 | - ARM templates are *idempotent* which means you can deploy the same template many times and get the same resource types in the same state. 23 | - ARM template files are made up of the following elements: 24 | - **schema**: A required section that defines the location of the JSON schema file that describes the structure of JSON data. 25 | - **contentVersion**: A required section that defines the version of your template. The version (1.0.0.0) hasn't changed since ARM templates are in GA. 26 | - **apiProfile**: An optional section that defines a collection of API versions for resource types. 27 | - **parameters**: An optional section where you define values that are provided during deployment. These values can be provided by a parameter file, by command-line parameters, or in the Azure portal. 28 | - **variables**: An optional section where you define values that are used to simplify template language expressions. 
29 | - **functions**: An optional section where you can define user-defined functions that are available within the template. User-defined functions can simplify your template when complicated expressions are used repeatedly in your template. 30 | - **resources**: A required section that defines the actual items you want to deploy or update in a resource group or a subscription. 31 | - **output**: An optional section where you specify the values that will be returned at the end of the deployment. 32 | 33 | ## Deployment 34 | 35 | - To connect to Azure CLI, from the command line, run `az login`. 36 | - To run the template, we first need to have a resource group created. 37 | - To create a new Resource Group, run `az group create --name ResourceGroupName --location ResourceGroupLocation` 38 | - The command to deploy the template is `az deployment group create --resource-group ResourceGroupName --template-file .\template.json -p adminPassword="Password"` 39 | - In my case, I ran `az deployment group create --resource-group cslabRG --template-file .\template.json -p adminPassword="Pa55w.rd1234"`. 40 | - If you're running PowerShell, then the command would be `New-AzResourceGroupDeployment`. 41 | - The final JSON file can be seen [here](Project1/template.json). 42 | - This is the result, shown in the Portal. 43 | ![Azure Portal cslabRG](project1_rg.png) 44 | - Just after that, I deleted the resource group by running `az group delete --name cslabRG`. 45 | -------------------------------------------------------------------------------- /Week6/3_Azure_WebApp_CICD_GitHubActions.md: -------------------------------------------------------------------------------- 1 | # Deploy an Azure Web App via CI/CD in GitHub Actions 2 | 3 | For this lab, we'll build and deploy the same web app we used in the previous lab, but this time using GitHub Actions. 4 | 5 | ## The Project 6 | 7 | - The [repository is here](https://github.com/suvo-oko/javascript-sdk-demo-app). 
8 | - In our repository, we click on the **Actions** tab to create our new workflow. 9 | - There's a ready made workflow called [Deploy Node.js to Azure Web App](https://github.com/actions/starter-workflows/blob/ab8c670fafe65faf1574245c8fd327fae319d88f/ci/azure.yml), which can be useful, but for this lab we'll create a workflow from scratch. 10 | - This workflow will build the Node.js app and deploy it to Azure App Service. The build and deploy jobs are contained in the same workflow. 11 | - The first thing I did was store the **AZURE_CREDENTIALS** secret in the repository's secrets vault. 12 | - I created a new service principal for this task with the following command: `az ad sp create-for-rbac --name radwebapp --role contributor --scopes /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx`. 13 | - For more information on Azure service principals and GitHub secrets, please see [Project 3 from Week 5](../Week5/3_Terraform_GitHubActions.md). 14 | 15 | ### The workflow yaml file 16 | 17 | Let's break down the workflow file. 18 | 19 | ```yaml 20 | on: workflow_dispatch 21 | 22 | env: 23 | AZURE_WEBAPP_PACKAGE_PATH: '.' 24 | NODE_VERSION: '12.x' 25 | RESOURCE_GROUP_NAME: 'radnodeapp' 26 | APP_SERVICE_PLAN: 'radnodeappsp' 27 | APP_SERVICE: 'radnodeapp' 28 | ``` 29 | 30 | - Here, we specify what will trigger the action. In this case `workflow_dispatch` means this action will be triggered manually, by clicking the **Run workflow** button. 31 | - The `env` block holds our environment variables. 32 | 33 | ```yaml 34 | jobs: 35 | build-and-deploy: 36 | name: Build and Deploy 37 | runs-on: ubuntu-latest 38 | 39 | steps: 40 | - uses: actions/checkout@v2 41 | 42 | - name: Use Node.js 43 | uses: actions/setup-node@v1 44 | with: 45 | node-version: '12.13.0' 46 | 47 | - name: npm install, build, and test 48 | run: | 49 | npm install 50 | npm run build --if-present 51 | npm run test --if-present 52 | ``` 53 | 54 | - This workflow has just one job, called **Build and Deploy**. 
What comes afterwards are the different steps to be executed. 55 | - This workflow will run in a container running Ubuntu provided by GitHub. 56 | - The first action will be to checkout our code, so the runner can work on it. 57 | - Next comes Node.js setup, followed by npm commands. 58 | 59 | ```yaml 60 | - name: Azure login 61 | uses: Azure/login@v1 62 | with: 63 | creds: ${{ secrets.AZURE_CREDENTIALS }} 64 | 65 | - name: Create Azure App Service Plan 66 | uses: Azure/cli@1.0.4 67 | with: 68 | inlineScript: az appservice plan create -g ${{ env.RESOURCE_GROUP_NAME }} -n ${{ env.APP_SERVICE_PLAN}} --is-linux 69 | 70 | - name: Create Azure App Service 71 | uses: Azure/cli@1.0.4 72 | with: 73 | inlineScript: az webapp create -g ${{ env.RESOURCE_GROUP_NAME }} -p ${{ env.APP_SERVICE_PLAN}} -n ${{ env.APP_SERVICE }} --runtime "NODE|12-lts" 74 | ``` 75 | 76 | - Here, we'll execute the **Azure login** action. This logs the runner in the Azure CLI, so it can execute commands. To log in to Azure, the runner will fetch the credentials in the repository's secrets vault. 77 | - We then run the Azure CLI commands to create an App Service Plan and an App Service. 78 | 79 | ```yaml 80 | - name: 'Deploy to Azure Web App' 81 | uses: Azure/webapps-deploy@v2 82 | with: 83 | app-name: ${{ env.APP_SERVICE}} 84 | package: ${{ env.AZURE_WEBAPP_PACKAGE_PATH }} 85 | ``` 86 | 87 | - This last step will deploy our web app to Azure using the **webapps-deploy** action. 88 | - We now commit the code and we're ready to run the workflow. 89 | - Since we defined the trigger to **workflow dispatch**, we need to go to the Actions tab, select our workflow, and click on **Run workflow**. 90 | - The workflow is complete, with green checkmarks! 91 | ![workflow complete](project3_workflow_success.png) 92 | 93 | - And these are the newly created resources seen in the Azure Portal. 94 | ![resources](project3_azure_portal.png) 95 | 96 | - And our web app up and running! 
97 | ![webapp](project3_webapp.png) 98 | -------------------------------------------------------------------------------- /Week6/5_Security_Serverless.md: -------------------------------------------------------------------------------- 1 | # Security for Serverless Apps 2 | 3 | Both Azure and AWS offer security features to keep our serverless apps more secure. 4 | 5 | ## Azure 6 | 7 | - Let's visit the App we created in the previous exercise. There, we'll click on **Security**, on the menu on the left. This will open the [Azure Security Center](https://docs.microsoft.com/en-us/azure/security-center/) pane. 8 | - Azure Security Center is a unified infrastructure security management system. 9 | - Security Center is natively integrated with App Service, eliminating the need for deployment and onboarding. The integration is transparent. 10 | - In this pane, Security Center shows us the security recommendations and security alerts related to our App. 11 | - If we click on the **View all recommendations in Security Center** button, we are taken to the Security Center page, where we can see all details related to security of our resources in Azure. 12 | ![security center](project5_security_center.png) 13 | - Still in our App Service resource, we can go to **Authentication / Authorization**, under **Settings**. Here we can set authentication features, to prevent anonymous users to access our app. By activating this feature, users will be prompted to supply some sort of user identification in order to be able to access the app. 14 | - If we enable **App Service Authentication**, we are presented with different authentication providers. For this lab, we'll configure **Azure Active Directory**. Like that, users will be prompted to enter their Azure AD credentials to access the App. 
15 | ![authentication](project5_authentication.png) 16 | - We first need to configure the authentication provider, and then, under **Action to take when request is not authenticated**, select the authentication provider we wish to use. 17 | - For Azure AD, the configuration is much easier, since we don't need to provide any further data. For the other providers, we'll need to provide the API key and the API secret, to allow Azure access to these providers. 18 | - Now, when we visit our APP URL, we are prompted with a window where we should type our credentials. 19 | ![app signin](project5_signin.png) 20 | - Another security aspect to be aware of is that of [managed identities](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/). 21 | - There are two types of managed identity. 22 | - **System assigned**: Some Azure services (like this App Service, for instance) allow you to enable a managed identity directly on a service instance. When you enable a system-assigned managed identity an identity is created in Azure AD that is tied to the lifecycle of that service instance. So when the resource is deleted, Azure automatically deletes the identity for you. By design, **only that Azure resource can use this identity to request tokens from Azure AD**. It's a one-to-one relationship. 23 | - **User assigned**: You may also create a managed identity as a standalone Azure resource. You can create a user-assigned managed identity and assign it to one or more instances of an Azure service. In the case of user-assigned managed identities, the identity is managed separately from the resources that use it. It's a one-to-many relationship. 24 | - For App Service, we can activate the system-assigned identity by moving the status slider to On. From this moment on, the App will be registered with Azure AD, and can be granted access to other resources protected by Azure AD. 
25 | - If we go to the User Assigned tab, we can simply add an existing identity by clicking on the **+ Add** button. 26 | 27 | ## AWS 28 | 29 | - For Lambda Functions in AWS, we have the [Permissions](https://docs.aws.amazon.com/lambda/latest/dg/lambda-permissions.html) feature. 30 | - We can use IAM to manage access to the Lambda API and its resources, or we can use **execution roles**. 31 | - A Lambda function's execution role is an AWS Identity and Access Management (IAM) role that grants the function permission to access AWS services and resources. You provide this role when you create a function, and Lambda assumes the role when your function is invoked. 32 | - To create an execution role, in the AWS Console we go to our Lambda Function, click on **Configuration** tab, then **Permissions**, from the menu on the left. My GUI is different than the one presented in Michael's lecture. 33 | ![lambda main](project5_lambda.png) 34 | - We then click on **Edit**, in the **Execution role** field. We'll see this window, where we can choose an existing role or create a new one. When we create a new role, we can choose from a number of policy templates. 35 | ![edit execution role](project5_editsettings.png) 36 | - This would be the equivalent of the managed identity in Azure. 37 | -------------------------------------------------------------------------------- /Week5/2_CI_GitHubActions.md: -------------------------------------------------------------------------------- 1 | # Continuous Integration with GitHub Actions 2 | 3 | - This project will focus solely on Continuous Integration using GitHub Actions. 4 | - This can be a typical scenario for a business just starting to implement CI/CD for their applications. 5 | - **The scenario for this lab**: when changes are pushed to any branch in our repository, an action will be triggered. It will run linting and unit tests to confirm that the application is valid. 
Like that, we can be more confident when we decide to merge the changes into the master branch. 6 | 7 | ## The Project 8 | 9 | - We will use a sample Flask app for this exercise. 10 | - The first step is to fork the project's repo to our own GitHub account. This is [my repository](https://github.com/suvo-oko/ci-pythonapp). 11 | - Once we have the forked repo we go over to the Actions tab. 12 | - GitHub Actions offers several sample actions we can use to get started. We're going to pick one from this list, from the *Continuous Integration* section, called *Python application*. As it says in the description, this action is used to test a Python application. 13 | - When we click on `Set up this workflow`, a new folder called `.github/workflows` is created. Inside this folder, we will find our new workflow yaml file. 14 | - After editing the file to fit our needs, we click on `Start commit` to create the actual pipeline. 15 | 16 | ## The Action file 17 | 18 | Let's break down the yaml file. 19 | 20 | Here, we give the Action a name and we define what will trigger the action, with the `on` parameter. In this case, there are two triggers: when we push a change to any branch and when we create a new file in any branch. 21 | 22 | ```yaml 23 | name: Cloudskills Python app 24 | 25 | on: 26 | push: 27 | branches: 28 | - '*' 29 | create: 30 | branches: 31 | - '*' 32 | ``` 33 | 34 | Now, the `jobs` section starts. Here. we'll say what actions to perform, in the order that they appear. 35 | 36 | ```yaml 37 | jobs: 38 | build: 39 | 40 | runs-on: ubuntu-latest 41 | 42 | steps: 43 | - uses: actions/checkout@v2 44 | - name: Set up Python 3.9 45 | uses: actions/setup-python@v2 46 | with: 47 | python-version: 3.9 48 | - name: Install dependencies 49 | run: | 50 | python -m pip install --upgrade pip 51 | pip install flake8 pytest 52 | if [ -f Application/requirements.txt ]; then pip install -r Application/requirements.txt; fi 53 | ``` 54 | 55 | - We'll first build our environment. 
56 | - In our case, it will run inside a container running Ubuntu, hosted in GitHub's own servers. 57 | - The `uses` parameter will call for GitHub's built-in actions, and the `run` parameter will execute command-line programs using the operating system's shell. 58 | - Each action has its own repo, where you can consult its uses, and its more recent version. 59 | - These are the two actions mentioned above: 60 | - [checkout](https://github.com/actions/checkout): this action checks out your repository, so the workflow can access it. 61 | - [setup-python](https://github.com/actions/setup-python): this action sets up a Python environment. 62 | - The `run` parameter will upgrade pip, install the flake8 and pytest modules and the ones contained in the requirements.txt file. 63 | 64 | ```yaml 65 | - name: Lint with flake8 66 | run: | 67 | # stop the build if there are Python syntax errors or undefined names 68 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 69 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 70 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 71 | - name: Test with pytest 72 | run: | 73 | mkdir testresults 74 | pytest Tests/unit_tests --junitxml=./testresults/test-results.xml 75 | - name: Publish Unit Test Results 76 | uses: EnricoMi/publish-unit-test-result-action@v1.9 77 | with: 78 | files: ./testresults/test-results.xml 79 | ``` 80 | 81 | - There are three steps here: *Lint with flake8*, *Test with pytest*, and *Publish Unit Test Results*. The first two will execute a number of commands in the shell, and the third will publish the unit test results. This last step is an action obtained from the marketplace. 82 | 83 | - To test our CI pipeline, we'll change the file and push it to a new branch. This should trigger the workflow. 
84 | - After cloning the repo to my local machine, I created a new branch, changed the index.html and pushed the changes to the GitHub repository. 85 | - The GitHub Action works! The workflow has been triggered and the tests were correctly executed. See images below. 86 | ![workflows](project2_workflows.png) 87 | - These are the test results presented in this handy *junitxml* format. 88 | ![test results](project2_unit_test_results.png) 89 | -------------------------------------------------------------------------------- /Week5/3_Terraform_GitHubActions.md: -------------------------------------------------------------------------------- 1 | # Terraform with GitHub Actions 2 | 3 | - In this lab, we're going to deploy infrastructure to Azure with Terraform using GitHub Actions. Every time we modify the infrastructure and push a change to the main branch, the action is triggered and the infrastructure is deployed. 4 | - Terraform has to authenticate with Azure to be able to communicate. The best practice to deploy to Azure is to use Service Principals, and not your own Azure user account. 5 | - The command to create a Service Principal with the contributor role is the following: `az ad sp create-for-rbac --name ServicePrincipalName --role contributor --scopes /subscriptions/subscriptionid` 6 | - The output of this command is a JSON object, and it looks like this: 7 | 8 | ```json 9 | { 10 | "appId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 11 | "displayName": "TerraformSP", 12 | "name": "http://TerraformSP", 13 | "password": "xxxxxxxxxxxxxxxxx", 14 | "tenant": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" 15 | } 16 | ``` 17 | 18 | - We're going to use this information, and the subscription ID, to create a secret in our GitHub repository. 19 | - As we may already be aware, we shouldn't write sensitive information directly in our code stored in GitHub. GitHub has a feature called Secrets that let us safely store passwords, credentials and other sensitive information. 
20 | - We're going to store a secret called `AZURE_CREDENTIALS` in our secrets vault. This is going to be used by the [Azure Login GitHub Action](https://github.com/marketplace/actions/azure-login) to access Azure CLI. 21 | - The format taken by the secret isn't exactly the same as the output from the `az ad sp create-for-rbac` command. This is the format I used: 22 | 23 | ```json 24 | { 25 | "clientId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 26 | "clientSecret": "xxxxxxxxxxxxxxxxx", 27 | "subscriptionId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 28 | "tenantId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" 29 | } 30 | ``` 31 | 32 | - Terraform needs to connect to Azure, and therefore it also needs to be able to read the Azure credentials. We need to store these credentials in our code in environment variables that Terraform will recognize. That's how I did it: 33 | 34 | ```yaml 35 | env: 36 | ARM_SUBSCRIPTION_ID: ${{ secrets.ARM_SUBSCRIPTION_ID }} 37 | ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} 38 | ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} 39 | ARM_CLIENT_SECRET: ${{ secrets.ARM_CLIENT_SECRET }} 40 | AZURE_STORAGE: 'terraform67ui3078' 41 | ``` 42 | 43 | - These are secrets stored in the repo's secrets vault. The AZURE_STORAGE variable will be used later in our code. 44 | 45 | ## The tasks 46 | 47 | When our build starts, it will do the following: 48 | 49 | ```yaml 50 | steps: 51 | 52 | - uses: actions/checkout@v2 53 | 54 | - name: Azure Login 55 | uses: Azure/login@v1 56 | with: 57 | creds: ${{ secrets.AZURE_CREDENTIALS }} 58 | ``` 59 | 60 | - The first step will checkout our repo so the job can access it. 61 | - After that, the *Azure Login* action starts. This action logs us in to Azure CLI, so we can run commands. 
62 | 63 | ```yaml 64 | 65 | - name: Build a Terraform state 66 | run: chmod +x ./tfstate.sh && ./tfstate.sh ${{ env.AZURE_STORAGE }} 67 | 68 | - name: HashiCorp - Setup Terraform 69 | uses: hashicorp/setup-terraform@v1.2.1 70 | 71 | - name: Terraform init 72 | run: terraform init -backend-config="storage_account_name=${{ env.AZURE_STORAGE }}" 73 | 74 | - name: Terraform plan 75 | run: terraform plan -out=tfplan 76 | 77 | - name: Terraform apply 78 | run: terraform apply tfplan 79 | ``` 80 | 81 | - The *Build a Terraform state* step will run the `tfstate.sh` script. This script will create a storage account to store our Terraform State file. 82 | - The next task installs Terraform in our environment. 83 | - To complete the action, the workflow will run `terraform init`, `terraform plan`, and `terraform apply`. 84 | 85 | After several attempts, the build succeeded! 86 | ![successful build](project3_build_success.png) 87 | 88 | ## Conclusion 89 | 90 | - I struggled with the yaml formatting, namely the indentation. The GitHub interface indicates a few formatting errors with squiggly lines, but it lacks more power, like intellisense or autocomplete. VS Code has a few extensions but I have to investigate further whether they're a good alternative. 91 | - It took me some time to nail down the formatting for the AZURE_CREDENTIALS secret, but in the end I figured it out. 92 | - In the video, Luke writes the environment variables with the Azure credentials directly in the code, but I used the secrets instead. 93 | - It's very important to read the error messages thoroughly. They pack a lot of information about the errors. 94 | - This is [my repository](https://github.com/suvo-oko/terraform-ghactions) for this project. 
95 | -------------------------------------------------------------------------------- /Week6/2_Azure_WebApp.md: -------------------------------------------------------------------------------- 1 | # Build an Azure Web App 2 | 3 | For this project, we'll deploy an Azure App Service, with its corresponding App Service Plan, using Terraform. Then we'll deploy an app using the Deployment Center feature in the App Service page in the Portal. 4 | 5 | ## The Project 6 | 7 | - First, we'll create the Terraform file, which will contain the resources to be deployed. 8 | - This is my [main.tf](Project2/main.tf) file. 9 | - In the video, Michael didn't include in his file the creation of the resource group. I included it. As you can see from my file, the first resource block is a resource group. 10 | - Two other files accompany the main.tf: [variables.tf](Project2/variables.tf) and `terraform.tfvars`. 11 | - `variables.tf` is where we define the variables we'll use throughout our configuration file. 12 | - `terraform.tfvars` is the file where we actually specify the values for the variables. 13 | - This is my `variables.tf`: 14 | 15 | ```terraform 16 | variable "app_service_plan_name" { 17 | type = string 18 | } 19 | 20 | variable "location" { 21 | type = string 22 | } 23 | 24 | variable "resource_group_name" { 25 | type = string 26 | } 27 | 28 | variable "app_service_name" { 29 | type = string 30 | } 31 | ``` 32 | 33 | - And this is my `terraform.tfvars`: 34 | 35 | ```terraform 36 | app_service_plan_name = "cloudskillssp" 37 | location = "westeurope" 38 | resource_group_name = "cloudskillsapp-rg" 39 | app_service_name = "cloudskillsradapp" 40 | ``` 41 | 42 | - The `terraform.tfvars` file is usually included in the `.gitignore` file, so it wouldn't normally be in source control. This file is likely to contain sensitive and confidential information, such as passwords and credentials. 
43 | - To execute the configuration, we first need to make sure we're in the same directory as the `main.tf`. 44 | - We then run `terraform init` to initialize the working directory. This is the first command that should be run after writing a new Terraform configuration. 45 | - The second command we run is `terraform plan`. This will create an execution plan. This command is a convenient way to check whether the execution plan for a set of changes matches your expectations without making any changes to real resources or to the state. 46 | - The third command is `terraform apply` and this one will actually apply the changes required to reach the desired state described in the configuration file. 47 | - After running these commands, we can see the resources are now present. 48 | ![Azure Portal](project2_azureportal.png) 49 | - The next step is to deploy our app to App Service. For this, we'll use the **Deployment Center** feature. Deployment Center offers a quick and easy way to implement CI/CD. 50 | - The UX we see on the video is different than the one I used. I found this new experience interesting because it's streamlined in a single page. It also automatically creates a GitHub Actions workflow. 51 | ![Deployment Center](project2_deploymentcenter.png) 52 | - We'll deploy a JavaScript sample app, which I forked from Michael's GitHub repo.This is [my repository](https://github.com/suvo-oko/javascript-sdk-demo-app). 53 | - My GitHub account was already authorized in the Azure Portal, so I immediately saw my organization and my repositories. 54 | - For **Workflow Option**, if you select **Add a workflow**, Azure will automatically create a GitHub Action for you. This is a great feature! 55 | - When you click on save, you'll be taken to the **Logs** tab, where you can see the progress of the current commit, and the previous commits. 56 | - After a considerable time had passed, the build and deploy succeeded! 
57 | ![Deployment Success](project2_deploymentsuccess.png) 58 | - And this is the app running. 59 | ![App running](project2_webapprunning.png) 60 | - After confirming everything is working, I ran `terraform destroy` to tear down the resources we built. 61 | 62 | ## Conclusion 63 | 64 | - In the first iteration of my Terraform configuration, I used the same value for the `resource_group_name` parameter in all 3 resource blocks present in the configuration, which was `var.resource_group_name`. This references the `variables.tf` file. When I first ran `terraform apply`, it gave me an error during the creation of the app service and app service plan, that stated that the resource group couldn't be found. A second `terraform apply` immediately afterwards succeeded in the creation of these resources. 65 | - Patrick Loftus, from the community, helped me spot my mistake. I have to reference the subsequent resources, the ones which depend on the creation of a previous resource to exist, with **interpolation**. Like that, we're creating an implicit dependency. Otherwise, we need to use the `depends_on` argument. This should be used only when Terraform can't automatically infer the dependency, or when there are hidden dependencies. 66 | - After updating my code, the `resource_group_name` parameter under the `azurerm_app_service_plan` and the `azurerm_app_service` resource blocks looked like this: `resource_group_name = azurerm_resource_group.azurerg.name` 67 | - The dependencies between resources are something we must pay attention to. By using interpolation, Terraform will be able to figure out the dependencies most of the time. 68 | - More detailed information about the `depends_on` can be [found here](https://www.terraform.io/docs/language/meta-arguments/depends_on.html). 
69 | -------------------------------------------------------------------------------- /Week1/Notes_week1.md: -------------------------------------------------------------------------------- 1 | # DevOps Fundamental Skills 2 | 3 | ## Introduction 4 | 5 | The most important thing: **DON'T PANIC**:exclamation::exclamation: 6 | 7 | [Michael Levan](https://github.com/AdminTurnedDevOps) gives us an overview of predictions for 2021 and core DevOps concepts and skills. 8 | 9 | ## DevOps Predictions for 2021 10 | 11 | - Culture and leadership 12 | - Organizations are starting to value the DevOps culture more and more 13 | - The ability to have multiple teams working together 14 | - We all need to find ways to communicate more 15 | - **Collaboration** 16 | - Hybrid teams 17 | - Remote teams are the norm now 18 | - Application security 19 | - Security is, unfortunately, an afterthought most of the time 20 | - Security must be at the forefront of every deployment decision 21 | - Cloud adoption 22 | - The public cloud is expected to grow 23 | - **DevOps is for the business** 24 | - DevOps is very much a business position, not simply an engineering one 25 | - Without DevOps there wouldn't be a business 26 | - The DevOps pro is a key stakeholder in the organization 27 | - Developers will have more say 28 | - DevOps is still one of the more increasingly needed roles 29 | 30 | DevOps, as a position, has been around for a few years. 
31 | 32 | ## Cloud Computing 33 | 34 | - You are using resources that you don't own 35 | - Microsoft or Amazon, or other cloud vendors, own the resources 36 | - Renting of resources 37 | - High availability 38 | - Scalability 39 | - Quick deployments 40 | 41 | ## Collaboration 42 | 43 | - Communication 44 | - The ability to communicate with your team is crucial 45 | - A constant line of communication has to exist 46 | - Working together 47 | - With good communication comes good ideas 48 | - With good ideas come good theories 49 | - With good theories come good products 50 | - Working with management 51 | - The only way for a team to succeed 52 | - Management and engineers must be on the same page 53 | - Working with other teams 54 | - It's all about communication and collaboration 55 | - Goals should be aligned 56 | 57 | ## Working with the Business 58 | 59 | - Management and engineering should always be on the same page 60 | - Know when to say NO 61 | - When you're in engineering, it's easy not to think about the business' goals and objectives 62 | - Your own goals and the business goals must align 63 | 64 | ## Major DevOps Tooling 65 | 66 |
67 | 68 | ![DevOps Tools](devops-tools.png) 69 | 70 | - Coding and development practices 71 | - Azure: most popular are PowerShell, Python and JavaScript 72 | - AWS: most popular are Python, JavaScript, Java and Go 73 | - Orchestration (Kubernetes) 74 | - Containerization (Docker, containerd, CRI-O) 75 | - Azure has its own containerization platform: Azure Container Instances (ACI) 76 | - In AWS, there are AWS Elastic Container Services and AWS Fargate 77 | - Monitoring 78 | - Azure: Application Insights, Azure Monitor, Prometheus 79 | - AWS: Cloud Watch, Lambda Insights, Prometheus 80 | - Cloud 81 | - CI/CD 82 | - Infrastructure-as-code 83 | 84 | ## Security Skills 85 | 86 | - You don't have to be a hacker, but you have to have an understanding of software and development security skills 87 | - Start thinking about security from the application perspective 88 | - Faster deployments == more vulnerability 89 | - Many CI/CDs pipelines aren't checking for security vulnerabilities 90 | - Core software security competencies 91 | - Static code analysis 92 | - Unit testing 93 | - Performance testing 94 | - **These tests should always be in the CI portion of your pipeline** 95 | 96 | ## Command Line (CLI) 97 | 98 | - **You should be a command line wizard** 99 | - A CLI tool is just a middle-man to an API 100 | - Much quicker 101 | 102 | ## Version/Source Control 103 | 104 | - A place to store code 105 | - Version control == manage versions 106 | - **Collaboration** 107 | - Less bugs 108 | - Code reviews 109 | - The more eyes on the code the better 110 | - Everyone is aware 111 | - Centralized version control 112 | - One server 113 | - everyone uses the code from this one server 114 | - Very hard to collaborate 115 | - Distributed version control 116 | - Most popular today 117 | - Allows anyone to download and have a working copy of the code on their local hosts, typically referred to as a local repo 118 | - Allows multiple people to work with the code at the same time 119 
| 120 | ## CI/CD 121 | 122 | - Build, test and create an artifact out of the code to be used in a deployment 123 | - Deploy the code simply with the click of a button or as the code is committed to a repo 124 | - Continuous Delivery: allows you to automatically deploy code daily, weekly, hourly, etc. 125 | - Continuous Deployment: auto-deploy 126 | 127 | ## Guiding Principles & Winning Patterns :bulb: 128 | 129 | 1. Strategic empathy 130 | 1. Take time to understand and share the perspective of another and obsess over customer experience 131 | 1. Laser focus 132 | 1. Keep the main thing the main thing, ruthlessly eliminate distractions 133 | 1. High standards 134 | 1. Great just isn't good enough. Our work is world-class 135 | 1. Long-term thinking 136 | 1. Make short-term sacrifices for long-term success 137 | 1. Radical ownership 138 | 1. Don't blame others or make excuses. Be resourceful and creative 139 | 1. Be scientific 140 | 1. Reason from first principles, experiment, use data, test and iterate 141 | 1. Be decisive 142 | 1. Speed matters in business. Commit fully, work hard, take initiative, and get things done 143 | 1. The truth is undeniable 144 | 1. Be brutally honest and transparent with others, and most importantly, with yourself 145 | 1. Go big or go home 146 | 1. Don't limit yourself to small thinking 147 | 1. Invent the new reality 148 | 1. Focus on the vision and build to the new reality. Incrementally. Every day. 149 | 150 | :star: :star: :star: :star: :star: 151 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # :cloud: Cloud Native Bootcamp :cloud: 2 | 3 | This is the repository for the [Cloudskills](https://cloudskills.io/) Cloud Native DevOps Bootcamp projects. Here you'll find my journey, the projects I'm working on, notes on the lectures, and other additional resources. 
4 | 5 | I'll divide this repo in folders named weeks. Under each week, I'll post that week's project, and some extra stuff related to the topics covered. The **Notes** file is where I'll keep the notes on the lecture. 6 | 7 | ### :star: Other members of the Cloudskills community with awesome repos :star: 8 | 9 | - [danielkomaz](https://github.com/danielkomaz/cloudskillsbootcamp) 10 | - [wayneforrester](https://github.com/wayneforrester/cloudskills-cloudnative-devops-bootcamp) 11 | 12 | **KUDOS** to you and to all the members of the [CloudSkills](https://github.com/cloudskills) community! Just hit me up and I'll include your repo here too! 13 | 14 | ## Week 1: DevOps Fundamental Skills 15 | 16 | *February 10th* 17 | 18 | For this first week, Michael Levan gives us an introduction to DevOps, what it is, its role and importance for the business. 19 | 20 | In the lecture, Michael mentions 3 books that are a must read: [The Phoenix Project](https://itrevolution.com/the-phoenix-project/), [The DevOps Handbook](https://itrevolution.com/book/the-devops-handbook/), and [Accelerate](https://itrevolution.com/book/accelerate/). My plan is to write summaries of these 3 books, and post them under the **Books** folder. 21 | 22 | - [My notes](Week1/Notes_week1.md) 23 | 24 | ### Projects 25 | 26 | - [x] [Getting started with GitHub](Week1/1_GitHub_Getting_Started.md) 27 | - [x] [Setting up your first Azure environment](Week1/2_SettingUp_Azure.md) 28 | - [x] [Setting up your first AWS environment](Week1/3_SettingUp_AWS.md) 29 | - [x] [Setting up projects in GitHub](Week1/4_GitHub_Projects.md) 30 | - [x] [Setting up your first GitHub Action](Week1/5_First_GitHub_Action.md) 31 | 32 | ## Week 2: Scripting Like a Developer 33 | 34 | *February 17th* 35 | 36 | In this session, Michael Levan gives us an introduction to coding. 
37 | 38 | - [My notes](Week2/Notes_week2.md) 39 | 40 | ### Projects 41 | 42 | - [x] [Setting up a dev environment from start to finish](Week2/1_dev_environment.md) 43 | - [x] [Reusable and clean Python code](Week2/2_Reusable_clean_Python.md) 44 | - [x] [Reusable and clean PowerShell code](Week2/3_Reusable_PowerShell_code.md) 45 | - [x] [Linting in PowerShell and Python](Week2/4_Linting_PowerShell_Python.md) 46 | - [x] [Testing in PowerShell and Python](Week2/5_Testing_PowerShell_Python.md) 47 | - [x] [Documenting code](Week2/6_Documenting_code.md) 48 | 49 | ## Week 3: Infrastructure as Code 50 | 51 | *February 24th* 52 | 53 | Luke Orellana takes us on a tour through Infrastructure-as-code, and its main concepts and tools 54 | 55 | - [My notes](Week3/Notes_week3.md) 56 | 57 | ### Projects 58 | 59 | - [x] [Building an ARM template for virtual machine deployment in Azure](Week3/1_ARM_template_VM_Azure.md) 60 | - [x] [Building a Terraform module to create a virtual network in AWS](Week3/2_Terraform_module_vnet_aws.md) 61 | - [x] [Testing infrastructure-as-code](Week3/3_Testing_IaC.md) 62 | - [x] [Intro to Azure Bicep](Week3/4_Intro_Azure_Bicep.md) 63 | 64 | ## Week 4: GitHub and Source Control 65 | 66 | *March 3rd* 67 | 68 | Michael Levan speaks about the importance of source control. 69 | 70 | - [My notes](Week4/Notes_week4.md) 71 | 72 | ### Projects 73 | 74 | - [x] [VS Code with Git and GitHub](Week4/1_VSCode_Git_GitHub.md) 75 | - [x] [Working through push conflicts](Week4/2_Working_Push_Conflicts.md) 76 | - [x] [Contributing to an open source project](Week4/3_Contributing_Open_Source.md) 77 | 78 | ## Week 5: Continuous Integration, Continuous Deployment, and Continuous Delivery 79 | 80 | *March 10th* 81 | 82 | This week is dedicated to the concepts, benefits, best practices and real-life uses of CI/CD. 
83 | 84 | - [My notes](Week5/Notes_week5.md) 85 | 86 | ### Projects 87 | 88 | - [x] [Deploying a web app via GitHub Actions](Week5/1_Deploy_Web_App_GitHubActions.md) 89 | - [x] [Continuous Integration with GitHub Actions](Week5/2_CI_GitHubActions.md) 90 | - [x] [Terraform with GitHub Actions](Week5/3_Terraform_GitHubActions.md) 91 | - [x] [Setting up continuous monitoring](Week5/4_Setting_Up_Continuous_Monitoring.md) 92 | - [x] [Artifacts and packages in CI/CD](Week5/5_Artifacts_Packages_CICD.md) 93 | - [x] [Working with AWS CodeDeploy](Week5/6_Working_AWS_CodeDeploy.md) 94 | 95 | ## Week 6: Serverless in Azure and AWS 96 | 97 | *March 17th* 98 | 99 | - [My Notes](Week6/Notes_week6.md) 100 | 101 | ### Projects 102 | 103 | - [x] [Build an Azure Function app](Week6/1_Azure_Function.md) 104 | - [x] [Build an Azure Web App](Week6/2_Azure_WebApp.md) 105 | - [x] [Deploy an Azure Web App via CI/CD in GitHub Actions](Week6/3_Azure_WebApp_CICD_GitHubActions.md) 106 | - [x] [Create a Lambda Function](Week6/4_Lambda_Function.md) 107 | - [x] [Security for serverless apps](Week6/5_Security_Serverless.md) 108 | - [x] [Monitoring serverless apps in Azure and AWS](Week6/6_Monitoring_Serverless.md) 109 | 110 | ## Week 7: Security in Azure and AWS 111 | 112 | *March 24th* 113 | 114 | - [My Notes](Week7/Notes_Week7.md) 115 | 116 | ### Projects 117 | 118 | - [x] [Securing code](Week7/1_Securing_Code.md) 119 | - [x] [Implementing Continuous Security](Week7/2_Continuous_Security.md) 120 | - [x] [Security authentication in code](Week7/3_Security_Authentication.md) 121 | - [x] [Creating IAM roles, users and groups](Week7/4_IAM_Roles.md) 122 | - [x] [Creating RBAC rules and Azure Policies](Week7/5_RBAC_Policies.md) 123 | 124 | ## Week 8: Containerization and Kubernetes 125 | 126 | *March 31st* 127 | 128 | - [My Notes](Week8/Notes_Week8.md) 129 | 130 | ### Projects 131 | 132 | - [x] [Creating a Minikube environment](Week8/1_Minikube_Env.md) 133 | - [x] [Creating an AKS 
cluster](Week8/2_AKS_Cluster.md) 134 | - [x] [Deploying to AKS](Week8/3_Deploy_AKS.md) 135 | - [ ] [Creating an EKS cluster](Week8/4_EKS_Cluster.md) 136 | - [ ] [Deploying to EKS](Week8/5_Deploy_EKS.md) 137 | - [ ] [Using Azure ACI](Week8/6_Azure_ACI.md) 138 | - [ ] [Using AWS Fargate](Week8/7_AWS_Fargate.md) 139 | 140 | ## Week 9: Monitoring and Logging 141 | 142 | *April 7th* 143 | 144 | - [My Notes](Week9/Notes_Week9.md) 145 | 146 | ### Projects 147 | 148 | - [ ] [Monitoring Azure IaaS](Week9/1_Azure_IaaS.md) 149 | - [ ] [Monitoring serverless platforms](Week9/2_Serverless.md) 150 | - [ ] [Azure Monitor and alerting](Week9/3_Azure_Monitor.md) 151 | - [ ] [Azure logging and metrics](Week9/4_Azure_logs_metrics.md) 152 | - [ ] [AWS Cloudwatch](Week9/5_AWS_Cloudwatch.md) 153 | - [ ] [Container monitoring with Prometheus](Week9/6_Prometheus.md) 154 | 155 | ## Week 10: Where To Go From Here (Career Prep) 156 | 157 | *April 14th 158 | -------------------------------------------------------------------------------- /Week3/Project1/template.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", 3 | "contentVersion": "1.0.0.0", 4 | "parameters": { 5 | "adminPassword": { 6 | "type": "string", 7 | "metadata": { 8 | "description": "Administrator Password" 9 | } 10 | } 11 | }, 12 | "functions": [], 13 | "variables": {}, 14 | "resources": [ 15 | { 16 | "name": "[toLower('cslab2017ljmstorage')]", 17 | "type": "Microsoft.Storage/storageAccounts", 18 | "apiVersion": "2019-06-01", 19 | "location": "[resourceGroup().location]", 20 | "tags": { 21 | "displayName": "cslab2017ljm Storage Account" 22 | }, 23 | "sku": { 24 | "name": "Standard_LRS" 25 | }, 26 | "kind": "Storage" 27 | }, 28 | { 29 | "name": "cslab2017ljm-PublicIP", 30 | "type": "Microsoft.Network/publicIPAddresses", 31 | "apiVersion": "2019-11-01", 32 | "location": 
"[resourceGroup().location]", 33 | "tags": { 34 | "displayName": "PublicIPAddress" 35 | }, 36 | "properties": { 37 | "publicIPAllocationMethod": "Dynamic", 38 | "dnsSettings": { 39 | "domainNameLabel": "[toLower('cslab2017ljm')]" 40 | } 41 | } 42 | }, 43 | { 44 | "name": "cslab2017ljm-nsg", 45 | "type": "Microsoft.Network/networkSecurityGroups", 46 | "apiVersion": "2018-08-01", 47 | "location": "[resourceGroup().location]", 48 | "properties": { 49 | "securityRules": [ 50 | { 51 | "name": "nsgRule1", 52 | "properties": { 53 | "description": "description", 54 | "protocol": "Tcp", 55 | "sourcePortRange": "*", 56 | "destinationPortRange": "22", 57 | "sourceAddressPrefix": "*", 58 | "destinationAddressPrefix": "*", 59 | "access": "Allow", 60 | "priority": 100, 61 | "direction": "Inbound" 62 | } 63 | } 64 | ] 65 | } 66 | }, 67 | { 68 | "name": "cslab2017ljm-VirtualNetwork", 69 | "type": "Microsoft.Network/virtualNetworks", 70 | "apiVersion": "2019-11-01", 71 | "location": "[resourceGroup().location]", 72 | "dependsOn": [ 73 | "[resourceId('Microsoft.Network/networkSecurityGroups', 'cslab2017ljm-nsg')]" 74 | ], 75 | "tags": { 76 | "displayName": "cslab2017ljm-VirtualNetwork" 77 | }, 78 | "properties": { 79 | "addressSpace": { 80 | "addressPrefixes": [ 81 | "10.0.0.0/16" 82 | ] 83 | }, 84 | "subnets": [ 85 | { 86 | "name": "cslab2017ljm-VirtualNetwork-Subnet", 87 | "properties": { 88 | "addressPrefix": "10.0.0.0/24", 89 | "networkSecurityGroup": { 90 | "id": "[resourceId('Microsoft.Network/networkSecurityGroups', 'cslab2017ljm-nsg')]" 91 | } 92 | } 93 | } 94 | ] 95 | } 96 | }, 97 | { 98 | "name": "cslab2017ljm-NetworkInterface", 99 | "type": "Microsoft.Network/networkInterfaces", 100 | "apiVersion": "2019-11-01", 101 | "location": "[resourceGroup().location]", 102 | "dependsOn": [ 103 | "[resourceId('Microsoft.Network/publicIPAddresses', 'cslab2017ljm-PublicIP')]", 104 | "[resourceId('Microsoft.Network/virtualNetworks', 'cslab2017ljm-VirtualNetwork')]" 105 | ], 106 | 
"tags": { 107 | "displayName": "cslab2017ljm-NetworkInterface" 108 | }, 109 | "properties": { 110 | "ipConfigurations": [ 111 | { 112 | "name": "ipConfig1", 113 | "properties": { 114 | "privateIPAllocationMethod": "Dynamic", 115 | "publicIPAddress": { 116 | "id": "[resourceId('Microsoft.Network/publicIPAddresses', 'cslab2017ljm-PublicIP')]" 117 | }, 118 | "subnet": { 119 | "id": "[resourceId('Microsoft.Network/virtualNetworks/subnets', 'cslab2017ljm-VirtualNetwork', 'cslab2017ljm-VirtualNetwork-Subnet')]" 120 | } 121 | } 122 | } 123 | ] 124 | } 125 | }, 126 | { 127 | "name": "cslab2017ljm", 128 | "type": "Microsoft.Compute/virtualMachines", 129 | "apiVersion": "2019-07-01", 130 | "location": "[resourceGroup().location]", 131 | "dependsOn": [ 132 | "[resourceId('Microsoft.Network/networkInterfaces', 'cslab2017ljm-NetworkInterface')]" 133 | ], 134 | "tags": { 135 | "displayName": "cslab2017ljm" 136 | }, 137 | "properties": { 138 | "hardwareProfile": { 139 | "vmSize": "Standard_A2_v2" 140 | }, 141 | "osProfile": { 142 | "computerName": "cslab2017ljm", 143 | "adminUsername": "adminUsername", 144 | "adminPassword": "[parameters('adminPassword')]" 145 | }, 146 | "storageProfile": { 147 | "imageReference": { 148 | "publisher": "Canonical", 149 | "offer": "UbuntuServer", 150 | "sku": "16.04-LTS", 151 | "version": "latest" 152 | }, 153 | "osDisk": { 154 | "name": "cslab2017ljm-OSDisk", 155 | "caching": "ReadWrite", 156 | "createOption": "FromImage" 157 | } 158 | }, 159 | "networkProfile": { 160 | "networkInterfaces": [ 161 | { 162 | "id": "[resourceId('Microsoft.Network/networkInterfaces', 'cslab2017ljm-NetworkInterface')]" 163 | } 164 | ] 165 | }, 166 | "diagnosticsProfile": { 167 | "bootDiagnostics": { 168 | "enabled": true, 169 | "storageUri": "[reference(resourceId('Microsoft.Storage/storageAccounts/', toLower('cslab2017ljmstorage'))).primaryEndpoints.blob]" 170 | } 171 | } 172 | } 173 | } 174 | ], 175 | "outputs": { 176 | "publicIP": { 177 | "type": "string", 178 | 
"value": "[reference(resourceId('Microsoft.Network/publicIPAddresses','cslab2017ljm-PublicIP')).dnsSettings.fqdn]" 179 | } 180 | } 181 | } --------------------------------------------------------------------------------