├── CDK ├── AWS_CDK │ ├── requirements.txt │ ├── automation_rocks │ │ ├── __init__.py │ │ ├── automation_rocks.egg-info │ │ │ ├── top_level.txt │ │ │ ├── dependency_links.txt │ │ │ ├── requires.txt │ │ │ ├── SOURCES.txt │ │ │ └── PKG-INFO │ │ ├── __pycache__ │ │ │ ├── __init__.cpython-38.pyc │ │ │ └── automation_rocks_stack.cpython-38.pyc │ │ └── automation_rocks_stack.py │ ├── .gitignore │ ├── cdk.json │ ├── app.py │ ├── setup.py │ └── README.md └── TerraformCDK │ ├── requirements.txt │ ├── .gitignore │ ├── cdktf.json │ ├── help │ └── main.py ├── Inspec ├── files │ ├── inspec │ │ ├── ssh │ │ │ ├── attributes.yml │ │ │ ├── controls │ │ │ │ └── ssh.rb │ │ │ ├── threshold.yml │ │ │ └── inspec.yml │ │ ├── linux │ │ │ ├── attributes.yml │ │ │ ├── controls │ │ │ │ └── linux.rb │ │ │ ├── threshold.yml │ │ │ └── inspec.yml │ │ ├── docker │ │ │ ├── attributes.yml │ │ │ ├── controls │ │ │ │ └── docker.rb │ │ │ ├── threshold.yml │ │ │ └── inspec.yml │ │ └── gcp │ │ │ ├── threshold.yml │ │ │ ├── attributes.yml │ │ │ ├── inspec.yml │ │ │ └── controls │ │ │ └── gcp.rb │ ├── secrets │ │ └── heimdall_key │ ├── setup-docker-secrets.sh │ ├── nginx.conf │ ├── index.html │ ├── docker-compose.yml │ ├── creds.json │ ├── Dockerfile │ └── tests.sh ├── README.md ├── modules │ ├── instance │ │ ├── outputs.tf │ │ ├── variables.tf │ │ └── instance.tf │ └── storage │ │ ├── storage.tf │ │ └── variables.tf ├── gcp.tf ├── terraform.tfvars └── variables.tf ├── helm_example-0.1.0.tgz ├── README.md ├── helm_example ├── templates │ ├── namespace.yaml │ ├── configmap.yaml │ ├── test-svc.yaml │ └── test-deploy.yaml ├── README.md ├── Chart.yaml └── values.yaml ├── EC2_Web_UserData.bash ├── AdminUser.py ├── Automation ├── README.md ├── hosts ├── CloudFormationPlaybook.yml └── CloudFormation.json ├── RegularUser.py ├── DMS ├── README.md ├── DMS_TABLEMAPPINGS.json ├── DMS_CREATE_TASK.py ├── DMS_AWSCLI_COMMAND.sh ├── DMS_TASKSETTINGS.json ├── DMS_SOURCE_CONFIG.sql └── DMS_TARGET_CONFIG.sql ├── Migrate_To_RDS_Oracle ├── README.md ├── install_cli_pip.py ├── Create_Tablespace.py ├── create_user.py ├── Move_Datapump.py ├── create_dblink.py ├── Clean_Up_RDS_Datapump.py ├── Export_Source.py └── Import_Target.py ├── GKE_Cluster_with_Nginx_Ingress ├── modules │ ├── ingress │ │ ├── certs │ │ │ ├── values.yaml.tpl │ │ │ ├── Chart.yaml │ │ │ └── templates │ │ │ │ ├── ClusterIssuer.yml │ │ │ │ └── _helpers.tpl │ │ ├── namespace.tf │ │ ├── data_sources.tf │ │ ├── nginx_ingress.tf │ │ ├── variables.tf │ │ ├── deploy_cert_manager.tf │ │ ├── dns_service_account.tf │ │ └── ingress.tf │ └── gke │ │ ├── data_sources.tf │ │ ├── outputs.tf │ │ ├── variables.tf │ │ └── gke.tf ├── README.md ├── data_sources.tf ├── variables.tf ├── versions.tf ├── outputs.tf ├── terraform.tfvars ├── main.tf └── providers.tf ├── Connect.py ├── Configure_Git.sh ├── Configure cx_Oracle.py └── SQL_Server_2017_On_Linux.sql /CDK/AWS_CDK/requirements.txt: -------------------------------------------------------------------------------- 1 | -e . 
2 | -------------------------------------------------------------------------------- /Inspec/files/inspec/ssh/attributes.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /Inspec/files/inspec/linux/attributes.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /CDK/AWS_CDK/.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | cdk.out 3 | -------------------------------------------------------------------------------- /CDK/TerraformCDK/requirements.txt: -------------------------------------------------------------------------------- 1 | cdktf~=0.0.18 -------------------------------------------------------------------------------- /Inspec/files/secrets/heimdall_key: -------------------------------------------------------------------------------- 1 | aZwbtxgUXD8IYDCn_19FpQ 2 | -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/automation_rocks.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/automation_rocks.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /helm_example-0.1.0.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PatriciaAnong/Blog/HEAD/helm_example-0.1.0.tgz -------------------------------------------------------------------------------- /Inspec/files/inspec/docker/attributes.yml: -------------------------------------------------------------------------------- 1 | container_capadd: SYS_ADMIN 2 | benchmark_version: 2.1.0 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Blog 2 | Scripts from my Blog [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog) 3 | -------------------------------------------------------------------------------- /CDK/TerraformCDK/.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | imports/* 3 | !imports/__init__.py 4 | .terraform 5 | cdktf.out 6 | terraform.tfstate* -------------------------------------------------------------------------------- /helm_example/templates/namespace.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: {{ .Values.helm_example.namespace }} 5 | -------------------------------------------------------------------------------- /EC2_Web_UserData.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | /usr/bin/yum -y install httpd php php-mysqli 3 | /sbin/chkconfig httpd on 4 | /sbin/service httpd start 5 | 
-------------------------------------------------------------------------------- /Inspec/README.md: -------------------------------------------------------------------------------- 1 | # DevSecOps using Inspec 2 | The corresponding blog post can be found at [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog/2019/9/1/devsecops) -------------------------------------------------------------------------------- /Inspec/files/inspec/ssh/controls/ssh.rb: -------------------------------------------------------------------------------- 1 | # copyright: 2019, Patricia Anong 2 | 3 | title "PAnong Sample Control" 4 | 5 | include_controls "ssh-baseline" do 6 | end 7 | -------------------------------------------------------------------------------- /Inspec/files/inspec/linux/controls/linux.rb: -------------------------------------------------------------------------------- 1 | # copyright: 2019, Patricia Anong 2 | 3 | title "PAnong Sample Control" 4 | 5 | include_controls "linux-baseline" do 6 | end -------------------------------------------------------------------------------- /AdminUser.py: -------------------------------------------------------------------------------- 1 | _author_ = 'panong' 2 | 3 | use admin 4 | 5 | db.createUser({ 6 | user: "Albus", 7 | pwd: "Dumbledore", 8 | roles: ["root"] 9 | }) 10 | 11 | -------------------------------------------------------------------------------- /Inspec/files/inspec/docker/controls/docker.rb: -------------------------------------------------------------------------------- 1 | # copyright: 2019, Patricia Anong 2 | 3 | title "PAnong Sample Control" 4 | 5 | include_controls "docker-baseline" do 6 | end 7 | -------------------------------------------------------------------------------- /Inspec/modules/instance/outputs.tf: -------------------------------------------------------------------------------- 1 | output "instance_public_address" { 2 | value = google_compute_instance.inspec_instance[0].network_interface[0].access_config[0].nat_ip 3 | } -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PatriciaAnong/Blog/HEAD/CDK/AWS_CDK/automation_rocks/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /Automation/README.md: -------------------------------------------------------------------------------- 1 | # Automation in the Cloud 2 | The corresponding blog post can be found at [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog/2017/11/13/automation-in-the-cloud) -------------------------------------------------------------------------------- /RegularUser.py: -------------------------------------------------------------------------------- 1 | _author_ = 'panong' 2 | 3 | use Gryffindor 4 | 5 | db.createUser({ 6 | user: "Harry", 7 | pwd: "Hogwarts4ever", 8 | roles: ["readWrite"] 9 | }) 10 | -------------------------------------------------------------------------------- /DMS/README.md: -------------------------------------------------------------------------------- 1 | # Configuring Amazon Data Migration Service for Oracle Replication 2 | The corresponding blog post can be found at [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog/2017/9/11/dms) -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/README.md: 
-------------------------------------------------------------------------------- 1 | # Migrate On-Premise Oracle to RDS Oracle 2 | The corresponding blog post can be found at [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog/2017/7/03/rds-oracle) -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/__pycache__/automation_rocks_stack.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PatriciaAnong/Blog/HEAD/CDK/AWS_CDK/automation_rocks/__pycache__/automation_rocks_stack.cpython-38.pyc -------------------------------------------------------------------------------- /Inspec/files/inspec/docker/threshold.yml: -------------------------------------------------------------------------------- 1 | compliance: 2 | min: 40 3 | failed: 4 | critical: 5 | max: 10 6 | error: 7 | total: 8 | max: 5 9 | critical: 10 | max: 5 11 | high: 12 | max: 5 -------------------------------------------------------------------------------- /Inspec/files/inspec/gcp/threshold.yml: -------------------------------------------------------------------------------- 1 | compliance: 2 | min: 40 3 | failed: 4 | critical: 5 | max: 10 6 | error: 7 | total: 8 | max: 5 9 | critical: 10 | max: 5 11 | high: 12 | max: 5 -------------------------------------------------------------------------------- /Inspec/files/inspec/linux/threshold.yml: -------------------------------------------------------------------------------- 1 | compliance: 2 | min: 40 3 | failed: 4 | critical: 5 | max: 10 6 | error: 7 | total: 8 | max: 5 9 | critical: 10 | max: 5 11 | high: 12 | max: 5 -------------------------------------------------------------------------------- /Inspec/files/inspec/ssh/threshold.yml: -------------------------------------------------------------------------------- 1 | compliance: 2 | min: 40 3 | failed: 4 | critical: 5 | max: 10 6 | error: 7 | total: 8 | max: 5 9 | critical: 10 | max: 5 11 | high: 12 | max: 5 -------------------------------------------------------------------------------- /helm_example/README.md: -------------------------------------------------------------------------------- 1 | # Packaging Kubernetes Applications using Helm 2 | The corresponding blog post can be found at [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog/2019/2/27/packaging-kubernetes-applications-using-helm) -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/certs/values.yaml.tpl: -------------------------------------------------------------------------------- 1 | certs: 2 | name: ${name} 3 | env: ${env} 4 | project: ${project} 5 | iac: "deployed-via-terraform" 6 | email: ${email} 7 | dns_secret_name: ${dns_secret_name} -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/README.md: -------------------------------------------------------------------------------- 1 | # Deploy Nginx Ingress and Cert Manager on a GKE Cluster using Terraform 2 | 3 | The corresponding blog post can be found at [Patricia-Anong.com/Blog](https://Patricia-Anong.com/blog/2017/9/11/dms) 4 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/data_sources.tf: -------------------------------------------------------------------------------- 1 | locals { 2 | env = terraform.workspace 3 | common_tags = { 4 | iac = 
"deployed-via-terraform" 5 | environment = local.env 6 | owner = "patricia-anong" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/gke/data_sources.tf: -------------------------------------------------------------------------------- 1 | locals { 2 | 3 | gke_name = "gke-${var.name_prefix}-${var.environment}-${var.region}" 4 | 5 | gke_node_name = "gke-node-${var.name_prefix}-${var.environment}-${var.region}" 6 | 7 | } -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/automation_rocks.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | aws-cdk.aws_cloudwatch 2 | aws-cdk.aws_cloudwatch_actions 3 | aws-cdk.aws_ec2 4 | aws-cdk.aws_events 5 | aws-cdk.aws_sns 6 | aws-cdk.aws_sns_subscriptions 7 | aws-cdk.core==1.76.0 8 | -------------------------------------------------------------------------------- /helm_example/templates/configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: configmap 5 | namespace: helm-example 6 | data: 7 | NGINX_PORT: "{{ .Values.helm_example.service.externalPort }}" 8 | 9 | 10 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/variables.tf: -------------------------------------------------------------------------------- 1 | variable project {} 2 | 3 | variable name_prefix {} 4 | 5 | variable region {} 6 | 7 | variable master_authorized_networks_config {} 8 | 9 | variable shielded_instance_config {} 10 | 11 | variable enable_apis {} -------------------------------------------------------------------------------- /Inspec/files/inspec/gcp/attributes.yml: -------------------------------------------------------------------------------- 1 | # Below is to be uncommented and set with your GCP project ID: 2 | gcp_project_id: inspecplaygroundgcp 3 | region_name: us-east1 4 | zone: us-east1-b 5 | instance_name: panong-test-inspec-instance 6 | bucket: inspec-files-bucket -------------------------------------------------------------------------------- /helm_example/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | description: A Helm chart example for Patricia Anong's Blog 3 | name: helm_example 4 | version: 0.1.0 5 | maintainers: 6 | - name: Patricia.Anong 7 | email: PatriciaAnong@gmail.com 8 | url: patricia-anong.com 9 | -------------------------------------------------------------------------------- /Automation/hosts: -------------------------------------------------------------------------------- 1 | [local] 2 | 127.0.0.1 3 | 4 | [local:vars] 5 | aws_access_key=AHGGDSGLJJ746VKJLH 6 | aws_secret_key=AG5HK88N7D56Ygkhk86y88gd6ff 7 | ansible_connection=local 8 | ansible_python_interpreter=/usr/bin/python 9 | EC2_REGION=us-east-1 10 | AWS_REGION=us-east-1 11 | -------------------------------------------------------------------------------- /CDK/AWS_CDK/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py", 3 | "context": { 4 | "@aws-cdk/core:enableStackNameDuplicates": "true", 5 | "aws-cdk:enableDiffNoFail": "true", 6 | "@aws-cdk/core:stackRelativeExports": "true", 7 | "@aws-cdk/aws-ecr-assets:dockerIgnoreSupport": true 8 | } 9 | } 10 | 
-------------------------------------------------------------------------------- /CDK/TerraformCDK/cdktf.json: -------------------------------------------------------------------------------- 1 | { 2 | "language": "python", 3 | "app": "python ./main.py", 4 | "terraformProviders": [ 5 | "aws@~> 2.0" 6 | ], 7 | "codeMakerOutput": "imports", 8 | "context": { 9 | "excludeStackIdFromLogicalIds": "true", 10 | "allowSepCharsInLogicalIds": "true" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/certs/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: certs 3 | description: A Helm chart for deploying cert-manager 4 | 5 | type: application 6 | 7 | version: 0.1.0 8 | appVersion: 0.1.0 9 | 10 | maintainers: 11 | - name: Patricia Anong 12 | email: patricia@patricia-anong.com 13 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/versions.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_version = "~> 0.12.25" 3 | required_providers { 4 | google = "~> 3.23.0" 5 | google-beta = "~> 3.23.0" 6 | random = "~> 2.2" 7 | kubernetes = "~> 1.11" 8 | helm = "~> 1.2" 9 | null = "~> 2.1" 10 | } 11 | } -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/automation_rocks.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | README.md 2 | setup.py 3 | automation_rocks/automation_rocks.egg-info/PKG-INFO 4 | automation_rocks/automation_rocks.egg-info/SOURCES.txt 5 | automation_rocks/automation_rocks.egg-info/dependency_links.txt 6 | automation_rocks/automation_rocks.egg-info/requires.txt 7 | automation_rocks/automation_rocks.egg-info/top_level.txt -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/namespace.tf: -------------------------------------------------------------------------------- 1 | resource kubernetes_namespace certs { 2 | count = terraform.workspace != "default" ? 1 : 0 3 | 4 | metadata { 5 | annotations = { 6 | name = local.namespace 7 | } 8 | 9 | labels = merge( 10 | tomap({ "name" = local.namespace }), 11 | var.tags 12 | ) 13 | name = local.namespace 14 | } 15 | } -------------------------------------------------------------------------------- /CDK/AWS_CDK/app.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from aws_cdk import core 4 | 5 | from automation_rocks.automation_rocks_stack import ( 6 | AutomationRocksStack, 7 | nova, 8 | ohio, 9 | ) 10 | 11 | 12 | app = core.App() 13 | AutomationRocksStack(app, "automation-rocks-nova", env=nova) 14 | 15 | AutomationRocksStack(app, "automation-rocks-ohio", env=ohio) 16 | 17 | app.synth() 18 | -------------------------------------------------------------------------------- /Inspec/files/setup-docker-secrets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -f .env-prod ]; then 4 | echo ".env-prod already exists, if you would like to regenerate your secrets, please delete this file and re-run the script." 5 | else 6 | echo ".env-prod does not exist, creating..." 
7 | cat >.env-prod - << EOF 8 | SECRET_KEY_BASE=$(openssl rand -hex 64) 9 | CIPHER_PASSWORD=$(openssl rand -hex 64) 10 | CIPHER_SALT=$(openssl rand -hex 32) 11 | EOF 12 | fi 13 | echo "Done" -------------------------------------------------------------------------------- /Inspec/files/nginx.conf: -------------------------------------------------------------------------------- 1 | user nginx nginx; 2 | worker_processes 2; 3 | error_log /var/log/nginx/error.log; 4 | worker_rlimit_nofile 8192; 5 | 6 | events { 7 | worker_connections 4096; 8 | } 9 | 10 | http { 11 | server { 12 | listen 80 default_server; 13 | listen [::]:80 default_server; 14 | server_name _; 15 | server_tokens off; 16 | root /home/root; 17 | 18 | location / { 19 | index index.html; 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /helm_example/values.yaml: -------------------------------------------------------------------------------- 1 | replicaCount: 1 2 | helm_example: 3 | name: "helm-example" 4 | namespace: "helm-example" 5 | image_pull_policy: "IfNotPresent" 6 | SSL_REDIRECT: "true" 7 | region: "us-east-1" 8 | logging: "3" 9 | image: 10 | repository: jenkins/jenkins 11 | tag: lts 12 | pullPolicy: IfNotPresent 13 | service: 14 | name: helm-example 15 | type: LoadBalancer 16 | externalPort: 80 17 | internalPort: 8080 -------------------------------------------------------------------------------- /Connect.py: -------------------------------------------------------------------------------- 1 | import pymongo 2 | 3 | _author_ = 'panong' 4 | 5 | uri = "mongodb://Harry:Hogwarts4ever@192.142.32.100/gryffindor" 6 | client = pymongo.MongoClient(uri) 7 | database = client['gryffindor'] 8 | collection = database['SortingHat'] 9 | 10 | 11 | def record(): 12 | wizards = collection.find({}) 13 | for person in wizards: 14 | print ("Are you afraid of what you'll hear?\nYour Animagus is a {}, {}".format(person['Animagus'],person['Member'])) 15 | 16 | record() 17 | 18 | -------------------------------------------------------------------------------- /Inspec/files/inspec/ssh/inspec.yml: -------------------------------------------------------------------------------- 1 | name: PAnong-SSH-Inspec-Profile 2 | title: PAnong Sample SSH InSpec Profile 3 | maintainer: Patricia Anong 4 | copyright: Patricia Anong 5 | copyright_email: patriciaanong@gmail.com 6 | license: Apache-2.0 7 | summary: An InSpec Compliance Profile For SSH 8 | version: 0.1.0 9 | inspec_version: '>= 4.4.0' 10 | 11 | depends: 12 | - name: ssh-baseline 13 | url: https://github.com/dev-sec/ssh-baseline/archive/master.tar.gz 14 | 15 | supports: 16 | - platform-name: debian 17 | - platform-name: ubuntu -------------------------------------------------------------------------------- /DMS/DMS_TABLEMAPPINGS.json: -------------------------------------------------------------------------------- 1 | { 2 | "rules": [ 3 | { 4 | "rule-type": "selection", 5 | "rule-id": "1", 6 | "rule-name": "1", 7 | "object-locator": { 8 | "schema-name": "DRWHO", 9 | "table-name": "%" 10 | }, 11 | "rule-action": "include" 12 | }, 13 | { 14 | "rule-type": "transformation", 15 | "rule-id": "2", 16 | "rule-name": "2", 17 | "rule-target": "schema", 18 | "object-locator": { 19 | "schema-name": "DRWHO" 20 | }, 21 | "rule-action": "rename", 22 | "value": "DRWHO" 23 | } 24 | ] 25 | } -------------------------------------------------------------------------------- /Inspec/files/inspec/linux/inspec.yml: 
-------------------------------------------------------------------------------- 1 | name: PAnong-Linux-Inspec-Profile 2 | title: PAnong Sample Linux InSpec Profile 3 | maintainer: Patricia Anong 4 | copyright: Patricia Anong 5 | copyright_email: patriciaanong@gmail.com 6 | license: Apache-2.0 7 | summary: An InSpec Compliance Profile For Linux 8 | version: 0.1.0 9 | inspec_version: '>= 4.4.0' 10 | 11 | depends: 12 | - name: linux-baseline 13 | url: https://github.com/dev-sec/linux-baseline/archive/master.tar.gz 14 | 15 | supports: 16 | - platform-name: debian 17 | - platform-name: ubuntu -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/outputs.tf: -------------------------------------------------------------------------------- 1 | output cluster_name { 2 | value = module.gke.name 3 | } 4 | 5 | output cluster_endpoint { 6 | sensitive = true 7 | value = module.gke.endpoint 8 | } 9 | 10 | output client_certificate { 11 | sensitive = true 12 | value = module.gke.client_certificate 13 | } 14 | 15 | output client_key { 16 | sensitive = true 17 | value = module.gke.client_key 18 | } 19 | 20 | output cluster_ca_certificate { 21 | sensitive = true 22 | value = module.gke.cluster_ca_certificate 23 | } -------------------------------------------------------------------------------- /Inspec/files/inspec/docker/inspec.yml: -------------------------------------------------------------------------------- 1 | name: PAnong-Docker-Inspec-Profile 2 | title: PAnong Sample Docker InSpec Profile 3 | maintainer: Patricia Anong 4 | copyright: Patricia Anong 5 | copyright_email: patriciaanong@gmail.com 6 | license: Apache-2.0 7 | summary: An InSpec Compliance Profile For Docker 8 | version: 0.1.0 9 | inspec_version: '>= 4.4.0' 10 | 11 | depends: 12 | - name: docker-baseline 13 | url: https://github.com/dev-sec/cis-docker-benchmark/archive/master.tar.gz 14 | 15 | supports: 16 | - platform-name: debian 17 | - platform-name: ubuntu -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/data_sources.tf: -------------------------------------------------------------------------------- 1 | locals { 2 | 3 | sa_name = "dns-sa-${var.name_prefix}-${var.environment}" 4 | 5 | namespace = "certs-manager" 6 | 7 | template_vars = { 8 | project = var.project, 9 | 10 | env = var.environment, 11 | 12 | name = "letsencrypt-${var.environment}", 13 | 14 | email = "devops@patricia-anong.com", 15 | 16 | namespace = local.namespace, 17 | 18 | dns_secret_name = kubernetes_secret.dns_sa_credentials[0].metadata[0].name, 19 | 20 | } 21 | 22 | helm_chart_values = templatefile( 23 | "${path.module}/certs/values.yaml.tpl", 24 | local.template_vars 25 | ) 26 | 27 | } -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/nginx_ingress.tf: -------------------------------------------------------------------------------- 1 | resource helm_release ingress { 2 | 3 | count = terraform.workspace != "default" ? 
1 : 0 4 | 5 | name = "nginx" 6 | 7 | repository = "https://kubernetes-charts.storage.googleapis.com" 8 | 9 | chart = "nginx-ingress" 10 | 11 | version = "" 12 | force_update = true 13 | 14 | cleanup_on_fail = true 15 | 16 | set { 17 | name = "rbac.create" 18 | value = true 19 | } 20 | 21 | set { 22 | name = "podSecurityPolicy.enabled" 23 | value = true 24 | } 25 | 26 | set { 27 | name = "controller.publishService.enabled" 28 | value = true 29 | } 30 | 31 | } -------------------------------------------------------------------------------- /Automation/CloudFormationPlaybook.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Run CloudFormation 4 | hosts: "localhost" 5 | connection: "local" 6 | gather_facts: false 7 | 8 | tasks: 9 | - name: launch ansible cloudformation template 10 | cloudformation: 11 | stack_name: "Transformers1" 12 | state: "present" 13 | region: "us-east-1" 14 | disable_rollback: true 15 | template: "autobots.json" 16 | template_parameters: 17 | DBPassword: "NoDeceptic0ns" 18 | DBUser: "OptimusPrime" 19 | KeyName: "autobots" 20 | Subnets: "subnet-3c892210,subnet-e9dcbaa1" 21 | tags: 22 | Stack: "AgeOfExtinction" -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/terraform.tfvars: -------------------------------------------------------------------------------- 1 | project = "panong-blog-gke" 2 | region = "us-east1" 3 | environment = "dev" 4 | name_prefix = "devops-rules" 5 | 6 | master_authorized_networks_config = [ 7 | { 8 | cidr_blocks = [ 9 | { 10 | cidr_block = "86.75.30.9/32" 11 | display_name = "Jenny" 12 | }, 13 | ] 14 | }, 15 | ] 16 | 17 | shielded_instance_config = { 18 | enable_secure_boot = true 19 | enable_integrity_monitoring = true 20 | } 21 | 22 | enable_apis = { 23 | kubernetes = "container.googleapis.com" 24 | iam = "iam.googleapis.com" 25 | cloud_resource = "cloudresourcemanager.googleapis.com" 26 | } -------------------------------------------------------------------------------- /DMS/DMS_CREATE_TASK.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import boto3 4 | 5 | #CONNECT TO DMS VIA BOTO3 CLIENT 6 | client = boto3.client('dms') 7 | 8 | response = client.create_replication_task( 9 | ReplicationTaskIdentifier='dr-who-migration', 10 | SourceEndpointArn='arn:aws:dms:us-east-1:687720138916:endpoint:T2LA4RF4ULC44N3RR2PFTMRZCY', 11 | TargetEndpointArn='arn:aws:dms:us-east-1:687720138916:endpoint:MOAITORVFW3E62MS4A2QXAGVYA', 12 | ReplicationInstanceArn='arn:aws:dms:us-east-1:687720138916:rep:MT3SNM4BY5U4KAEOH4XUZEO4ME', 13 | MigrationType='full-load', 14 | TableMappings='file://DMS_TABLEMAPPINGS.json', 15 | ReplicationTaskSettings='file://DMS_TASKSETTINGS.json' 16 | ) -------------------------------------------------------------------------------- /helm_example/templates/test-svc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ .Values.helm_example.service.name }}-service 5 | namespace: {{ .Values.helm_example.namespace }} 6 | annotations: 7 | service.beta.kubernetes.io/aws-load-balancer-backend-protocol: tcp 8 | spec: 9 | type: {{ .Values.helm_example.service.type }} 10 | ports: 11 | - name: http 12 | port: {{ .Values.helm_example.service.externalPort }} 13 | targetPort: {{ .Values.helm_example.service.internalPort }} 14 | protocol: TCP 15 | selector: 16 | app: {{ 
.Values.helm_example.service.name }} 17 | externalTrafficPolicy: Local 18 | status: 19 | loadBalancer: {} -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/variables.tf: -------------------------------------------------------------------------------- 1 | variable project { 2 | description = "The project ID to host the cluster in" 3 | type = string 4 | } 5 | 6 | variable region { 7 | description = "The location (region or zone) to host the cluster in" 8 | type = string 9 | } 10 | 11 | variable environment { 12 | description = "Environment in which to deploy" 13 | type = string 14 | } 15 | 16 | variable name_prefix { 17 | description = "Company or Application Name appended to full name of a resource" 18 | type = string 19 | } 20 | 21 | variable tags { 22 | description = "Tags to be applied to the deployed resources" 23 | type = map(string) 24 | } -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/install_cli_pip.py: -------------------------------------------------------------------------------- 1 | #BOOTSTRAP AWS CLI 2 | curl -O https://bootstrap.pypa.io/get-pip.py 3 | python get-pip.py 4 | pip install awscli 5 | 6 | 7 | #CONFIRM AWSCLI INSTALLED 8 | aws --version 9 | 10 | #INSTALL BOTO3 -PYTHON INTERFACE TO AWS CLI 11 | pip install awscli boto3 -U --ignore-installed six 12 | 13 | #VERIFY BOTO3 INSTALLATION: IF NOTHING HAPPENS, THE INTALLATION WAS SUCCESSFUL 14 | python -c "import boto3" 15 | 16 | #CONFIGURE AWS 17 | aws configure 18 | AWS Access Key ID [None]: *************CRMA 19 | AWS Secret Access Key [None]: *********************************91O4X 20 | Default region name [None]: us-east-1 21 | Default output format [None]: json -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/Create_Tablespace.py: -------------------------------------------------------------------------------- 1 | #CREATE THE NECESSARY TABLESPACE ON TARGET DB 2 | #!/usr/bin/python 3 | 4 | import os 5 | import sys 6 | from subprocess import Popen, PIPE 7 | 8 | sql = """ 9 | set linesize 120 10 | col owner for a10 11 | col object_name for a30 12 | set wrap off 13 | 14 | CREATE TABLESPACE WONDERWOMAN_DATA 15 | DATAFILE SIZE 100M AUTOEXTEND ON NEXT 100M MAXSIZE UNLIMITED; 16 | """ 17 | 18 | proc = Popen(["sqlplus", "panong/dbalady@WONDERWOMAN"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 19 | proc.stdin.write(sql) 20 | (out, err) = proc.communicate() 21 | 22 | if proc.returncode != 0: 23 | print err 24 | sys.exit(proc.returncode) 25 | else: 26 | print out -------------------------------------------------------------------------------- /CDK/TerraformCDK/help: -------------------------------------------------------------------------------- 1 | ======================================================================================================== 2 | 3 | Your cdktf Python project is ready! 4 | 5 | cat help Prints this message 6 | 7 | Compile: 8 | python3 ./main.py Compile and run the python code. 
9 | 10 | Synthesize: 11 | cdktf synth Synthesize Terraform resources to cdktf.out/ 12 | 13 | Diff: 14 | cdktf diff Perform a diff (terraform plan) for the given stack 15 | 16 | Deploy: 17 | cdktf deploy Deploy the given stack 18 | 19 | Destroy: 20 | cdktf destroy Destroy the given stack 21 | 22 | ======================================================================================================== -------------------------------------------------------------------------------- /Inspec/gcp.tf: -------------------------------------------------------------------------------- 1 | provider "google" { 2 | credentials = "${file("creds.json")}" 3 | project = var.project 4 | region = "us-east1" 5 | } 6 | 7 | module "instance" { 8 | source = "./modules/instance" 9 | namespace = var.namespace 10 | name = var.name 11 | environment = var.environment 12 | machine_type = var.machine_type 13 | enabled = var.enabled 14 | } 15 | 16 | module "bucket" { 17 | source = "./modules/storage" 18 | namespace = var.namespace 19 | name = var.name 20 | environment = var.environment 21 | encryption = var.encryption 22 | enabled = var.enabled 23 | matches_storage_class = var.matches_storage_class 24 | } 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/main.tf: -------------------------------------------------------------------------------- 1 | module gke { 2 | 3 | source = "./modules/gke" 4 | project = var.project 5 | region = var.region 6 | enable_apis = var.enable_apis 7 | environment = local.env 8 | name_prefix = var.name_prefix 9 | tags = [lower(local.common_tags["iac"]), lower(local.common_tags["environment"]), lower(local.common_tags["owner"])] 10 | 11 | master_authorized_networks_config = var.master_authorized_networks_config 12 | 13 | shielded_instance_config = var.shielded_instance_config 14 | 15 | } 16 | 17 | module ingress { 18 | source = "./modules/ingress" 19 | project = var.project 20 | region = var.region 21 | environment = local.env 22 | name_prefix = var.name_prefix 23 | tags = local.common_tags 24 | } -------------------------------------------------------------------------------- /Inspec/files/index.html: -------------------------------------------------------------------------------- 1 | 2 | Welcome to the wonderful world of DevSecOps 3 | 4 | 5 |

Automation is fun.

Thank you for reading my blog and following along!

Check out some of my other code on GitHub.

Check out some of my other blog posts here.

© Patricia Anong, 2019

17 | 18 | -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/create_user.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import boto3 3 | import pprint 4 | 5 | from botocore.exceptions import ClientError 6 | 7 | pp = pprint.PrettyPrinter(indent=4) 8 | 9 | #CONNECT TO IAM VIA BOTO3 CLIENT 10 | client = boto3.client('iam') 11 | 12 | list_users = client.get_user() 13 | 14 | try: 15 | #CREATE USER 16 | user = client.create_user(UserName='bb8') 17 | #ADD USER TO GROUP: 18 | group = client.add_user_to_group( 19 | GroupName='Droids', 20 | UserName='bb8' 21 | ) 22 | #CREATE ACCESS KEY 23 | keys = client.create_access_key(UserName='bb8') 24 | pp.pprint(keys) 25 | pp.pprint(list_users) 26 | except ClientError as e: 27 | pp.pprint(e) 28 | -------------------------------------------------------------------------------- /DMS/DMS_AWSCLI_COMMAND.sh: -------------------------------------------------------------------------------- 1 | #CREATE TASK 2 | aws dms create-replication-task 3 | --replication-task-identifier dr-who-migration 4 | --source-endpoint-arn arn:aws:dms:us-east-1:1111111111111:endpoint:DOCTORWHOISAWOMAN01 5 | --target-endpoint-arn arn:aws:dms:us-east-1:111111111111:endpoint:YUPSHESUREIS02WOWZERS 6 | --replication-instance-arn arn:aws:dms:us-east-1:111111111111:rep:SOCRAZYTHATDOCTORWHOSIAWOMAN03 7 | --migration-type full-load 8 | --table-mappings file:////home/panong/DMS_TABLEMAPPINGS.json 9 | --replication-task-settings file:////home/panong/DMS_TASKSETTINGS.json 10 | 11 | 12 | #START TASK 13 | aws dms start-replication-task 14 | --replication-task-arn arn:aws:dms:us-east-1:1111111111111:task:YUPYOUJUSTSTARTEDTHEDRWHOTASK 15 | --start-replication-task-type start-replication -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/certs/templates/ClusterIssuer.yml: -------------------------------------------------------------------------------- 1 | apiVersion: cert-manager.io/v1alpha2 2 | 3 | kind: ClusterIssuer 4 | 5 | metadata: 6 | name: {{ .Values.certs.name }} 7 | 8 | namespace: {{ .Values.certs.namespace }} 9 | 10 | labels: 11 | environment: {{ .Values.certs.env }} 12 | name: {{ .Values.certs.name }} 13 | iac: {{ .Values.certs.iac }} 14 | 15 | spec: 16 | acme: 17 | server: https://acme-v02.api.letsencrypt.org/directory 18 | email: {{ .Values.certs.email }} 19 | privateKeySecretRef: 20 | name: {{ .Values.certs.name }} 21 | solvers: 22 | - http01: 23 | ingress: 24 | class : nginx 25 | - dns01: 26 | clouddns: 27 | project: {{ .Values.certs.project }} 28 | serviceAccountSecretRef: 29 | name: {{ .Values.certs.dns_secret_name }} 30 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/deploy_cert_manager.tf: -------------------------------------------------------------------------------- 1 | resource helm_release issuer { 2 | count = terraform.workspace != "default" ? 1 : 0 3 | depends_on = [ helm_release.cert-manager ] 4 | 5 | name = "certs" 6 | namespace = local.namespace 7 | chart = "${path.module}/certs" 8 | 9 | force_update = true 10 | cleanup_on_fail = true 11 | recreate_pods = false 12 | reset_values = false 13 | 14 | create_namespace = true 15 | 16 | values = [local.helm_chart_values] 17 | } 18 | 19 | resource helm_release cert-manager { 20 | count = terraform.workspace != "default" ? 
1 : 0 21 | 22 | name = "cert-manager" 23 | namespace = local.namespace 24 | chart = "cert-manager" 25 | repository = "https://charts.jetstack.io" 26 | 27 | force_update = false 28 | create_namespace = true 29 | 30 | set { 31 | name = "installCRDs" 32 | value = true 33 | } 34 | 35 | } -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/Move_Datapump.py: -------------------------------------------------------------------------------- 1 | #MOVE DATAPUMP FILE USING DBLINK 2 | 3 | #!/usr/bin/python 4 | 5 | import os 6 | import sys 7 | from subprocess import Popen, PIPE 8 | 9 | sql = """ 10 | set linesize 120 11 | col owner for a10 12 | col object_name for a30 13 | set wrap off 14 | 15 | BEGIN 16 | DBMS_FILE_TRANSFER.PUT_FILE( 17 | source_directory_object => 'DATA_PUMP_DIR', 18 | source_file_name => 'EXPDAT01.DMP', 19 | destination_directory_object => 'DATA_PUMP_DIR', 20 | destination_file_name => 'DB_COPY.DMP', 21 | destination_database => 'RDS_ORACLE' 22 | ); 23 | END; 24 | / 25 | """ 26 | 27 | proc = Popen(["sqlplus", "dprince/sourceDCComics"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 28 | proc.stdin.write(sql) 29 | (out, err) = proc.communicate() 30 | 31 | if proc.returncode != 0: 32 | print err 33 | sys.exit(proc.returncode) 34 | else: 35 | print out 36 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/providers.tf: -------------------------------------------------------------------------------- 1 | provider google { 2 | project = var.project 3 | region = var.region 4 | credentials = file("creds.json") 5 | } 6 | 7 | 8 | provider google-beta { 9 | project = var.project 10 | region = var.region 11 | credentials = file("creds.json") 12 | } 13 | 14 | # The Kubernetes Provider 15 | provider kubernetes { 16 | host = module.gke.endpoint[0] 17 | client_certificate = module.gke.client_certificate 18 | client_key = module.gke.client_key 19 | cluster_ca_certificate = module.gke.cluster_ca_certificate 20 | } 21 | 22 | # The Helm provider 23 | provider helm { 24 | kubernetes { 25 | host = module.gke.endpoint[0] 26 | client_certificate = module.gke.client_certificate 27 | client_key = module.gke.client_key 28 | cluster_ca_certificate = module.gke.cluster_ca_certificate 29 | } 30 | } 31 | 32 | # The Null Provider 33 | provider null { 34 | } -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/create_dblink.py: -------------------------------------------------------------------------------- 1 | #CREATE TNS ENTRY ON SOURCE ORACLE DB 2 | 3 | WONDERWOMAN = 4 | (DESCRIPTION = 5 | (ADDRESS_LIST = 6 | (ADDRESS = (PROTOCOL = TCP)(HOST = dccomics.chfghaffqaya.us-east-1.rds.amazonaws.com)(PORT = 1521)) 7 | ) 8 | (CONNECT_DATA = 9 | (SERVICE_NAME = DCCOMICS) 10 | ) 11 | ) 12 | 13 | 14 | 15 | # CREATE DBLINK 16 | 17 | #!/usr/bin/python 18 | 19 | import os 20 | import sys 21 | from subprocess import Popen, PIPE 22 | 23 | sql = """ 24 | set linesize 400 25 | col owner for a10 26 | col object_name for a30 27 | 28 | CREATE PUBLIC DATABASE LINK RDS_ORACLE 29 | CONNECT TO DPRINCE IDENTIFIED BY dccomics 30 | USING 'WONDERWOMAN'; 31 | """ 32 | 33 | proc = Popen(["sqlplus", "-S", "/", "as", "sysdba"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 34 | proc.stdin.write(sql) 35 | (out, err) = proc.communicate() 36 | 37 | if proc.returncode != 0: 38 | print err 39 | sys.exit(proc.returncode) 40 | else: 41 | print out 42 | 
-------------------------------------------------------------------------------- /Inspec/terraform.tfvars: -------------------------------------------------------------------------------- 1 | # Compute 2 | machine_type = "n1-standard-1" 3 | 4 | zone= "us-east1-b" 5 | 6 | image = "ubuntu-1804-lts" 7 | 8 | network = "default" 9 | 10 | http_source_ips = [ "127.0.0.0/32" ] 11 | 12 | https_source_ips = [ "127.0.0.0/32" ] 13 | 14 | scopes = [ "compute-rw", "storage-rw" ] 15 | # Shared 16 | namespace = "inspec" 17 | 18 | environment = "test" 19 | 20 | name = "panong" 21 | 22 | enabled = true 23 | 24 | # Storage 25 | location = "US" 26 | 27 | project = "inspecplaygroundgcp" 28 | 29 | force_destroy = true 30 | 31 | storage_class = "MULTI_REGIONAL" 32 | 33 | kms_key_name = "" 34 | 35 | encryption = false 36 | 37 | # lifecycle_rule condition block 38 | age = 10 39 | 40 | with_state = "ANY" 41 | 42 | matches_storage_class = [ "MULTI_REGIONAL", "DURABLE_REDUCED_AVAILABILITY" ] 43 | 44 | num_newer_versions = 10 45 | 46 | # lifecycle_rule action block 47 | action_type = "SetStorageClass" 48 | 49 | action_storage_class = "MULTI_REGIONAL" 50 | 51 | # versioning block 52 | versioning_enabled = true -------------------------------------------------------------------------------- /helm_example/templates/test-deploy.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ .Values.helm_example.name }}-deployment 5 | namespace: {{ .Values.helm_example.namespace }} 6 | spec: 7 | strategy: 8 | type: RollingUpdate 9 | rollingUpdate: 10 | maxSurge: 1 11 | maxUnavailable: 1 12 | revisionHistoryLimit: 1 13 | replicas: 1 14 | selector: 15 | matchLabels: 16 | app: {{ .Values.helm_example.name }} 17 | template: 18 | metadata: 19 | labels: 20 | app: {{ .Values.helm_example.name }} 21 | spec: 22 | containers: 23 | - name: {{ .Values.helm_example.name }} 24 | image: {{ .Values.helm_example.image.repository }} 25 | imagePullPolicy: {{ .Values.helm_example.image.pullPolicy }} 26 | envFrom: 27 | - configMapRef: 28 | name: configmap 29 | ports: 30 | - containerPort: {{ .Values.helm_example.service.internalPort }} 31 | protocol: TCP 32 | name: http 33 | -------------------------------------------------------------------------------- /Configure_Git.sh: -------------------------------------------------------------------------------- 1 | #cd to the directory you want to set up 2 | cd /path/to/empty/directory/ 3 | 4 | #Initialize as a git directory: 5 | git init 6 | 7 | #Create SSH key for GIT: 8 | ssh-keygen -t rsa 9 | 10 | #Copy the ssh key and add to GIT (online){Profile > Settings> SSH and GPG Keys} 11 | cat /home/user/.ssh/id_rsa.pub 12 | 13 | #Config User connections: 14 | git config --global user.name [USER NAME] 15 | git config --global user.email [USER EMAIL] 16 | 17 | #Commit the user name changes: 18 | git commit --amend --reset-author 19 | 20 | #Add remote Repo: 21 | git remote add [REMOTE NAME] [LINK FROM GITHUB TO CLONE AS SSH/HTTPS] 22 | #e.g. 
git remote add Blog git@github.com:PatAnong/Blog.git 23 | 24 | #Verify the remote repo added: 25 | git remote -v 26 | 27 | #Clone the branches from the online repository locally: 28 | git fetch [REMOTE_NAME] 29 | 30 | #Add the files on your laptop: 31 | git add -A 32 | 33 | #Pull the commits from remote: 34 | git pull [REMOTE NAME] [BRANCH] 35 | 36 | #Commit the changes: 37 | git commit -a -m "{Comment]" 38 | 39 | #Push your updates 40 | git push [REMOTE NAME] [BRANCH] 41 | 42 | #Confirm all updates committed: 43 | git status 44 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/gke/outputs.tf: -------------------------------------------------------------------------------- 1 | output name { 2 | description = "The name of the cluster master." 3 | value = google_container_cluster.k8s[*].name 4 | } 5 | 6 | output endpoint { 7 | description = "The IP address of the cluster master." 8 | value = google_container_cluster.k8s[*].endpoint 9 | depends_on = [ 10 | google_container_cluster.k8s, 11 | google_container_node_pool.node_pool, 12 | ] 13 | sensitive = true 14 | } 15 | 16 | output client_certificate { 17 | description = "Public certificate used by clients to authenticate to the cluster endpoint." 18 | value = base64decode(join(",", google_container_cluster.k8s[*].master_auth[0].client_certificate)) 19 | sensitive = true 20 | } 21 | 22 | output client_key { 23 | description = "Private key used by clients to authenticate to the cluster endpoint." 24 | value = base64decode(join(",", google_container_cluster.k8s[*].master_auth[0].client_key)) 25 | sensitive = true 26 | } 27 | 28 | output cluster_ca_certificate { 29 | description = "The public certificate that is the root of trust for the cluster." 30 | value = base64decode(join(",", google_container_cluster.k8s[*].master_auth[0].cluster_ca_certificate)) 31 | sensitive = true 32 | } -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/dns_service_account.tf: -------------------------------------------------------------------------------- 1 | # DNS Service Account 2 | resource google_service_account service_account { 3 | count = terraform.workspace != "default" ? 1 : 0 4 | 5 | account_id = local.sa_name 6 | display_name = "DNS Service Account managed by Terraform" 7 | 8 | } 9 | 10 | resource google_project_iam_member service_account_roles { 11 | 12 | count = terraform.workspace != "default" ? 1 : 0 13 | 14 | role = "roles/dns.admin" 15 | 16 | member = "serviceAccount:${google_service_account.service_account[0].email}" 17 | } 18 | 19 | ## Create Service Account key and Store as Kubernetes Secret 20 | resource google_service_account_key dns_sa_key { 21 | 22 | count = terraform.workspace != "default" ? 1 : 0 23 | 24 | service_account_id = google_service_account.service_account[count.index].name 25 | 26 | } 27 | 28 | resource kubernetes_secret dns_sa_credentials { 29 | 30 | count = terraform.workspace != "default" ? 
1 : 0 31 | 32 | metadata { 33 | name = "clouddns-sa-key" 34 | 35 | labels = merge( 36 | tomap({ "name" = "clouddns-sa-key" }), 37 | var.tags 38 | ) 39 | } 40 | 41 | data = { 42 | "credentials.json" = base64decode(google_service_account_key.dns_sa_key[count.index].private_key) 43 | } 44 | 45 | } 46 | 47 | -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/Clean_Up_RDS_Datapump.py: -------------------------------------------------------------------------------- 1 | #CLEAN UP RDS DATAPUMP DIRECTORY: 2 | #LIST FILES IN DATAPUMP DIRECTORY 3 | 4 | #!/usr/bin/python 5 | 6 | import os 7 | import sys 8 | from subprocess import Popen, PIPE 9 | 10 | sql = """ 11 | set linesize 400 12 | col owner for a10 13 | col object_name for a30 14 | 15 | select * from table(RDSADMIN.RDS_FILE_UTIL.LISTDIR('DATA_PUMP_DIR')) order by mtime; 16 | """ 17 | 18 | proc = Popen(["sqlplus", "-S", "/", "as", "sysdba"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 19 | proc.stdin.write(sql) 20 | (out, err) = proc.communicate() 21 | 22 | if proc.returncode != 0: 23 | print err 24 | sys.exit(proc.returncode) 25 | else: 26 | print out 27 | 28 | 29 | 30 | #DELETE THE FILES NO LONGER NEEDED 31 | 32 | #!/usr/bin/python 33 | 34 | import os 35 | import sys 36 | from subprocess import Popen, PIPE 37 | 38 | sql = """ 39 | set linesize 400 40 | col owner for a10 41 | col object_name for a30 42 | 43 | exec utl_file.fremove('DATA_PUMP_DIR','DB_COPY.DMP'); 44 | """ 45 | 46 | proc = Popen(["sqlplus", "-S", "/", "as", "sysdba"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 47 | proc.stdin.write(sql) 48 | (out, err) = proc.communicate() 49 | 50 | if proc.returncode != 0: 51 | print err 52 | sys.exit(proc.returncode) 53 | else: 54 | print out -------------------------------------------------------------------------------- /Inspec/files/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | 3 | services: 4 | 5 | inspec: 6 | build: 7 | context: . 
8 | image: inspec 9 | container_name: inspec 10 | ports: 11 | - "80:80" 12 | restart: unless-stopped 13 | secrets: 14 | - heimdall_key 15 | volumes: 16 | - "$HOME/Documents:/src" 17 | environment: 18 | - NGINX_PORT=80 19 | - GOOGLE_APPLICATION_CREDENTIALS=/home/root/creds.json 20 | - CHEF_LICENSE=accept-silent 21 | 22 | db: 23 | image: postgres:latest 24 | container_name: heimdall_db 25 | restart: unless-stopped 26 | volumes: 27 | - heimdall_dbdata:/var/lib/postgresql/data 28 | expose: 29 | - "5432" 30 | 31 | web: 32 | image: mitre/heimdall:latest 33 | container_name: heimdall_server 34 | environment: 35 | DATABASE_URL: postgres://postgres@db/heimdall_postgres_production 36 | RAILS_SERVE_STATIC_FILES: "true" 37 | RAILS_ENV: production 38 | HEIMDALL_RELATIVE_URL_ROOT: "" 39 | env_file: .env-prod 40 | restart: unless-stopped 41 | command: /bin/sh -c "rm -f tmp/pids/server.pid && bundle exec rake assets:precompile && bundle exec rails s -p 3000 -b '0.0.0.0'" 42 | ports: 43 | - "3000:3000" 44 | depends_on: 45 | - "db" 46 | - "inspec" 47 | 48 | secrets: 49 | heimdall_key: 50 | file: secrets/heimdall_key 51 | 52 | volumes: 53 | heimdall_dbdata: -------------------------------------------------------------------------------- /CDK/AWS_CDK/setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | 4 | with open("README.md") as fp: 5 | long_description = fp.read() 6 | 7 | 8 | setuptools.setup( 9 | name="automation_rocks", 10 | version="0.0.1", 11 | 12 | description="An empty CDK Python app", 13 | long_description=long_description, 14 | long_description_content_type="text/markdown", 15 | 16 | author="author", 17 | 18 | package_dir={"": "automation_rocks"}, 19 | packages=setuptools.find_packages(where="automation_rocks"), 20 | 21 | install_requires=[ 22 | "aws-cdk.core==1.76.0", 23 | "aws-cdk.aws_cloudwatch", 24 | "aws-cdk.aws_ec2", 25 | "aws-cdk.aws_sns", 26 | "aws-cdk.aws_events", 27 | "aws-cdk.aws_sns_subscriptions", 28 | "aws-cdk.aws_cloudwatch_actions", 29 | ], 30 | 31 | python_requires=">=3.6", 32 | 33 | classifiers=[ 34 | "Development Status :: 4 - Beta", 35 | 36 | "Intended Audience :: Developers", 37 | 38 | "License :: OSI Approved :: Apache Software License", 39 | 40 | "Programming Language :: JavaScript", 41 | "Programming Language :: Python :: 3 :: Only", 42 | "Programming Language :: Python :: 3.6", 43 | "Programming Language :: Python :: 3.7", 44 | "Programming Language :: Python :: 3.8", 45 | 46 | "Topic :: Software Development :: Code Generators", 47 | "Topic :: Utilities", 48 | 49 | "Typing :: Typed", 50 | ], 51 | ) 52 | -------------------------------------------------------------------------------- /Inspec/modules/storage/storage.tf: -------------------------------------------------------------------------------- 1 | resource "google_storage_bucket" "inspec_files" { 2 | count = var.enabled == true ? 1 : 0 3 | name = "inspec-files-bucket" 4 | location = var.location 5 | force_destroy = true 6 | storage_class = var.storage_class 7 | project = "inspecplaygroundgcp" 8 | 9 | lifecycle_rule { 10 | action { 11 | type = var.action_type 12 | storage_class = var.action_storage_class 13 | } 14 | 15 | condition { 16 | age = var.age 17 | with_state = var.with_state 18 | matches_storage_class = var.matches_storage_class 19 | num_newer_versions = var.num_newer_versions 20 | } 21 | } 22 | 23 | versioning { 24 | enabled = var.versioning_enabled 25 | } 26 | 27 | encryption { 28 | default_kms_key_name = var.encryption == "" ? 
"" : "${var.kms_key_name}" 29 | } 30 | } 31 | 32 | resource "null_resource" "UploadFilesToBucket" { 33 | depends_on = [ google_storage_bucket.inspec_files ] 34 | count = var.enabled == true ? 1 : 0 35 | provisioner "local-exec" { 36 | command = "gsutil cp -r . gs://inspec-files-bucket" 37 | } 38 | } 39 | 40 | // resource "google_storage_bucket_object" "" { 41 | // depends_on = [ google_storage_bucket.inspec_files ] 42 | // count = var.enabled == true ? 1 : 0 43 | // name = "script.sh" 44 | // source = "gcp.rb" 45 | // bucket = google_storage_bucket.inspec_files 46 | // } 47 | -------------------------------------------------------------------------------- /Configure cx_Oracle.py: -------------------------------------------------------------------------------- 1 | #Create a directory for all the packages 2 | mkdir /opt/oracle 3 | cd /opt/oracle 4 | unzip /home/panong/Downloads/instantclient-basic-linux.x64-12.2.0.1.0.zip 5 | unzip /home/panong/Downloads/java_ee_sdk-7u3.zie 6 | 7 | #TAR.GZ FOR cx_oracle 8 | tar -zxvf /home/panong/Downloads/cx_Oracle-6.0rc1.tar.gz 9 | #OR RPM 10 | rpm -ivh cx_Oracle-6.0rc1-py27-1.x86_64.rpm 11 | 12 | Configure Instant Client: 13 | 14 | cd /opt/oracle/instantclient_12_2 15 | ln -s libclntsh.so.12.1 libclntsh.so 16 | 17 | 18 | #Download dependencies for cx_Oracle 19 | curl -O https://bootstrap.pypa.io/get-pip.py 20 | python get-pip 21 | yum install python-devel 22 | 23 | 24 | #Install cx_Oracle 25 | export LD_RUN_PATH=/opt/oracle/instantclient_12_2 26 | export ORACLE_HOME=/opt/oracle/instantclient_12_2 27 | pip install cx_Oracle 28 | 29 | 30 | 31 | #Add the Library Path to .bash_profile: 32 | 33 | export LD_LIBRARY_PATH=/opt/oracle/instantclient_12_2 34 | 35 | 36 | 37 | #TEST YOUR CONNECTION: 38 | 39 | #!/usr/bin/env python 40 | 41 | import cx_Oracle 42 | 43 | def query(): 44 | import cx_Oracle 45 | db = cx_Oracle.connect("dprince", "dccomics", "192.142.0.1/WonderWoman") 46 | cursor = db.cursor() 47 | cursor.execute("select dbid,log_mode,open_mode from v$database") 48 | #return cursor.fetchone()[0] 49 | #print cursor.description 50 | for row in cursor: 51 | print row 52 | db.close() 53 | 54 | query() 55 | 56 | -------------------------------------------------------------------------------- /CDK/TerraformCDK/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from constructs import Construct 3 | from cdktf import App, TerraformStack 4 | 5 | from imports.aws import AwsProvider, SnsTopic, CloudwatchMetricAlarm 6 | 7 | class MyStack(TerraformStack): 8 | def __init__(self, scope: Construct, ns: str): 9 | super().__init__(scope, ns) 10 | 11 | # define resources here 12 | AwsProvider(self, 'Aws', region='us-east-1') 13 | 14 | # SNS Topic 15 | BlogTopic = SnsTopic(self, 'Topic', display_name='panong-blog-cdktf') 16 | 17 | # CloudWatch Alarm 18 | CloudwatchMetricAlarm(self, 'PAnongBlogAlarm', 19 | actions_enabled = True, 20 | alarm_actions = [BlogTopic.arn], 21 | alarm_name = 'panong-blog-cdktf', 22 | comparison_operator = 'GreaterThanOrEqualToThreshold', 23 | evaluation_periods = 1, 24 | metric_name = 'VpcEventCount', 25 | namespace = 'CloudTrailMetrics', 26 | period = 300, 27 | statistic = 'Sum', 28 | threshold = 1, 29 | treat_missing_data = 'notBreaching' 30 | ) 31 | 32 | app = App() 33 | MyStack(app, "tf-rocks") 34 | 35 | app.synth() 36 | -------------------------------------------------------------------------------- /Inspec/files/inspec/gcp/inspec.yml: 
-------------------------------------------------------------------------------- 1 | name: PAnong-GCP-Inspec-Profile 2 | title: PAnong Sample GCP InSpec Profile 3 | maintainer: Patricia Anong 4 | copyright: Patricia Anong 5 | copyright_email: patriciaanong@gmail.com 6 | license: Apache-2.0 7 | summary: An InSpec Compliance Profile For GCP 8 | version: 0.1.0 9 | inspec_version: '>= 4.4.0' 10 | 11 | attributes: 12 | - name: gcp_project_id 13 | required: true 14 | description: 'The GCP project identifier.' 15 | type: string 16 | 17 | depends: 18 | - name: inspec-gcp 19 | url: https://github.com/inspec/inspec-gcp/archive/master.tar.gz 20 | # - name: supermarket-linux-baseline 21 | # git: https://github.com/dev-sec/linux-baseline 22 | # branch: master 23 | # #supermarket: dev-sec/linux-baseline 24 | # #url: https://github.com/dev-sec/linux-baseline/archive/master.tar.gz 25 | # - name: supermarket-ssh-baseline 26 | # git: https://github.com/dev-sec/ssh-baseline 27 | # branch: master 28 | # #supermarket: dev-sec/ssh-baseline 29 | # #url: https://github.com/dev-sec/ssh-baseline/archive/master.tar.gz 30 | # # - name: supermarket-docker-baseline 31 | # # supermarket: dev-sec/cis-docker-benchmark 32 | # # #url: https://github.com/dev-sec/cis-docker-benchmark/archive/master.tar.gz 33 | 34 | supports: 35 | - platform: gcp 36 | - platform-name: debian 37 | - platform-name: ubuntu 38 | 39 | inputs: 40 | - name: gcp_project_id 41 | description: 'The GCP project identifier.' 42 | type: string 43 | value: inspecplaygroundgcp -------------------------------------------------------------------------------- /Inspec/files/creds.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "service_account", 3 | "project_id": "inspecplaygroundgcp", 4 | "private_key_id": "1379e80few2633hg632831912810he1578d", 5 | "private_key": "-----BEGIN PRIVATE KEY-----\nYCp83aTGqX/bqkVtzReKohiUemJYebEGp6GoAuIm/qxY+y2nvYmSH2EZp0OCKyCZ\ndrSUUwtZ9FIPRt2jm9r/B6gzjOPysKUgXaDaLzy1sQKBgQD+Vev9ddkTfNr3d71S\nT1PlYVwrlL9IQQQ5XP6CvvBS6mQkg6E2NDS5GaMTw3LTz5/S1MTqLdkHOqlMd1nf\nTTTrrBYo8J0B5Q6XynRvIp4tiZqO3B+R7vzhLQ5YC2MUItp1EzmWRqaQ9i+wBY5G\n4Czsozkr1qvwZplGwncIfyYotsrV7dxgSGfn7QVFXgWjKJmyeGrVDBu+9a0gMTBJ\n/jHSO5K6njQJbguvz/gHi3Wv9NTQaQ/Z28EeZKXcsUynvazG1L6Vkymqx8nu3hzW\nRECo++Jf9wKBgQDXgh7RSkaCSM2R25hEYouIVhMvqn7azpyxrqbKStiAqbPpbM/x\nihMStTKF0Js/9zJKB5hDhk3b3k0OSiORH260M8EjN58St/DBlf9LHF5D39FjNApc\nMdF9TBxXZm6x5UaO5ZdqSrthIfoOgOapW4HKFDYIwk3DkavS+UfHFxc6QQKBgQCZ\na2ToK5PF7iSGMoO9670RJL/CVjS748ZEH9MZEU7zYYGOip0WY3/9hg55XUpLl3aS\nYU2VG+q1veNsmo2Ht7AEqPGnJPdjKms4au+FlNO0gWjGzggG4N5KtBb8KD9Mil0w\nhhjfcd2PfWAVxLmNQhy6RhJZifay56RRscn8m6TwwQKBgQDujJexyK/mxBEmiNZb\nTKEX2clz/OMA1TEg4MQLgXltgZlBSWE4qWV5N87IhYLXzEWgXqLINIqnz9C+ARrk\nwa9ghPkD9puTatZB0qw/XwnTLdKIq6xVs1n2sTh5HCzXx7MIsX7ns2d//k+MLM7p\n077s8kZ8y6I3fp2qbWsWLp27bw==\n-----END PRIVATE KEY-----\n", 6 | "client_email": "patriciaanong@inspecplaygroundgcp.iam.gserviceaccount.com", 7 | "client_id": "739724408343205350", 8 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 9 | "token_uri": "https://oauth2.googleapis.com/token", 10 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 11 | "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/patriciaanong%40inspecplaygroundgcp.iam.gserviceaccount.com" 12 | } 13 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/ingress.tf: 
-------------------------------------------------------------------------------- 1 | resource helm_release wp { 2 | 3 | count = terraform.workspace != "default" ? 1 : 0 4 | 5 | name = "wp" 6 | repository = "https://charts.bitnami.com/bitnami" 7 | chart = "wordpress" 8 | 9 | cleanup_on_fail = true 10 | force_update = true 11 | recreate_pods = false 12 | reset_values = true 13 | 14 | set { 15 | name = "service.type" 16 | value = "ClusterIP" 17 | } 18 | 19 | } 20 | 21 | resource kubernetes_ingress ingress { 22 | 23 | count = terraform.workspace != "default" ? 1 : 0 24 | 25 | metadata { 26 | name = "ingress" 27 | 28 | labels = merge( 29 | tomap({ "name" = local.namespace }), 30 | var.tags 31 | ) 32 | 33 | annotations = { 34 | "cert-manager.io/cluster-issuer" = "letsencrypt-${var.environment}" 35 | 36 | "kubernetes.io/ingress.class" = "nginx" 37 | 38 | } 39 | } 40 | 41 | spec { 42 | backend { 43 | service_name = "${helm_release.wp[count.index].metadata[0].name}-${helm_release.wp[count.index].metadata[0].chart}" 44 | 45 | service_port = 80 46 | } 47 | 48 | rule { 49 | host = "${helm_release.wp[count.index].metadata[0].name}-${var.environment}.com" 50 | http { 51 | path { 52 | backend { 53 | service_name = "${helm_release.wp[count.index].metadata[0].name}-${helm_release.wp[count.index].metadata[0].chart}" 54 | service_port = 80 55 | } 56 | path = "/" 57 | } 58 | } 59 | } 60 | 61 | tls { 62 | hosts = ["${helm_release.wp[count.index].metadata[0].name}-${var.environment}.com"] 63 | 64 | secret_name = "${helm_release.wp[count.index].metadata[0].name}-${var.environment}-ssl-cert" 65 | } 66 | } 67 | } -------------------------------------------------------------------------------- /CDK/AWS_CDK/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Welcome to your CDK Python project! 3 | 4 | This is a blank project for Python development with CDK. 5 | 6 | The `cdk.json` file tells the CDK Toolkit how to execute your app. 7 | 8 | This project is set up like a standard Python project. The initialization 9 | process also creates a virtualenv within this project, stored under the `.venv` 10 | directory. To create the virtualenv it assumes that there is a `python3` 11 | (or `python` for Windows) executable in your path with access to the `venv` 12 | package. If for any reason the automatic creation of the virtualenv fails, 13 | you can create the virtualenv manually. 14 | 15 | To manually create a virtualenv on MacOS and Linux: 16 | 17 | ``` 18 | $ python3 -m venv .venv 19 | ``` 20 | 21 | After the init process completes and the virtualenv is created, you can use the following 22 | step to activate your virtualenv. 23 | 24 | ``` 25 | $ source .venv/bin/activate 26 | ``` 27 | 28 | If you are on a Windows platform, you would activate the virtualenv like this: 29 | 30 | ``` 31 | % .venv\Scripts\activate.bat 32 | ``` 33 | 34 | Once the virtualenv is activated, you can install the required dependencies. 35 | 36 | ``` 37 | $ pip install -r requirements.txt 38 | ``` 39 | 40 | At this point you can now synthesize the CloudFormation template for this code. 41 | 42 | ``` 43 | $ cdk synth 44 | ``` 45 | 46 | To add additional dependencies, for example other CDK libraries, just add 47 | them to your `setup.py` file and rerun the `pip install -r requirements.txt` 48 | command.
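For example, a minimal sketch of that edit (the `aws-cdk.aws_s3` module is only an illustration; substitute whichever construct library your stack actually imports):

```
install_requires=[
    "aws-cdk.core==1.76.0",
    "aws-cdk.aws_s3==1.76.0",  # hypothetical new dependency, pinned to match aws-cdk.core
    # ...existing entries...
],
```

Keeping every `aws-cdk.*` module on the same version as `aws-cdk.core` helps avoid version-mismatch errors when the app is synthesized.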
49 | 50 | ## Useful commands 51 | 52 | * `cdk ls` list all stacks in the app 53 | * `cdk synth` emits the synthesized CloudFormation template 54 | * `cdk deploy` deploy this stack to your default AWS account/region 55 | * `cdk diff` compare deployed stack with current state 56 | * `cdk docs` open CDK documentation 57 | 58 | Enjoy! 59 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/gke/variables.tf: -------------------------------------------------------------------------------- 1 | variable project { 2 | description = "The project ID to host the cluster in" 3 | type = string 4 | } 5 | 6 | variable region { 7 | description = "The region to host the cluster in" 8 | type = string 9 | } 10 | 11 | variable environment { 12 | description = "Environment in which to deploy" 13 | type = string 14 | } 15 | 16 | variable name_prefix { 17 | description = "Company or Application Name appended to full name of a resource" 18 | type = string 19 | } 20 | 21 | variable master_authorized_networks_config { 22 | description = "The desired configuration options for master authorized networks. Omit the nested cidr_blocks attribute to disallow external access (except the cluster node IPs, which GKE automatically whitelists)." 23 | } 24 | 25 | variable basic_auth_username { 26 | description = "The username used for basic auth; set both this and `basic_auth_password` to \"\" to disable basic auth." 27 | type = string 28 | default = "" 29 | } 30 | 31 | variable basic_auth_password { 32 | description = "The password used for basic auth; set both this and `basic_auth_username` to \"\" to disable basic auth." 33 | type = string 34 | default = "" 35 | } 36 | 37 | variable enable_client_certificate_authentication { 38 | description = "Whether to enable authentication by x509 certificates. With ABAC disabled, these certificates are effectively useless." 39 | type = bool 40 | default = false 41 | } 42 | 43 | variable shielded_instance_config { 44 | description = "Enables monitoring and attestation of the boot integrity of the instance." 45 | } 46 | 47 | variable tags { 48 | description = "Tags to be applied to the deployed resources" 49 | type = list(string) 50 | } 51 | 52 | variable enable_apis { 53 | description = "APIs to enable for GCP project" 54 | type = map(string) 55 | } -------------------------------------------------------------------------------- /Inspec/modules/instance/variables.tf: -------------------------------------------------------------------------------- 1 | # Shared 2 | variable "namespace" { 3 | type = string 4 | default = "" 5 | description = "Namespace, e.g. organization name" 6 | } 7 | 8 | variable "environment" { 9 | type = string 10 | default = "" 11 | description = "Environment, e.g. dev, test, prod" 12 | } 13 | 14 | variable "name" { 15 | type = string 16 | default = "" 17 | description = "Name, e.g. jenkins" 18 | } 19 | 20 | variable "enabled" { 21 | type = bool 22 | default = true 23 | description = "Set to false to prevent the module from creating any resources" 24 | } 25 | 26 | variable "project" { 27 | type = string 28 | default = "" 29 | description = "The project in which the resource will be created. If left blank, the provider project is used." 
30 | } 31 | 32 | # Compute 33 | variable "machine_type" { 34 | type = string 35 | description = "Machine Type, e.g. n1-standard-1" 36 | } 37 | 38 | variable "zone" { 39 | type = string 40 | default = "us-east1-b" 41 | description = "Zone to create compute resource" 42 | } 43 | 44 | variable "image" { 45 | type = string 46 | default = "ubuntu-1804-lts" 47 | description = "OS Image for Compute Resource" 48 | } 49 | 50 | variable "network" { 51 | type = string 52 | default = "default" 53 | description = "Network for Compute Resource" 54 | } 55 | 56 | variable "http_source_ips" { 57 | type = list(string) 58 | default = [ "127.0.0.0/32" ] 59 | description = "The IPs that can reach the compute resource via http." 60 | } 61 | 62 | variable "https_source_ips" { 63 | type = list(string) 64 | default = [ "127.0.0.0/32" ] 65 | description = "The IPs that can reach the compute resource via https." 66 | } 67 | 68 | variable "scopes" { 69 | type = list(string) 70 | default = [ "compute-rw", "storage-rw" ] 71 | description = "The scopes of the service account associated with the compute resources." 72 | } -------------------------------------------------------------------------------- /CDK/AWS_CDK/automation_rocks/automation_rocks_stack.py: -------------------------------------------------------------------------------- 1 | from aws_cdk import ( 2 | core, 3 | aws_cloudwatch as cw, 4 | aws_ec2 as ec2, 5 | aws_sns as sns, 6 | aws_events as events, 7 | aws_sns_subscriptions as subscriptions, 8 | aws_cloudwatch_actions as cw_actions, 9 | ) 10 | 11 | from aws_cdk.core import App, Construct, Stack 12 | 13 | nova = core.Environment(account="000000000000", region="us-east-1") 14 | ohio = core.Environment(region="us-east-2") 15 | 16 | class AutomationRocksStack(core.Stack): 17 | 18 | def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None: 19 | super().__init__(scope, construct_id, **kwargs) 20 | 21 | # The code that defines your stack goes here 22 | 23 | sns_topic = sns.Topic(self, 'Topic') 24 | 25 | snsEmail = core.CfnParameter(self, 26 | 'SNSEmail', 27 | default = 'PAnong@automation_rocks.com', 28 | description = 'Email Endpoint for SNS Notifications', 29 | type = 'String' 30 | ) 31 | 32 | email = sns_topic.add_subscription(subscriptions.EmailSubscription( 33 | snsEmail.value_as_string 34 | ) 35 | ) 36 | 37 | cwAlarm = cw.CfnAlarm(self, 'VPCAlarm', 38 | actions_enabled=True, 39 | alarm_actions=[sns_topic.topic_arn], 40 | alarm_description="A CloudWatch Alarm that triggers when changes are made to the VPC.", 41 | comparison_operator="GreaterThanOrEqualToThreshold", 42 | evaluation_periods=1, 43 | treat_missing_data="notBreaching", 44 | threshold=1, 45 | metric_name="VpcEventCount", 46 | namespace="CloudTrailMetrics", 47 | period=300, 48 | statistic="Sum", 49 | ) 50 | -------------------------------------------------------------------------------- /GKE_Cluster_with_Nginx_Ingress/modules/ingress/certs/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* vim: set filetype=mustache: */}} 2 | {{/* 3 | Expand the name of the chart. 4 | */}} 5 | {{- define "certs.name" -}} 6 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} 7 | {{- end -}} 8 | 9 | {{/* 10 | Create a default fully qualified app name. 11 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 12 | If release name contains chart name it will be used as a full name.
13 | */}} 14 | {{- define "certs.fullname" -}} 15 | {{- if .Values.fullnameOverride -}} 16 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} 17 | {{- else -}} 18 | {{- $name := default .Chart.Name .Values.nameOverride -}} 19 | {{- if contains $name .Release.Name -}} 20 | {{- .Release.Name | trunc 63 | trimSuffix "-" -}} 21 | {{- else -}} 22 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} 23 | {{- end -}} 24 | {{- end -}} 25 | {{- end -}} 26 | 27 | {{/* 28 | Create chart name and version as used by the chart label. 29 | */}} 30 | {{- define "certs.chart" -}} 31 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} 32 | {{- end -}} 33 | 34 | {{/* 35 | Common labels 36 | */}} 37 | {{- define "certs.labels" -}} 38 | helm.sh/chart: {{ include "certs.chart" . }} 39 | {{ include "certs.selectorLabels" . }} 40 | {{- if .Chart.AppVersion }} 41 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 42 | {{- end }} 43 | app.kubernetes.io/managed-by: {{ .Release.Service }} 44 | {{- end -}} 45 | 46 | {{/* 47 | Selector labels 48 | */}} 49 | {{- define "certs.selectorLabels" -}} 50 | app.kubernetes.io/name: {{ include "certs.name" . }} 51 | app.kubernetes.io/instance: {{ .Release.Name }} 52 | {{- end -}} 53 | 54 | {{/* 55 | Create the name of the service account to use 56 | */}} 57 | {{- define "certs.serviceAccountName" -}} 58 | {{- if .Values.serviceAccount.create -}} 59 | {{ default (include "certs.fullname" .) .Values.serviceAccount.name }} 60 | {{- else -}} 61 | {{ default "default" .Values.serviceAccount.name }} 62 | {{- end -}} 63 | {{- end -}} 64 | -------------------------------------------------------------------------------- /Inspec/files/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx:latest 2 | USER root 3 | 4 | ARG INSTANCE_NAME=panong-test-inspec-instance 5 | ARG PROJECT_NAME=inspecplaygroundgcp 6 | 7 | # Go, Make, Docker, Python, jq, git, ruby 8 | RUN apt-get -qq update -y && \ 9 | apt-get -qq upgrade -y && \ 10 | apt-get -qq install --no-install-recommends --no-install-suggests -y \ 11 | zip jq libffi-dev openssl wget curl \ 12 | gcc libc-dev build-essential unzip \ 13 | golang-go make git openrc vim \ 14 | python3 python3-dev python3-pip \ 15 | libffi-dev libssl-dev gnupg docker.io \ 16 | zlib1g-dev ruby-full openssh-server \ 17 | && pip3 install --no-cache-dir --upgrade pip \ 18 | && ln -s /usr/bin/python3 /usr/bin/python 19 | 20 | # Python requirements 21 | RUN pip install -qU pip setuptools \ 22 | && wget -qO /usr/bin/docker-compose https://github.com/docker/compose/releases/download/1.24.1/docker-compose-`uname -s`-`uname -m` \ 23 | && chmod +x /usr/bin/docker-compose 24 | 25 | # GCloud Requirements 26 | RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list \ 27 | && curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - \ 28 | && apt-get -qq update -y \ 29 | && apt-get -qq install google-cloud-sdk -y 30 | 31 | # Ruby requirements 32 | RUN gem install -q --silent --no-ri --no-rdoc bundler ffi \ 33 | etc nokogiri inspec-bin bigdecimal inspec_tools \ 34 | ed25519 bcrypt_pbkdf 35 | 36 | # Configure Nginx 37 | COPY nginx.conf /etc/nginx/nginx.conf 38 | 39 | RUN cat /etc/nginx/nginx.conf \ 40 | && nginx -t 41 | 42 | # Configure Inspec to ssh into GCP Compute 
Servers 43 | COPY inspec /home/root/inspec 44 | COPY creds.json /home/root 45 | 46 | RUN gcloud auth activate-service-account --key-file=/home/root/creds.json \ 47 | && gcloud config set project ${PROJECT_NAME} 48 | 49 | RUN eval `ssh-agent -s` \ 50 | && mkdir -p /root/.ssh \ 51 | && chmod 0700 /root/.ssh \ 52 | && gcloud compute ssh root@${INSTANCE_NAME} --zone us-east1-b --quiet 53 | 54 | WORKDIR /home/root/inspec/gcp -------------------------------------------------------------------------------- /Inspec/files/tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export EMAIL=patriciaanong@gmail.com 4 | export CIRCLE=PAnong_Inspec_Blog 5 | export API_KEY=`cat /run/secrets/heimdall_key` 6 | 7 | # Run Inspec Profiles 8 | 9 | inspec exec . -t gcp:// --input-file attributes.yml --reporter json:/src/gcp-inspec-instance-results.json || true 10 | inspec_tools compliance -j /src/gcp-inspec-instance-results.json -f threshold.yml 11 | inspec_tools summary -j /src/gcp-inspec-instance-results.json -o /src/gcp-summary.json -c 12 | 13 | # CIS Docker Baseline 14 | cd ../docker 15 | inspec exec . -i ~/.ssh/google_compute_engine -t ssh://root@34.73.144.37 --reporter json:/src/docker-inspec-instance-results.json --input-file attributes.yml 16 | inspec_tools compliance -j /src/docker-inspec-instance-results.json -f threshold.yml 17 | inspec_tools summary -j /src/docker-inspec-instance-results.json -o /src/docker-summary.json -c 18 | 19 | # Linux Baseline 20 | cd ../linux 21 | inspec exec . -i ~/.ssh/google_compute_engine -t ssh://root@34.73.144.37 --reporter json:/src/linux-inspec-instance-results.json || true 22 | inspec_tools compliance -j /src/linux-inspec-instance-results.json -f threshold.yml 23 | inspec_tools summary -j /src/linux-inspec-instance-results.json -o /src/linux-summary.json -c 24 | 25 | # SSH Baseline 26 | cd ../ssh 27 | inspec exec . 
-i ~/.ssh/google_compute_engine -t ssh://root@34.73.144.37 --reporter json:/src/ssh-inspec-instance-results.json || true 28 | inspec_tools compliance -j /src/ssh-inspec-instance-results.json -f threshold.yml 29 | inspec_tools summary -j /src/ssh-inspec-instance-results.json -o /src/ssh-summary.json -c 30 | 31 | # Push evaluations to Heimdall 32 | curl -F "file=@/src/gcp-inspec-instance-results.json" -F "email=$EMAIL" -F "api_key=$API_KEY" -F "circle=$CIRCLE" heimdall_server:3000/evaluation_upload_api 33 | 34 | curl -F "file=@/src/docker-inspec-instance-results.json" -F "email=$EMAIL" -F "api_key=$API_KEY" -F "circle=$CIRCLE" heimdall_server:3000/evaluation_upload_api 35 | 36 | curl -F "file=@/src/linux-inspec-instance-results.json" -F "email=$EMAIL" -F "api_key=$API_KEY" -F "circle=$CIRCLE" heimdall_server:3000/evaluation_upload_api 37 | 38 | curl -F "file=@/src/ssh-inspec-instance-results.json" -F "email=$EMAIL" -F "api_key=$API_KEY" -F "circle=$CIRCLE" heimdall_server:3000/evaluation_upload_api -------------------------------------------------------------------------------- /Inspec/files/inspec/gcp/controls/gcp.rb: -------------------------------------------------------------------------------- 1 | # copyright: 2019, Patricia Anong 2 | 3 | title "PAnong Sample Control" 4 | 5 | gcp_project_id = attribute("gcp_project_id") 6 | region_name = attribute("region_name") 7 | bucket = attribute("bucket") 8 | zone = attribute("zone") 9 | instance_name = attribute("instance_name", description: "GCP instance Name") 10 | 11 | # you add controls/tests here 12 | control "gcp-project-1.0" do 13 | impact 1.0 14 | title "Ensure project exists." 15 | describe google_project(project: gcp_project_id) do 16 | it { should exist } 17 | its("name") { should eq "InspecPlayGroundGCP" } 18 | end 19 | end 20 | 21 | control "gcp-single-region-0.1" do 22 | impact 0.1 23 | title "Ensure single region has the correct properties." 24 | desc "If us-east1-b is not available or present, re-assess deployment strategy." 25 | describe google_compute_region(project: gcp_project_id, name: region_name) do 26 | its("zone_names") { should include zone } 27 | end 28 | end 29 | 30 | control "gcp-regions-loop-0.3" do 31 | impact 0.3 32 | title "Ensure regions have the correct properties in bulk." 33 | desc "Verify the Regions available to the Project" 34 | google_compute_regions(project: gcp_project_id).region_names.each do |region_name| 35 | describe google_compute_region(project: gcp_project_id, name: region_name) do 36 | it { should be_up } 37 | end 38 | end 39 | end 40 | 41 | control "gcp-storage-bucket-0.5" do 42 | impact 0.5 43 | title "Ensure that the storage bucket was created" 44 | describe google_storage_bucket(name: bucket) do 45 | it { should exist } 46 | its("storage_class") { should eq "MULTI_REGIONAL" } 47 | its("location") { should eq "US" } 48 | end 49 | end 50 | 51 | control "gcp-instance-1.0" do 52 | impact 1.0 53 | title "Ensure the Instance was created and is running" 54 | desc "caveat", "If the Instance does not exist - run terraform apply." 
55 | describe google_compute_instance(project: gcp_project_id, zone: zone, name: instance_name) do 56 | it { should exist } 57 | its("name") { should eq "panong-test-inspec-instance" } 58 | its("machine_type") { should match "n1-standard-1" } 59 | its("cpu_platform") { should match "Intel Haswell" } 60 | its("status") { should eq "RUNNING" } 61 | end 62 | end -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/Export_Source.py: -------------------------------------------------------------------------------- 1 | #####CREATE DATABASE PUMP OF FULL DATABASE 2 | 3 | ####@export.sql 4 | set scan off 5 | set serveroutput on 6 | set escape off 7 | whenever sqlerror exit 8 | DECLARE 9 | h1 number; 10 | errorvarchar varchar2(100):= 'ERROR'; 11 | tryGetStatus number := 0; 12 | begin 13 | h1 := dbms_datapump.open (operation => 'EXPORT', job_mode => 'FULL', job_name => 'EXPORT_ORACLE_MIGRATION', version => 'COMPATIBLE'); 14 | tryGetStatus := 1; 15 | dbms_datapump.set_parallel(handle => h1, degree => 1); 16 | dbms_datapump.add_file(handle => h1, filename => 'EXPDAT.LOG', directory => 'DATA_PUMP_DIR', filetype => 3); 17 | dbms_datapump.set_parameter(handle => h1, name => 'KEEP_MASTER', value => 0); 18 | dbms_datapump.add_file(handle => h1, filename => 'EXPDAT%U.DMP', directory => 'DATA_PUMP_DIR', filesize => '10G', filetype => 1, reusefile => 1); 19 | dbms_datapump.set_parameter(handle => h1, name => 'INCLUDE_METADATA', value => 1); 20 | dbms_datapump.set_parameter(handle => h1, name => 'FLASHBACK_SCN', value => dbms_flashback.get_system_change_number); 21 | dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'AUTOMATIC'); 22 | dbms_datapump.set_parameter(handle => h1, name => 'ESTIMATE', value => 'BLOCKS'); 23 | dbms_datapump.start_job(handle => h1, skip_current => 0, abort_step => 0); 24 | dbms_datapump.detach(handle => h1); 25 | errorvarchar := 'NO_ERROR'; 26 | EXCEPTION 27 | WHEN OTHERS THEN 28 | BEGIN 29 | IF ((errorvarchar = 'ERROR')AND(tryGetStatus=1)) THEN 30 | DBMS_DATAPUMP.DETACH(h1); 31 | END IF; 32 | EXCEPTION 33 | WHEN OTHERS THEN 34 | NULL; 35 | END; 36 | RAISE; 37 | END; 38 | / 39 | 40 | 41 | #RUN THE EXPORT SCRIPT VIA PYTHON 42 | #!/usr/bin/python 43 | 44 | import os 45 | import sys 46 | from subprocess import Popen, PIPE 47 | 48 | sql = """ 49 | set linesize 400 50 | col owner for a10 51 | col object_name for a30 52 | 53 | @export.sql 54 | """ 55 | 56 | proc = Popen(["sqlplus", "-S", "/", "as", "sysdba"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 57 | proc.stdin.write(sql) 58 | (out, err) = proc.communicate() 59 | 60 | if proc.returncode != 0: 61 | print err 62 | sys.exit(proc.returncode) 63 | else: 64 | print out 65 | -------------------------------------------------------------------------------- /Migrate_To_RDS_Oracle/Import_Target.py: -------------------------------------------------------------------------------- 1 | #IMPORT FILES ON RDS 2 | #!/usr/bin/python 3 | 4 | import os 5 | import sys 6 | from subprocess import Popen, PIPE 7 | 8 | sql = """ 9 | set linesize 400 10 | col owner for a10 11 | col object_name for a30 12 | 13 | @import.sql 14 | """ 15 | 16 | proc = Popen(["sqlplus", "-S", "/", "as", "sysdba"], stdout=PIPE, stdin=PIPE, stderr=PIPE) 17 | proc.stdin.write(sql) 18 | (out, err) = proc.communicate() 19 | 20 | if proc.returncode != 0: 21 | print err 22 | sys.exit(proc.returncode) 23 | else: 24 | print out 25 | 26 | 27 | 28 | #@import.sql 29 | set scan off 30 | set serveroutput on 31 | set escape 
off 32 | whenever sqlerror exit 33 | DECLARE 34 | h1 number; 35 | errorvarchar varchar2(100):= 'ERROR'; 36 | tryGetStatus number := 0; 37 | begin 38 | h1 := dbms_datapump.open (operation => 'IMPORT', job_mode => 'SCHEMA', job_name => 'IMPORT_TO_RDS', version => 'COMPATIBLE'); 39 | tryGetStatus := 1; 40 | dbms_datapump.set_parallel(handle => h1, degree => 1); 41 | dbms_datapump.add_file(handle => h1, filename => 'IMPORT.LOG', directory => 'DATA_PUMP_DIR', filetype => 3); 42 | dbms_datapump.set_parameter(handle => h1, name => 'KEEP_MASTER', value => 0); 43 | dbms_datapump.metadata_filter(handle => h1, name => 'SCHEMA_EXPR', value => 'IN(''DPRINCE'')'); 44 | dbms_datapump.add_file(handle => h1, filename => 'DB_COPY.DMP', directory => 'DATA_PUMP_DIR', filetype => 1); 45 | dbms_datapump.set_parameter(handle => h1, name => 'INCLUDE_METADATA', value => 1); 46 | dbms_datapump.metadata_transform (handle => h1, name => 'OID', value => 0 , object_type => null); 47 | dbms_datapump.set_parameter(handle => h1, name => 'DATA_ACCESS_METHOD', value => 'AUTOMATIC'); 48 | dbms_datapump.set_parameter(handle => h1, name => 'TABLE_EXISTS_ACTION', value => 'REPLACE'); 49 | dbms_datapump.set_parameter(handle => h1, name => 'SKIP_UNUSABLE_INDEXES', value => 1); 50 | dbms_datapump.start_job(handle => h1, skip_current => 0, abort_step => 0); 51 | dbms_datapump.detach(handle => h1); 52 | errorvarchar := 'NO_ERROR'; 53 | EXCEPTION 54 | WHEN OTHERS THEN 55 | BEGIN 56 | IF ((errorvarchar = 'ERROR')AND(tryGetStatus=1)) THEN 57 | DBMS_DATAPUMP.DETACH(h1); 58 | END IF; 59 | EXCEPTION 60 | WHEN OTHERS THEN 61 | NULL; 62 | END; 63 | RAISE; 64 | END; 65 | / 66 | -------------------------------------------------------------------------------- /Inspec/modules/instance/instance.tf: -------------------------------------------------------------------------------- 1 | resource "google_compute_instance" "inspec_instance" { 2 | count = var.enabled == true ? 1 : 0 3 | name = "${var.name}-${var.environment}-${var.namespace}-instance" 4 | machine_type = var.machine_type 5 | zone = var.zone 6 | 7 | tags = [ 8 | "http-server", 9 | "https-server" 10 | ] 11 | 12 | boot_disk { 13 | initialize_params { 14 | image = var.image 15 | } 16 | } 17 | 18 | network_interface { 19 | network = var.network 20 | access_config { 21 | } 22 | } 23 | 24 | metadata = { 25 | startup-script = <