├── examples
│   ├── vars
│   │   ├── empty.tf
│   │   ├── c.auto.tfvars
│   │   ├── .gitignore
│   │   ├── terraform.tfvars.py
│   │   ├── c.tf
│   │   ├── a.tf.py
│   │   ├── d.tf.py
│   │   ├── b.tf.py
│   │   └── test_vars.py
│   ├── aws
│   │   ├── files
│   │   │   ├── top.txt
│   │   │   └── subdir
│   │   │       ├── middle.txt
│   │   │       └── subdir2
│   │   │           └── bottom.txt
│   │   ├── .gitignore
│   │   ├── more-files
│   │   │   ├── more-files-here.txt
│   │   │   └── extra
│   │   │       └── bonus.txt
│   │   ├── Makefile
│   │   ├── aws.tf.py
│   │   ├── README.md
│   │   ├── terraform.tf.py
│   │   ├── variables.tf
│   │   ├── test_aws.py
│   │   ├── iam.tf.py
│   │   ├── security-groups.tf.py
│   │   └── s3.tf.py
│   ├── jinja
│   │   ├── terraform.tfvars.j2
│   │   ├── .gitignore
│   │   ├── vars.tf
│   │   ├── resource_from_jinja.tf.j2
│   │   ├── resource_from_terraform.tf
│   │   ├── resource_from_python.tf.py
│   │   ├── outputs.tf
│   │   ├── README.md
│   │   └── test_jinja.py
│   ├── test-framework
│   │   ├── .gitignore
│   │   ├── README.md
│   │   └── test_test_framework.py
│   ├── flatten
│   │   ├── stacks
│   │   │   ├── vpc
│   │   │   │   ├── outputs.tf
│   │   │   │   ├── variables.tf
│   │   │   │   ├── dev
│   │   │   │   │   └── vpc.dev.auto.tfvars
│   │   │   │   ├── prod
│   │   │   │   │   └── vpc.prod.auto.tfvars
│   │   │   │   └── main.tf
│   │   │   ├── iam
│   │   │   │   ├── dev
│   │   │   │   │   └── iam.dev.auto.tfvars
│   │   │   │   ├── outputs.tf
│   │   │   │   ├── prod
│   │   │   │   │   └── iam.prod.auto.tfvars
│   │   │   │   └── main.tf
│   │   │   ├── vpc-peering
│   │   │   │   ├── prod
│   │   │   │   │   └── vpc-peering.prod.auto.tfvars
│   │   │   │   ├── outputs.tf
│   │   │   │   ├── variables.tf
│   │   │   │   ├── aws.tf.py
│   │   │   │   ├── vpc.auto.tfvars.py
│   │   │   │   └── main.tf
│   │   │   └── stack.tf.py
│   │   ├── .gitignore
│   │   ├── modules
│   │   │   └── iam-disable-user
│   │   │       └── main.tf
│   │   ├── pretf.workflow.py
│   │   ├── test_flatten.py
│   │   └── README.md
│   └── README.md
├── tests
│   ├── test_variables_files
│   │   ├── empty.tf
│   │   ├── empty.tf.expected.json
│   │   ├── escaped_strings.tf.expected.json
│   │   ├── announcing_terraform_1.tf.expected.json
│   │   ├── announcing_terraform_3.tf.expected.json
│   │   ├── variables.tfvars.json
│   │   ├── announcing_terraform_2.tf.expected.json
│   │   ├── announcing_terraform_4.tf.expected.json
│   │   ├── escaped_strings.tf
│   │   ├── complex_types.tf
│   │   ├── complex_types.tf.expected.json
│   │   ├── variables.tfvars
│   │   ├── variables.tfvars.json.expected.json
│   │   ├── announcing_terraform_3.tf
│   │   ├── variables.tfvars.expected.json
│   │   ├── announcing_terraform_2.tf
│   │   ├── announcing_terraform_1.tf
│   │   ├── announcing_terraform_4.tf
│   │   ├── variables.tf.expected.json
│   │   └── variables.tf
│   ├── test_api.py
│   ├── test_variables.py
│   ├── test_blocks.py
│   └── test_collections.py
├── pytest.ini
├── pretf
│   ├── pretf
│   │   ├── version.py
│   │   ├── labels.py
│   │   ├── parser.py
│   │   ├── log.py
│   │   ├── cli.py
│   │   ├── exceptions.py
│   │   ├── api.py
│   │   ├── collections.py
│   │   ├── test.py
│   │   ├── command.py
│   │   ├── blocks.py
│   │   ├── util.py
│   │   ├── render.py
│   │   ├── variables.py
│   │   └── workflow.py
│   ├── setup.cfg
│   ├── Makefile
│   └── setup.py
├── stubs
│   ├── hcl2.pyi
│   ├── pytest.pyi
│   ├── boto_source_profile_mfa.pyi
│   └── colorama.pyi
├── pretf.aws
│   ├── setup.cfg
│   ├── Makefile
│   ├── setup.py
│   └── pretf
│       └── aws.py
├── .gitignore
├── mypy.ini
├── Makefile
├── mkdocs.yml
├── docs
│   ├── tutorial
│   │   ├── get-started.md
│   │   ├── dynamic-resources.md
│   │   ├── custom-workflows.md
│   │   ├── direnv-and-asdf-vm.md
│   │   ├── dynamic-references.md
│   │   ├── define-resources.md
│   │   ├── aws-projects.md
│   │   └── terraform-variables.md
│   ├── api
│   │   ├── collections.md
│   │   ├── projects.md
│   │   ├── api.md
│   │   ├── aws.md
│   │   └── workflow.md
│   ├── index.md
│   └── comparisons.md
├── .envrc
├── LICENSE
├── README.md
├── azure-pipelines.yml
└── CHANGELOG.md

/examples/vars/empty.tf:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/examples/aws/files/top.txt:
--------------------------------------------------------------------------------
top one
--------------------------------------------------------------------------------
/tests/test_variables_files/empty.tf:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
[pytest]
addopts = -v
--------------------------------------------------------------------------------
/examples/aws/files/subdir/middle.txt:
--------------------------------------------------------------------------------
middle one
--------------------------------------------------------------------------------
/pretf/pretf/version.py:
--------------------------------------------------------------------------------
__version__ = "0.7.3"
--------------------------------------------------------------------------------
/examples/aws/.gitignore:
--------------------------------------------------------------------------------
.terraform*
*.json
--------------------------------------------------------------------------------
/stubs/hcl2.pyi:
--------------------------------------------------------------------------------
def loads(s: str) -> dict: ...
--------------------------------------------------------------------------------
/tests/test_variables_files/empty.tf.expected.json:
--------------------------------------------------------------------------------
[]
--------------------------------------------------------------------------------
/examples/aws/files/subdir/subdir2/bottom.txt:
--------------------------------------------------------------------------------
bottom one
--------------------------------------------------------------------------------
/examples/aws/more-files/more-files-here.txt:
--------------------------------------------------------------------------------
there are more!
--------------------------------------------------------------------------------
/pretf/setup.cfg:
--------------------------------------------------------------------------------
[options]
python_requires = >= 3.6
--------------------------------------------------------------------------------
/stubs/pytest.pyi:
--------------------------------------------------------------------------------
def xfail(message: str) -> None: ...
--------------------------------------------------------------------------------
/examples/aws/more-files/extra/bonus.txt:
--------------------------------------------------------------------------------
this is a bonus file!
--------------------------------------------------------------------------------
/examples/jinja/terraform.tfvars.j2:
--------------------------------------------------------------------------------
byte_length = {{ 12 }}
--------------------------------------------------------------------------------
/pretf.aws/setup.cfg:
--------------------------------------------------------------------------------
[options]
python_requires = >= 3.6
--------------------------------------------------------------------------------
/tests/test_variables_files/escaped_strings.tf.expected.json:
--------------------------------------------------------------------------------
[]
--------------------------------------------------------------------------------
/examples/aws/Makefile:
--------------------------------------------------------------------------------
.PHONY: test
test:
	pytest -vv
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_1.tf.expected.json:
--------------------------------------------------------------------------------
[]
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_3.tf.expected.json:
--------------------------------------------------------------------------------
[]
--------------------------------------------------------------------------------
/examples/vars/c.auto.tfvars:
--------------------------------------------------------------------------------
five = 5

seven = ["7a", "7b"]
--------------------------------------------------------------------------------
/examples/jinja/.gitignore:
--------------------------------------------------------------------------------
.terraform*
*.json
terraform.tfstate*
--------------------------------------------------------------------------------
/examples/jinja/vars.tf:
--------------------------------------------------------------------------------
variable "byte_length" {
  default = 8
}
--------------------------------------------------------------------------------
/examples/vars/.gitignore:
--------------------------------------------------------------------------------
.terraform*
*.json
terraform.tfstate*
--------------------------------------------------------------------------------
/examples/test-framework/.gitignore:
--------------------------------------------------------------------------------
.terraform*
*.json
terraform.tfstate*
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc/outputs.tf:
--------------------------------------------------------------------------------
output "vpc_id" {
  value = aws_vpc.main.id
}
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc/variables.tf:
--------------------------------------------------------------------------------
variable "vpc_cidr_block" {
  type = string
}
--------------------------------------------------------------------------------
/examples/flatten/stacks/iam/dev/iam.dev.auto.tfvars:
--------------------------------------------------------------------------------
environment = "dev"

stack = "iam-dev"
--------------------------------------------------------------------------------
/examples/flatten/stacks/iam/outputs.tf:
--------------------------------------------------------------------------------
output "user_name" {
  value = aws_iam_user.pretf.name
}
--------------------------------------------------------------------------------
/examples/flatten/stacks/iam/prod/iam.prod.auto.tfvars:
--------------------------------------------------------------------------------
environment = "prod"

stack = "iam-prod"
--------------------------------------------------------------------------------
/examples/vars/terraform.tfvars.py:
--------------------------------------------------------------------------------
def pretf_variables():
    yield {"three": 3}
    yield {"four": 4}
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc-peering/prod/vpc-peering.prod.auto.tfvars:
--------------------------------------------------------------------------------
environment = "prod"

stack = "vpc-peering-prod"
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc/dev/vpc.dev.auto.tfvars:
--------------------------------------------------------------------------------
environment = "dev"

stack = "vpc-dev"

vpc_cidr_block = "10.85.0.0/24"
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc/prod/vpc.prod.auto.tfvars:
--------------------------------------------------------------------------------
environment = "prod"

stack = "vpc-prod"

vpc_cidr_block = "10.90.0.0/24"
--------------------------------------------------------------------------------
/stubs/boto_source_profile_mfa.pyi:
--------------------------------------------------------------------------------
from typing import Any

import boto3

def get_session(**kwargs: Any) -> boto3.Session: ...
--------------------------------------------------------------------------------
/stubs/colorama.pyi:
--------------------------------------------------------------------------------
class Fore:
    CYAN: str
    RED: str

class Style:
    RESET_ALL: str

def init() -> None: ...
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc-peering/outputs.tf:
--------------------------------------------------------------------------------
output "status" {
  value = aws_vpc_peering_connection_accepter.prod_to_dev.accept_status
}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
__pycache__/
.direnv/
.mypy_cache/
.pytest_cache/
.tool-versions
.vscode/
*.egg-info/
build/
dist/
--------------------------------------------------------------------------------
/examples/jinja/resource_from_jinja.tf.j2:
--------------------------------------------------------------------------------
resource "random_id" "from_jinja" {
  byte_length = {{ var.byte_length }}
  prefix      = "jinja-"
}
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
[mypy]
mypy_path = pretf:pretf.aws:stubs
namespace_packages = True
disallow_untyped_calls = True
disallow_untyped_defs = True
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc-peering/variables.tf:
--------------------------------------------------------------------------------
variable "dev_vpc_id" {
  type = string
}

variable "prod_vpc_id" {
  type = string
}
--------------------------------------------------------------------------------
/examples/jinja/resource_from_terraform.tf:
--------------------------------------------------------------------------------
resource "random_id" "from_terraform" {
  byte_length = var.byte_length
  prefix      = "terraform-"
}
--------------------------------------------------------------------------------
/tests/test_variables_files/variables.tfvars.json:
--------------------------------------------------------------------------------
{
    "image_id": "ami-abc123",
    "availability_zone_names": [
        "us-west-1a",
        "us-west-1c"
    ]
}
--------------------------------------------------------------------------------
/examples/vars/c.tf:
--------------------------------------------------------------------------------
variable "five" {
  type = string
}

variable "six" {
  type    = list(string)
  default = ["six1", "six2"]
}

variable "seven" {}
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_2.tf.expected.json:
--------------------------------------------------------------------------------
[
    {
        "name": "base_network_cidr",
        "default": "10.0.0.0/8",
        "source": "announcing_terraform_2.tf"
    }
]
--------------------------------------------------------------------------------
/examples/aws/aws.tf.py:
--------------------------------------------------------------------------------
from pretf.aws import provider_aws


def pretf_blocks(var):
    yield provider_aws(
        region=var.aws_region,
        **var.aws_credentials["nonprod"],
    )
--------------------------------------------------------------------------------
/examples/vars/a.tf.py:
--------------------------------------------------------------------------------
from pretf.blocks import output, variable


def pretf_blocks(var):
    yield variable.one(default=1)
    yield output.one(value=var.one)
    yield variable.two(default=2)
--------------------------------------------------------------------------------
/examples/vars/d.tf.py:
--------------------------------------------------------------------------------
from pretf.blocks import output


def pretf_blocks(var):
    yield output.five(value=var.five)
    yield output.six(value=var.six)
    yield output.seven(value=var.seven)
--------------------------------------------------------------------------------
/examples/flatten/.gitignore:
--------------------------------------------------------------------------------
stacks/*/*/*
!stacks/*/*/*.*.auto.tfvars

# Check with this command:
# find . -type f | git check-ignore --no-index --non-matching --verbose --stdin | grep -P '^.*:.*:[\s!]' | cut -f 2 | sort
--------------------------------------------------------------------------------
/examples/jinja/resource_from_python.tf.py:
--------------------------------------------------------------------------------
from pretf.api import block


def pretf_blocks(var):
    yield block("resource", "random_id", "from_python", {
        "byte_length": var.byte_length,
        "prefix": "python-",
    })
--------------------------------------------------------------------------------
/pretf/pretf/labels.py:
--------------------------------------------------------------------------------
import re

from .blocks import Block


def clean(label: str) -> str:
    return re.sub(r"__+", "_", re.sub(r"[^a-zA-Z0-9]", "_", label))


def get(block: Block) -> str:
    return block._labels[-1]
--------------------------------------------------------------------------------
/examples/aws/README.md:
--------------------------------------------------------------------------------
# Example: AWS resources

This shows how to create various dynamic AWS resources such as IAM users and S3 objects.

It also shows how Pretf can be dropped into a project to generate files without requiring any other configuration or project changes.
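
For illustration, the drop-in workflow can be as small as one file: a minimal sketch of a `*.tf.py` file, assuming the `user_names` variable defined in `variables.tf`, could look like this (the file name is illustrative):

```python
# users.tf.py -- Pretf would write the results to users.tf.json
from pretf.blocks import resource


def pretf_blocks(var):
    # Yield one IAM user block per configured name.
    for name in var.user_names:
        label = name.replace("-", "_")  # block labels must be valid identifiers
        yield resource.aws_iam_user[label](name=name)
```

See `iam.tf.py` in this directory for the real version, which also creates a group and group memberships.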
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_4.tf.expected.json:
--------------------------------------------------------------------------------
[
    {
        "name": "source_image_region",
        "source": "announcing_terraform_4.tf"
    },
    {
        "name": "target_image_regions",
        "source": "announcing_terraform_4.tf"
    }
]
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc/main.tf:
--------------------------------------------------------------------------------
resource "aws_vpc" "main" {
  cidr_block = var.vpc_cidr_block

  enable_dns_support   = true
  enable_dns_hostnames = true

  tags = {
    Name        = "pretf-workspaces-flatten-${var.environment}"
    Environment = var.environment
  }
}
--------------------------------------------------------------------------------
/tests/test_variables_files/escaped_strings.tf:
--------------------------------------------------------------------------------
resource "helm_release" "external_dns" {
  name = "external-dns"

  set {
    name  = "domainFilters"
    value = format("\"[%s]\"", jsonencode(var.domain))
  }

  set {
    name  = "rbac.create"
    value = "true"
  }
}
--------------------------------------------------------------------------------
/tests/test_variables_files/complex_types.tf:
--------------------------------------------------------------------------------
variable "aws_credentials" {
  type = map({
    nonprod = map(string)
    prod    = map(string)
  })
  default = {
    nonprod = {
      profile = "pretf-nonprod"
    }

    prod = {
      profile = "pretf-prod"
    }
  }
}
--------------------------------------------------------------------------------
/examples/vars/b.tf.py:
--------------------------------------------------------------------------------
from pretf.blocks import output, variable


def pretf_blocks(var):
    yield output.two_attr(value=var.two)
    yield output.two_dict(value=var["two"])
    yield variable.three(default=3)
    yield output.three(value=var.three)
    yield variable.four
    yield output.four(value=var.four)
--------------------------------------------------------------------------------
/examples/test-framework/README.md:
--------------------------------------------------------------------------------
# Example: testing with pytest

This shows a test that creates Terraform configuration, runs Terraform, and then makes assertions about its output.
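
For illustration, a minimal test module following this pattern might look like the sketch below (the output name and value are hypothetical):

```python
from pretf import test


class TestExample(test.SimpleTest):
    def test_create(self):
        self.tf.init()

    def test_outputs(self):
        outputs = self.tf.apply()
        assert outputs["example"] == "expected"  # hypothetical output

    @test.always  # runs even if an earlier test fails
    def test_destroy(self):
        self.tf.destroy()
```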

## Usage

Run the following command:

```shell
pytest
```

Or for more verbose output:

```
pytest -v
```
--------------------------------------------------------------------------------
/examples/aws/terraform.tf.py:
--------------------------------------------------------------------------------
from pretf.aws import terraform_backend_s3


def pretf_blocks(var):
    yield terraform_backend_s3(
        bucket="pretf-examples-aws",
        dynamodb_table="pretf-examples-aws",
        key="terraform.tfstate",
        region=var.aws_region,
        **var.aws_credentials["nonprod"],
    )
--------------------------------------------------------------------------------
/examples/jinja/outputs.tf:
--------------------------------------------------------------------------------
output "byte_length" {
  value = var.byte_length
}

output "from_jinja" {
  value = resource.random_id.from_jinja.hex
}

output "from_python" {
  value = resource.random_id.from_python.hex
}

output "from_terraform" {
  value = resource.random_id.from_terraform.hex
}
--------------------------------------------------------------------------------
/tests/test_variables_files/complex_types.tf.expected.json:
--------------------------------------------------------------------------------
[
    {
        "name": "aws_credentials",
        "default": {
            "nonprod": {
                "profile": "pretf-nonprod"
            },
            "prod": {
                "profile": "pretf-prod"
            }
        },
        "source": "complex_types.tf"
    }
]
--------------------------------------------------------------------------------
/examples/flatten/stacks/iam/main.tf:
--------------------------------------------------------------------------------
resource "aws_iam_user" "pretf" {
  name = "pretf-flatten-${var.environment}"
}

# Example of including a local module.
# Note that it is using the "modules" symlink
# created by the pretf.workflow.py file.

module "disable_user" {
  source    = "./modules/iam-disable-user"
  user_name = aws_iam_user.pretf.name
}
--------------------------------------------------------------------------------
/tests/test_variables_files/variables.tfvars:
--------------------------------------------------------------------------------
# https://www.terraform.io/docs/configuration/variables.html

image_id = "ami-abc123"

availability_zone_names = [
  "us-east-1a",
  "us-west-1c",
]

# https://learn.hashicorp.com/terraform/getting-started/variables.html

amis = {
  "us-east-1" = "ami-abc123"
  "us-west-2" = "ami-def456"
}
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc-peering/aws.tf.py:
--------------------------------------------------------------------------------
from pretf.aws import provider_aws


def pretf_blocks(var):
    yield provider_aws(
        alias="dev",
        region=var.aws_region,
        **var.aws_credentials["nonprod"],
    )
    yield provider_aws(
        alias="prod",
        region=var.aws_region,
        **var.aws_credentials["prod"],
    )
--------------------------------------------------------------------------------
/examples/jinja/README.md:
--------------------------------------------------------------------------------
# Example: Jinja2 templates

This shows how to use Jinja2 templates to create Terraform resources and variables.

Pretf renders `*.tf.j2` and `*.tfvars.j2` into `*.tf` and `*.tfvars` in-memory, but then parses them and writes them as JSON files. The extra step of converting them to JSON files makes it easier to clean up generated files and makes for simpler `.gitignore` rules.
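
As a concrete sketch of that pipeline using the files in this directory: `terraform.tfvars.j2` contains `byte_length = {{ 12 }}`, which renders to the HCL `byte_length = 12` and would be written out as a JSON variables file along these lines:

```json
{
    "byte_length": 12
}
```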
--------------------------------------------------------------------------------
/tests/test_variables_files/variables.tfvars.json.expected.json:
--------------------------------------------------------------------------------
[
    {
        "name": "image_id",
        "value": "ami-abc123",
        "source": "variables.tfvars.json"
    },
    {
        "name": "availability_zone_names",
        "value": [
            "us-west-1a",
            "us-west-1c"
        ],
        "source": "variables.tfvars.json"
    }
]
--------------------------------------------------------------------------------
/examples/flatten/modules/iam-disable-user/main.tf:
--------------------------------------------------------------------------------
variable "user_name" {}

data "aws_iam_policy_document" "deny_all" {
  statement {
    effect = "Deny"

    actions = [
      "*",
    ]

    resources = [
      "*",
    ]
  }
}

resource "aws_iam_user_policy" "disabled" {
  name   = "${var.user_name}-disabled"
  user   = var.user_name
  policy = data.aws_iam_policy_document.deny_all.json
}
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc-peering/vpc.auto.tfvars.py:
--------------------------------------------------------------------------------
from pretf.api import get_outputs, log


def pretf_variables(var):
    dev = get_outputs("vpc/dev")
    if not dev:
        raise log.bad("vpc/dev stack has no outputs")

    prod = get_outputs("vpc/prod")
    if not prod:
        raise log.bad("vpc/prod stack has no outputs")

    yield {
        "dev_vpc_id": dev["vpc_id"],
        "prod_vpc_id": prod["vpc_id"],
    }
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_3.tf:
--------------------------------------------------------------------------------
# https://www.hashicorp.com/blog/announcing-terraform-0-12#generalized-type-system

module "network" {
  source = "./modules/network"

  base_network_cidr = "10.0.0.0/8"
}

module "consul_cluster" {
  source = "./modules/aws-consul-cluster"

  vpc_id         = module.network.vpc_id
  vpc_cidr_block = module.network.vpc_cidr_block
  subnet_ids     = module.network.subnet_ids
}
--------------------------------------------------------------------------------
/examples/aws/variables.tf:
--------------------------------------------------------------------------------
variable "aws_credentials" {
  default = {
    nonprod = {
      profile = "pretf-nonprod"
    }
  }
}

variable "aws_region" {
  default = "eu-west-1"
}

variable "envtype" {
  default = "nonprod"
}

variable "user_names" {
  default = ["pretf-iam-user-1", "pretf-iam-user-2"]
}

variable "security_group_allowed_cidrs" {
  default = [
    "1.1.1.1/32",
    "8.8.8.8/32",
    "10.0.0.0/24",
    "192.168.0.0/24",
  ]
}
--------------------------------------------------------------------------------
/pretf/Makefile:
--------------------------------------------------------------------------------
$(eval NAME := $(shell python setup.py --name))
$(eval PY_NAME := $(shell python setup.py --name | sed 's/-/_/g'))
$(eval VERSION := $(shell python setup.py --version))

SDIST = dist/$(NAME)-$(VERSION).tar.gz
WHEEL = dist/$(PY_NAME)-$(VERSION)-py3-none-any.whl

.PHONY: build
build: clean $(SDIST) $(WHEEL)

.PHONY: clean
clean:
	rm -rf build dist *.egg-info

.PHONY: upload
upload: $(SDIST) $(WHEEL)
	twine upload $(SDIST) $(WHEEL)

$(SDIST):
	python setup.py sdist

$(WHEEL):
	python setup.py bdist_wheel
--------------------------------------------------------------------------------
/tests/test_variables_files/variables.tfvars.expected.json:
--------------------------------------------------------------------------------
[
    {
        "name": "image_id",
        "value": "ami-abc123",
        "source": "variables.tfvars"
    },
    {
        "name": "availability_zone_names",
        "value": [
            "us-east-1a",
            "us-west-1c"
        ],
        "source": "variables.tfvars"
    },
    {
        "name": "amis",
        "value": {
            "us-east-1": "ami-abc123",
            "us-west-2": "ami-def456"
        },
        "source": "variables.tfvars"
    }
]
--------------------------------------------------------------------------------
/pretf.aws/Makefile:
--------------------------------------------------------------------------------
$(eval NAME := $(shell python setup.py --name))
$(eval PY_NAME := $(shell python setup.py --name | sed 's/-/_/g'))
$(eval VERSION := $(shell python setup.py --version))

SDIST = dist/$(NAME)-$(VERSION).tar.gz
WHEEL = dist/$(PY_NAME)-$(VERSION)-py3-none-any.whl

.PHONY: build
build: clean $(SDIST) $(WHEEL)

.PHONY: clean
clean:
	rm -rf build dist *.egg-info

.PHONY: upload
upload: $(SDIST) $(WHEEL)
	twine upload $(SDIST) $(WHEEL)

$(SDIST):
	python setup.py sdist

$(WHEEL):
	python setup.py bdist_wheel
--------------------------------------------------------------------------------
/examples/vars/test_vars.py:
--------------------------------------------------------------------------------
from pretf import test


class TestVars(test.SimpleTest):
    def test_create(self):
        self.pretf.init()

    def test_outputs(self):
        outputs = self.pretf.apply()
        assert outputs == {
            "five": 5,
            "four": 4,
            "one": 1,
            "seven": ["7a", "7b"],
            "six": ["six1", "six2"],
            "three": 3,
            "two_attr": 2,
            "two_dict": 2,
        }

    @test.always
    def test_destroy(self):
        self.pretf.destroy()
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_2.tf:
--------------------------------------------------------------------------------
# https://www.hashicorp.com/blog/announcing-terraform-0-12#first-class-expression-syntax

variable "base_network_cidr" {
  default = "10.0.0.0/8"
}

resource "google_compute_network" "example" {
  name                    = "test-network"
  auto_create_subnetworks = false
}

resource "google_compute_subnetwork" "example" {
  count = 4

  name          = "test-subnetwork"
  ip_cidr_range = cidrsubnet(var.base_network_cidr, 4, count.index)
  region        = "us-central1"
  network       = google_compute_network.custom-test.self_link
}
--------------------------------------------------------------------------------
/examples/jinja/test_jinja.py:
--------------------------------------------------------------------------------
from pretf import test


class TestJinja(test.SimpleTest):
    def test_init(self):
        self.pretf.init()

    def test_outputs(self):
        outputs = self.pretf.apply()
        assert outputs.keys() == {"byte_length", "from_jinja", "from_python", "from_terraform"}
        assert outputs["byte_length"] == 12
        assert outputs["from_jinja"].startswith("jinja-")
        assert outputs["from_python"].startswith("python-")
        assert outputs["from_terraform"].startswith("terraform-")

    @test.always
    def test_destroy(self):
        self.pretf.destroy()
--------------------------------------------------------------------------------
/examples/aws/test_aws.py:
--------------------------------------------------------------------------------
from unittest.mock import ANY

from pretf import test


class TestAWS(test.SimpleTest):
    def test_init(self):
        self.pretf.init()

    def test_outputs(self):
        outputs = self.pretf.apply()
        assert outputs == {
            "private_sg_id": ANY,
            "public_sg_id": ANY,
            "total_bytes": 63,
            "total_files": 5,
            "user_pretf_iam_user_1": "pretf-iam-user-1",
            "user_pretf_iam_user_2": "pretf-iam-user-2",
        }

    @test.always
    def test_destroy(self):
        self.pretf.destroy()
--------------------------------------------------------------------------------
/examples/aws/iam.tf.py:
--------------------------------------------------------------------------------
from pretf.api import labels
from pretf.blocks import output, resource


def pretf_blocks(var):

    group = yield resource.aws_iam_group.pretf(
        name="pretf-aws",
    )

    for name in var.user_names:

        name_label = labels.clean(name)

        user = yield resource.aws_iam_user[name_label](
            name=name,
        )

        yield resource.aws_iam_user_group_membership[name_label](
            user=user.name,
            groups=[group.name],
        )

        yield output[f"user_{name_label}"](
            value=user.name,
        )
--------------------------------------------------------------------------------
/pretf.aws/setup.py:
--------------------------------------------------------------------------------
import os
import re

from setuptools import setup


def get_version():
    here = os.path.abspath(os.path.dirname(__file__))
    with open(os.path.join(here, "..", "pretf", "pretf", "version.py")) as open_file:
        contents = open_file.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.MULTILINE)
    return match.group(1)


setup(
    name="pretf.aws",
    version=get_version(),
    author="Raymond Butcher",
    author_email="randomy@gmail.com",
    license="MIT License",
    packages=["pretf"],
    install_requires=["boto3", "boto3-stubs", "pretf"],
    zip_safe=False,
)
--------------------------------------------------------------------------------
/examples/flatten/pretf.workflow.py:
--------------------------------------------------------------------------------
from pretf import workflow


def pretf_workflow(path):
    # Restrict where pretf/terraform can run to directories containing an auto
    # tfvars file. It will show an error when running in the wrong directory.
    workflow.require_files("*.*.auto.tfvars")

    # Flatten the directory structure into the working directory.
    workflow.delete_links()
    created = workflow.link_files("*.tf", "*.tf.py", "*.tfvars.py", "modules")

    # Now run the standard Pretf workflow, which generates files
    # and then executes Terraform. Pass in the mirrored files
    # so they can be cleaned up.
    return workflow.default(created=created)
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
FORMAT_SOURCES = pretf/pretf pretf.aws/pretf tests
ALL_SOURCES = $(FORMAT_SOURCES) examples

.PHONY: all
all: tidy test

.PHONY: clean
clean:
	cd pretf; make clean
	cd pretf.aws; make clean

.PHONY: docs
docs:
	mkdocs serve

.PHONY: test
test:
	mypy $(shell python -c 'import pathlib; print(" ".join(sorted(f"-m pretf.{p.stem}" for p in pathlib.Path().glob("pretf*/pretf/*.py"))))')
	flake8 --ignore E501,W503 $(ALL_SOURCES)
	pytest -v tests

.PHONY: tidy
tidy:
	isort --float-to-top --profile black $(ALL_SOURCES)
	black $(FORMAT_SOURCES)
	cd examples; terraform fmt -recursive

.PHONY: testall
testall: tidy test
	pytest -v examples
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
site_name: Pretf Documentation
theme: readthedocs
docs_dir: docs
nav:
  - Home: index.md
  - Comparisons: comparisons.md
  - Tutorial:
    - Get started: tutorial/get-started.md
    - Define resources: tutorial/define-resources.md
    - Dynamic resources: tutorial/dynamic-resources.md
    - Dynamic references: tutorial/dynamic-references.md
    - Terraform variables: tutorial/terraform-variables.md
    - Custom workflows: tutorial/custom-workflows.md
    - AWS projects: tutorial/aws-projects.md
    - direnv and asdf-vm: tutorial/direnv-and-asdf-vm.md
  - API:
    - pretf.api: api/api.md
    - pretf.aws: api/aws.md
    - pretf.collections: api/collections.md
    - pretf.workflow: api/workflow.md
    - projects: api/projects.md
--------------------------------------------------------------------------------
/pretf/setup.py:
--------------------------------------------------------------------------------
import os
import re

from setuptools import setup


def get_version():
    here = os.path.abspath(os.path.dirname(__file__))
    with open(os.path.join(here, "pretf", "version.py")) as open_file:
        contents = open_file.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.MULTILINE)
    return match.group(1)


version = get_version()

setup(
    name="pretf",
    version=version,
    author="Raymond Butcher",
    author_email="ray.butcher@claranet.uk",
    license="MIT License",
    packages=["pretf"],
    entry_points={"console_scripts": ["pretf=pretf.cli:main"]},
    install_requires=["colorama", "Jinja2", "python-hcl2>=3.0.0"],
    extras_require={"aws": ["pretf.aws=={}".format(version)]},
    zip_safe=False,
)
--------------------------------------------------------------------------------
/docs/tutorial/get-started.md:
--------------------------------------------------------------------------------
Pretf provides a way to generate Terraform configuration with Python code. It should be seen as an extension for Terraform projects, to be used in situations where the standard configuration language is not working well for your project.

* If you are not familiar with Terraform:
    * Start with Terraform, not Pretf
* Otherwise:
    * If it works well for your project:
        * Continue using Terraform without Pretf
    * If you're using "workarounds", "escape hatches", or "hacks":
        * Consider using Pretf

These tutorials assume you have an existing Terraform project and you want to generate additional resources with Pretf.

Before starting, run `pretf version` to check that Pretf and Terraform are installed:

```shell
$ pretf version
Pretf v0.7.0
Terraform v0.12.21
+ provider.aws v2.50.0
```
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_1.tf:
--------------------------------------------------------------------------------
# From https://www.hashicorp.com/blog/announcing-terraform-0-12

data "consul_key_prefix" "environment" {
  path = "apps/example/env"
}

resource "aws_elastic_beanstalk_environment" "example" {
  name        = "test_environment"
  application = "testing"

  setting {
    namespace = "aws:autoscaling:asg"
    name      = "MinSize"
    value     = "1"
  }

  dynamic "setting" {
    for_each = data.consul_key_prefix.environment.var
    content {
      namespace = "aws:elasticbeanstalk:application:environment"
      name      = setting.key
      value     = setting.value
    }
  }
}

output "environment" {
  value = {
    id = aws_elastic_beanstalk_environment.example.id
    vpc_settings = {
      for s in aws_elastic_beanstalk_environment.example.all_settings :
      s.name => s.value
      if s.namespace == "aws:ec2:vpc"
    }
  }
}
--------------------------------------------------------------------------------
/docs/tutorial/dynamic-resources.md:
--------------------------------------------------------------------------------
There is too much duplication on the previous page, and what if we want to add more animals? Let's use a 'for loop':

```python
# animals.tf.py

from pretf.api import block


def pretf_blocks():
    animals = ["dog", "cat", "buffalo", "rabbit", "badger"]
    for name in animals:  # loop over list
        yield block("resource", "random_integer", name, {  # dynamic resource name
            "min": 1,
            "max": 10,
        })
```

Now run `pretf plan` and you will see those 5 resources.

## Why not use 'count'?

Terraform supports creating resources from a list like this:

```terraform
resource "random_integer" "animals" {
  count = length(var.animals)
  min   = 1
  max   = 10
}
```

But at the time of this writing, Terraform 0.12.4 is the latest version, and it still [recreates resources when you change the list](https://github.com/hashicorp/terraform/issues/17179).
--------------------------------------------------------------------------------
/docs/tutorial/custom-workflows.md:
--------------------------------------------------------------------------------
When Pretf runs, it looks for a `pretf.workflow.py` file in the current or parent directories. If found, Pretf will call the `pretf_workflow()` function from that file.

The following `pretf.workflow.py` file implements a simplified version of the default Pretf behaviour. This is only useful as an example to use for getting started with your own custom workflow.

```python
# pretf.workflow.py

from pretf import workflow


def pretf_workflow():
    # Delete *.tf.json and *.tfvars.json files.
    workflow.delete_files()

    # Create *.tf.json and *.tfvars.json files
    # from *.tf.py and *.tfvars.py files.
    created = workflow.create_files()

    # Execute Terraform, raising an exception if it fails.
    proc = workflow.execute_terraform()

    # Clean up created files if successful.
    workflow.clean_files(created)

    return proc
```

To customise the behaviour of Pretf, create this file and customise as required.
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
# Install asdf-vm plugins and tools.
asdf_tools="
python 3.6.15
terraform 1.1.0
"
if command -v asdf > /dev/null; then
    echo "${asdf_tools}" > .tool-versions
    for plugin in $(cut -d ' ' -f 1 .tool-versions); do
        if ! asdf plugin-list | grep $plugin > /dev/null; then
            echo "Installing asdf plugin $plugin"
            asdf plugin-add $plugin
        fi
    done
    asdf install
fi

# Use a virtual environment for Python.
layout python3

# Install Python packages.
python_packages="
black
boto_source_profile_mfa==0.0.9
flake8
isort
mkdocs
mypy
pytest
twine
-e pretf
-e pretf.aws
"
pip install --upgrade pip
pip install $python_packages | grep -v "Requirement already satisfied:" || true

# Add a terraform shim to run Pretf instead of Terraform.
PATH_add "$(
    mkdir -p .direnv/bin &&
    cd $_ &&
    ln -fs $(which terraform) tf &&
    ln -fs $(which pretf) terraform &&
    pwd
)"
--------------------------------------------------------------------------------
/examples/test-framework/test_test_framework.py:
--------------------------------------------------------------------------------
from pretf import test, workflow
from pretf.blocks import output, variable


class TestTestFramework(test.SimpleTest):
    def test_create(self):

        workflow.delete_files("*.json")

        with self.create("one.tf.json"):
            one = yield variable.one(default=True)
            yield output.one(value=one)

        self.tf.init()

        outputs = self.tf.apply()
        assert outputs == {"one": True}

    def test_change(self):

        with self.create("one.tf.json"):
            one = yield variable.one(default=False)
            yield output.one(value=one)

        with self.create("two.tf.json"):
            two = yield variable.two(default={"x": [1, 2, 3], "y": 4})
            yield output.two(value=two)

        outputs = self.tf.apply()
        assert outputs == {"one": False, "two": {"x": [1, 2, 3], "y": 4}}

    @test.always
    def test_destroy(self):
        self.tf.destroy()
--------------------------------------------------------------------------------
/tests/test_api.py:
--------------------------------------------------------------------------------
import pytest

from pretf.api import block


@pytest.mark.parametrize(
    "obj,expected",
    [
        (block("provider", "aws", {}), "aws"),
        (block("provider", "aws", {}).alias, "aws"),
        (block("provider", "aws", {"region": "eu-west-1"}), "aws"),
        (block("provider", "aws", {"region": "eu-west-1"}).alias, "aws"),
        (block("provider", "aws", {"alias": "nonprod"}), "aws.nonprod"),
        (block("provider", "aws", {"alias": "nonprod"}).alias, "aws.nonprod"),
        (block("variable", "one", {}), "${var.one}"),
        (block("resource", "one", "two", {}), "${one.two}"),
        (
            block("resource", "aws_instance", "www", {}).ipv6_addresses[0],
            "${aws_instance.www.ipv6_addresses[0]}",
        ),
        (
            block("resource", "one", "two", {}).list[0].another_list[1],
            "${one.two.list[0].another_list[1]}",
        ),
    ],
)
def test_block(obj, expected):
    assert str(obj) == expected
--------------------------------------------------------------------------------
/tests/test_variables_files/announcing_terraform_4.tf:
--------------------------------------------------------------------------------
# https://www.hashicorp.com/blog/announcing-terraform-0-12#iteration-constructs

locals {
  public_instances_by_az = {
    for i in aws_instance.example : i.availability_zone => i...
    if i.associate_public_ip_address
  }
}

variable "source_image_region" {
  type = string
}

variable "target_image_regions" {
  type = list(string)
}

resource "azurerm_shared_image_version" "ubuntu" {
  name                = "1.0.1"
  gallery_name        = azurerm_shared_image_gallery.image_gallery.name
  image_name          = azurerm_shared_image.image_definition.name
  resource_group_name = azurerm_resource_group.image_gallery.name
  location            = var.source_image_location
  managed_image_id    = data.azurerm_image.ubuntu.id[count.index]

  dynamic "target_region" {
    for_each = var.target_image_regions
    content {
      name                   = target_region.value
      regional_replica_count = 1
    }
  }
}
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2019 Raymond Butcher

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/tests/test_variables_files/variables.tf.expected.json:
--------------------------------------------------------------------------------
[
    {
        "name": "image_id",
        "source": "variables.tf"
    },
    {
        "name": "availability_zone_names",
        "default": [
            "us-west-1a"
        ],
        "source": "variables.tf"
    },
    {
        "name": "image_id_2",
        "source": "variables.tf"
    },
    {
        "name": "access_key",
        "source": "variables.tf"
    },
    {
        "name": "secret_key",
        "source": "variables.tf"
    },
    {
        "name": "region",
        "default": "us-east-1",
        "source": "variables.tf"
    },
    {
        "name": "cidrs",
        "default": [],
        "source": "variables.tf"
    },
    {
        "name": "cidrs_2",
        "source": "variables.tf"
    },
    {
        "name": "amis",
        "default": {
            "us-east-1": "ami-b374d5a5",
            "us-west-2": "ami-4b32be2b"
        },
        "source": "variables.tf"
    }
]
--------------------------------------------------------------------------------
/tests/test_variables.py:
--------------------------------------------------------------------------------
import json
from pathlib import Path

import pytest

from pretf.variables import get_variables_from_file


def find_test_files():
    test_files_path = Path(__file__).parent / "test_variables_files"
    test_files = set(test_files_path.iterdir())
    for test_file_path in sorted(test_files):
        if not test_file_path.name.endswith(".expected.json"):
            expected_file_path = test_file_path.with_name(
                test_file_path.name + ".expected.json"
            )
            if expected_file_path in test_files:
                yield (test_file_path, expected_file_path)


@pytest.mark.parametrize("test_file_path,expected_file_path", find_test_files())
def test_get_variables_from_file(test_file_path, expected_file_path):
    print("test", test_file_path, expected_file_path)
    with expected_file_path.open() as open_file:
        expected = json.load(open_file)
    result = []
    for var in get_variables_from_file(test_file_path):
        result.append(dict(var))
    assert expected == result
--------------------------------------------------------------------------------
/examples/flatten/test_flatten.py:
--------------------------------------------------------------------------------
from unittest.mock import ANY

import pytest

from pretf import test


class TestFlatten(test.SimpleTest):
    @pytest.mark.parametrize(
        "stack,env,expected", [
            ("iam", "dev", {"user_name": "pretf-flatten-dev"}),
            ("iam", "prod", {"user_name": "pretf-flatten-prod"}),
            ("vpc", "dev", {"vpc_id": ANY}),
            ("vpc", "prod", {"vpc_id": ANY}),
            ("vpc-peering", "prod", {"status": "active"}),
        ],
    )
    def test_apply(self, stack, env, expected):
        self.pretf(f"stacks/{stack}/{env}").init()
        outputs = self.pretf(f"stacks/{stack}/{env}").apply()
        assert outputs == expected

    @test.always
    @pytest.mark.parametrize(
        "stack,env", [
            ("vpc-peering", "prod"),
            ("vpc", "prod"),
            ("vpc", "dev"),
            ("iam", "prod"),
            ("iam", "dev"),
        ],
    )
    def test_destroy(self, stack, env):
        self.pretf(f"stacks/{stack}/{env}").destroy()
--------------------------------------------------------------------------------
/examples/aws/security-groups.tf.py:
--------------------------------------------------------------------------------
from ipaddress import IPv4Network

from pretf.blocks import output, resource


def pretf_blocks(var):

    private_label = "private"
    private = yield resource.aws_security_group[private_label](
        name="pretf-example-aws-private",
    )

    public_label = "public"
    public = yield resource.aws_security_group[public_label](
        name="pretf-example-aws-public",
    )

    for cidr in sorted(set(var.security_group_allowed_cidrs)):

        cidr_label = cidr.replace(".", "_").replace("/", "_")

        if IPv4Network(cidr).is_global:
            group = public
            group_label = public_label
        else:
            group = private
            group_label = private_label

        for port in (80, 443):
            rule_label = f"{group_label}_{port}_from_{cidr_label}"
            yield resource.aws_security_group_rule[rule_label](
                security_group_id=group.id,
                type="ingress",
                protocol="tcp",
                from_port=port,
                to_port=port,
                cidr_blocks=[cidr],
            )

    yield output.private_sg_id(value=private.id)
    yield output.public_sg_id(value=public.id)
--------------------------------------------------------------------------------
/examples/flatten/stacks/vpc-peering/main.tf:
--------------------------------------------------------------------------------
data "aws_caller_identity" "dev" {
  provider = aws.dev
}

resource "aws_vpc_peering_connection" "prod_to_dev" {
  provider      = aws.prod
  vpc_id        = var.prod_vpc_id
  peer_owner_id = data.aws_caller_identity.dev.account_id
  peer_vpc_id   = var.dev_vpc_id
}

resource "aws_vpc_peering_connection_accepter" "prod_to_dev" {
  provider                  = aws.dev
  vpc_peering_connection_id = aws_vpc_peering_connection.prod_to_dev.id
  auto_accept               = true
}

# Options can't be set until the connection has been accepted and is active,
# so create an explicit dependency on the accepter when setting options.

locals {
  active_vpc_peering_connection_id = aws_vpc_peering_connection_accepter.prod_to_dev.id
}

resource "aws_vpc_peering_connection_options" "prod" {
  provider                  = aws.prod
  vpc_peering_connection_id = local.active_vpc_peering_connection_id
  requester {
    allow_remote_vpc_dns_resolution = true
  }
}

resource "aws_vpc_peering_connection_options" "dev" {
  provider                  = aws.dev
  vpc_peering_connection_id = local.active_vpc_peering_connection_id
  accepter {
    allow_remote_vpc_dns_resolution = true
  }
}
--------------------------------------------------------------------------------
/tests/test_variables_files/variables.tf:
--------------------------------------------------------------------------------
# https://www.terraform.io/docs/configuration/variables.html

variable "image_id" {
  type = string
}

variable "availability_zone_names" {
  type    = list(string)
  default = ["us-west-1a"]
}

resource "aws_instance" "example" {
  instance_type = "t2.micro"
  ami           = var.image_id
}

variable "image_id_2" {
  type        = string
  description = "The id of the machine image (AMI) to use for the server."
}

# https://learn.hashicorp.com/terraform/getting-started/variables.html

variable "access_key" {}
variable "secret_key" {}
variable "region" {
  default = "us-east-1"
}

provider "aws" {
  access_key = var.access_key
  secret_key = var.secret_key
  region     = var.region
}

# implicitly by using brackets [...]
variable "cidrs" { default = [] }

# explicitly
variable "cidrs_2" { type = list }

variable "amis" {
  type = "map"
  default = {
    "us-east-1" = "ami-b374d5a5"
    "us-west-2" = "ami-4b32be2b"
  }
}

resource "aws_instance" "example_2" {
  ami           = var.amis[var.region]
  instance_type = "t2.micro"
}

output "ami" {
  value = aws_instance.example.ami
}
--------------------------------------------------------------------------------
/pretf/pretf/parser.py:
--------------------------------------------------------------------------------
import json
import sys
from pathlib import Path
from typing import Generator, List

import hcl2

from . import log


def get_outputs_from_block(block: dict) -> Generator[dict, None, None]:

    if "output" not in block:
        return

    output = block["output"]

    if isinstance(output, dict):
        outputs = [output]
    else:
        outputs = output

    for output in outputs:
        for name, block in output.items():
            yield {"name": name, "value": block["value"]}


def parse_environment_variable_for_variables(name: str, value: str) -> dict:
    contents = f"{name[7:]} = {value}"
    return parse_hcl2(contents)


def parse_hcl2(contents: str) -> dict:
    try:
        return hcl2.loads(contents)
    except Exception as error:
        print(file=sys.stderr)
        log.bad("Error parsing:")
        print(contents, file=sys.stderr)
        log.bad(f"Raising: {error.__class__.__name__}")
        raise


def parse_json_file_for_blocks(path: Path) -> List[dict]:

    with open(path) as open_file:
        contents = json.load(open_file)

    if isinstance(contents, dict):
        blocks = [contents]
    else:
        blocks = contents

    return blocks
--------------------------------------------------------------------------------
/examples/flatten/README.md:
--------------------------------------------------------------------------------
# Example: flattened directory layout

This is an example project that "flattens" stack directories using symlinks. This is a simple and obvious project structure that is easy to work with.
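
For illustration, the stack directories in this example are laid out like this (abbreviated):

```
stacks/
├── stack.tf.py        # shared by every stack via symlink
├── iam/
│   ├── main.tf
│   ├── dev/           # run pretf/terraform in here
│   └── prod/
├── vpc/
│   ├── main.tf
│   ├── dev/
│   └── prod/
└── vpc-peering/
    └── prod/
```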
4 | 5 | Features:
6 | 7 | * Directory-based
8 | * Change into an environment directory and run Terraform
9 | * No extra CLI arguments required when running Terraform
10 | * Simple directory layout allowing for DRY code
11 | * One persistent `.terraform` directory per environment
12 | * No need to run `terraform init` more than once
13 | * Automatic AWS credentials when running locally
14 | * With MFA token support
15 | * Ability to use Python for anything that is too complicated or not supported in HCL
16 | 17 | ## Comparison to workspaces layout
18 | 19 | When compared to the `workspaces` layout, the `flatten` layout:
20 | 21 | Pros:
22 | 23 | * Has more obvious environment directories
24 | * `cd` and `ls` the directories to see what exists
25 | * Is easier to switch between environments
26 | * `cd` into `iam/dev` to work with the `iam` stack in the `dev` environment
27 | * Has more options for S3 backend separation
28 | * Each environment could use a separate backend
29 | * All environments within an account could share a backend
30 | * All environments in all accounts could share a backend
31 | 32 | Cons:
33 | 34 | * Has more symlinks
35 | * One for each file in the parent directories
36 | -------------------------------------------------------------------------------- /examples/flatten/stacks/stack.tf.py: --------------------------------------------------------------------------------
1 | from pretf.aws import provider_aws, terraform_backend_s3
2 | from pretf.blocks import variable
3 | 4 | 5 | def pretf_blocks(var):
6 | 7 | # Create variables needed by this file.
8 | 9 | yield variable.aws_credentials(
10 | default={
11 | "nonprod": {
12 | "profile": "pretf-nonprod",
13 | },
14 | "prod": {
15 | "profile": "pretf-prod",
16 | },
17 | },
18 | )
19 | yield variable.aws_region(
20 | default="eu-west-1",
21 | )
22 | yield variable.environment(
23 | type="string",
24 | )
25 | yield variable.stack(
26 | type="string",
27 | )
28 | 29 | # Create a backend configuration using the environment details.
30 | # Stacks in the same account share backend resources.
31 | 32 | if var.environment == "prod":
33 | account = "prod"
34 | else:
35 | account = "nonprod"
36 | 37 | backend = f"pretf-examples-flatten-{account}"
38 | 39 | yield terraform_backend_s3(
40 | bucket=backend,
41 | dynamodb_table=backend,
42 | key=f"{var.stack}/terraform.tfstate",
43 | region=var.aws_region,
44 | **var.aws_credentials[account],
45 | )
46 | 47 | # Create a default AWS provider for this environment.
48 | 49 | yield provider_aws(
50 | region=var.aws_region,
51 | **var.aws_credentials[account],
52 | )
53 | -------------------------------------------------------------------------------- /docs/tutorial/direnv-and-asdf-vm.md: --------------------------------------------------------------------------------
1 | [direnv](https://direnv.net/) is a tool for setting up development environments on a per-directory basis.
2 | 3 | [asdf-vm](https://asdf-vm.com/) is a tool for installing and managing software versions.
4 | 5 | These tools work great with Pretf projects. This example `.envrc` file does the following when you `cd` into the project directory:
6 | 7 | * Creates and activates a virtual environment for Python.
8 | * Ensures that Pretf is installed.
9 | * Ensures that Terraform is installed.
10 | * Adds a `terraform` shim to run Pretf instead of Terraform.
11 | 12 | ```shell
13 | # Use a virtual environment for Python.
14 | layout python3
15 | 16 | # Install Python packages.
17 | python_packages="
18 | pretf[aws]
19 | "
20 | for package in $python_packages; do
21 | pip install $package | grep -v "Requirement already satisfied:" || true
22 | done
23 | 24 | # Install asdf-vm plugins and tools.
25 | asdf_tools="
26 | terraform 0.12.21
27 | "
28 | if command -v asdf > /dev/null; then
29 | echo "${asdf_tools}" > .tool-versions
30 | for plugin in $(cut -d ' ' -f 1 .tool-versions); do
31 | if ! asdf plugin-list | grep $plugin > /dev/null; then
32 | echo "Installing asdf plugin $plugin"
33 | asdf plugin-add $plugin
34 | fi
35 | done
36 | asdf install
37 | fi
38 | 39 | # Add a terraform shim to run Pretf instead of Terraform.
40 | PATH_add "$(mkdir -p .direnv/bin && cd $_ && ln -fs $(which pretf) terraform && pwd)"
41 | ```
42 | -------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # Pretf
2 | 3 | [![Documentation Status](https://readthedocs.org/projects/pretf/badge/?version=latest)](http://pretf.readthedocs.io/) [![Build Status](https://dev.azure.com/raymondbutcher/Pretf/_apis/build/status/raymondbutcher.pretf?branchName=master)](https://dev.azure.com/raymondbutcher/Pretf/_build/latest?definitionId=1&branchName=master)
4 | 5 | Pretf is a completely transparent, drop-in Terraform wrapper that generates Terraform configuration with Python. It requires no configuration and no changes to standard Terraform projects to start using it.
6 | 7 | Terraform includes first-class support for configuration in JSON files. Pretf generates those JSON files using your Python functions.
8 | 9 | ## Documentation
10 | 11 | The documentation for Pretf is located at: [https://pretf.readthedocs.io/](https://pretf.readthedocs.io/)
12 | 13 | ## Features and goals
14 | 15 | * Drop into any standard Terraform project.
16 | * Configuration is optional and often unnecessary.
17 | * Just add Python files next to the Terraform files.
18 | * Standard Terraform command line usage.
19 | * Obvious.
20 | * Projects using Pretf are like standard Terraform projects but with extra Python files.
21 | * Python files in projects are self-explanatory; their purpose is obvious.
22 | * Minimal.
23 | * No concept of specific Terraform resources, instead there is a generic way to output JSON configuration blocks.
24 | * Small API.
25 | * Small project scope.
26 | * Easy to learn.
27 | * Flexible.
28 | * Change the entire workflow if you want.
29 | -------------------------------------------------------------------------------- /examples/README.md: --------------------------------------------------------------------------------
1 | # Pretf examples
2 | 3 | This directory contains various examples of how to use Pretf.
4 | 5 | ## AWS credentials
6 | 7 | The credentials in these examples are unusual because:
8 | 9 | * Different examples have different mixtures of:
10 | * Credentials for backend in nonprod, prod, or both AWS accounts.
11 | * Credentials for resources in nonprod, prod, or both AWS accounts.
12 | * All examples are tested with a single `pytest` command.
13 | * Credentials are automated when running locally.
14 | * It also runs in an Azure DevOps pipeline.
15 | 16 | To allow all of the above to work, these examples use an `aws_credentials` variable that contains credentials for both nonprod and prod AWS accounts. Each stack/environment uses the credentials for the relevant account, ignoring the other account if it isn't used.
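As a sketch of how a stack consumes that variable (this mirrors `examples/flatten/stacks/stack.tf.py` elsewhere in this repository; other examples vary in the details):

```python
from pretf.aws import provider_aws


def pretf_blocks(var):
    # Use the prod credentials for the prod environment,
    # otherwise fall back to the nonprod credentials.
    account = "prod" if var.environment == "prod" else "nonprod"
    yield provider_aws(
        region=var.aws_region,
        **var.aws_credentials[account],  # a profile locally, access keys in CI
    )
```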
17 | 18 | The default value for `aws_credentials` contains AWS profiles for local usage. The CI environment overrides this value with its own credentials using access keys instead of profiles.
19 | 20 | ### Running locally
21 | 22 | Add these profiles to your AWS credentials configuration:
23 | 24 | * `pretf-nonprod`
25 | * `pretf-prod`
26 | 27 | ### Running in CI environments
28 | 29 | Set this environment variable:
30 | 31 | ```sh
32 | TF_VAR_aws_credentials='{ nonprod = { access_key = "REDACTED", secret_key="REDACTED" }, prod = { access_key = "REDACTED", secret_key="REDACTED" } }'
33 | ```
34 | 35 | Or pass it in as a Terraform CLI argument:
36 | 37 | ```sh
38 | terraform plan \
39 | -var 'aws_credentials={ nonprod = { access_key = "REDACTED", secret_key="REDACTED" }, prod = { access_key = "REDACTED", secret_key="REDACTED" } }'
40 | ```
41 | -------------------------------------------------------------------------------- /azure-pipelines.yml: --------------------------------------------------------------------------------
1 | # Python package
2 | # Create and test a Python package on multiple Python versions.
3 | # Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
4 | # https://docs.microsoft.com/azure/devops/pipelines/languages/python
5 | 6 | trigger:
7 | - master
8 | 9 | pool:
10 | vmImage: 'ubuntu-latest'
11 | 12 | strategy:
13 | maxParallel: 1
14 | matrix:
15 | Python36:
16 | python.version: '3.6'
17 | Python37:
18 | python.version: '3.7'
19 | 20 | variables:
21 | - group: aws-credentials
22 | 23 | steps:
24 | - task: UsePythonVersion@0
25 | inputs:
26 | versionSpec: '$(python.version)'
27 | displayName: 'Use Python $(python.version)'
28 | 29 | - script: |
30 | python -m pip install --upgrade pip
31 | pip install -e pretf -e pretf.aws
32 | pip install pytest pytest-azurepipelines
33 | displayName: 'Install dependencies'
34 | 35 | - script: |
36 | pytest -v tests
37 | displayName: 'Unit tests'
38 | 39 | - script: |
40 | wget -q -O terraform.zip https://releases.hashicorp.com/terraform/1.1.0/terraform_1.1.0_linux_amd64.zip
41 | unzip -q terraform.zip
42 | sudo mv terraform /usr/local/bin
43 | condition: eq(variables['python.version'], '3.6')
44 | displayName: 'Install Terraform'
45 | 46 | - script: |
47 | pytest -v examples
48 | env:
49 | TF_VAR_aws_credentials: '{ nonprod = { access_key = "$(AWS_ACCESS_KEY_ID_NONPROD)", secret_key="$(AWS_SECRET_ACCESS_KEY_NONPROD)" }, prod = { access_key = "$(AWS_ACCESS_KEY_ID_PROD)", secret_key="$(AWS_SECRET_ACCESS_KEY_PROD)" } }'
50 | condition: eq(variables['python.version'], '3.6')
51 | displayName: 'Integration tests'
52 | -------------------------------------------------------------------------------- /pretf/pretf/log.py: --------------------------------------------------------------------------------
1 | import sys
2 | from functools import wraps
3 | from typing import Any, Callable
4 | 5 | import colorama
6 | 7 | 8 | def colorama_init(func: Callable, state: dict = {}) -> Callable:
9 | @wraps(func)
10 | def wrapped(*args: Any, **kwargs: dict) -> Any:
11 | 12 | if not state:
13 | colorama.init()
14 | state["init"] = True
15 | 16 | return func(*args, **kwargs)
17 | 18 | return wrapped
19 | 20 | 21 | @colorama_init
22 | def accept(message: Any) -> bool:
23 | """
24 | Prompts the user to enter "yes" or "no". Returns True if the
25 | response was "yes", otherwise False. Ctrl-c counts as "no".
26 | 27 | """
28 | 29 | message = f"[pretf] {message} [yes/no]: "
30 | response = ""
31 | while response not in ("yes", "no"):
32 | try:
33 | response = input(message).lower()
34 | except KeyboardInterrupt:
35 | response = "no"
36 | print()
37 | return response == "yes"
38 | 39 | 40 | class bad(Exception):
41 | @colorama_init
42 | def __init__(self, message: Any):
43 | """
44 | Displays a message prefixed with [pretf] in red.
45 | Can be raised as an exception to display the message and then exit.
46 | 47 | """
48 | print(
49 | f"{colorama.Fore.RED}[pretf] {message}{colorama.Style.RESET_ALL}",
50 | file=sys.stderr,
51 | )
52 | 53 | 54 | class ok(Exception):
55 | @colorama_init
56 | def __init__(self, message: Any):
57 | """
58 | Displays a message prefixed with [pretf] in cyan.
59 | Can be raised as an exception to display the message and then exit.
60 | 61 | """
62 | 63 | print(
64 | f"{colorama.Fore.CYAN}[pretf] {message}{colorama.Style.RESET_ALL}",
65 | file=sys.stderr,
66 | )
67 | -------------------------------------------------------------------------------- /docs/tutorial/dynamic-references.md: --------------------------------------------------------------------------------
1 | The previous page has 5 dynamic resources but nothing is done with them. To access an attribute of a resource, just... access it.
2 | 3 | Accessing an attribute in Pretf will not return the actual dynamic value of the resource managed by Terraform. Instead, it returns an interpolation reference string for Terraform to use when it runs.
4 | 5 | ```python
6 | # animals.tf.py
7 | 8 | from pretf.api import block
9 | 10 | 11 | def pretf_blocks():
12 | animals = ["dog", "cat", "buffalo", "rabbit", "badger"]
13 | for name in animals:
14 | animal = yield block("resource", "random_integer", name, {
15 | "min": 1,
16 | "max": 10,
17 | })
18 | yield block("output", name, {
19 | "value": animal.result, # access "result" attribute of resource
20 | })
21 | ```
22 | 23 | Now run `pretf plan/apply` and the state will contain the additional outputs. `animal.result` was translated into `"${random_integer.dog.result}"` for the `dog` iteration of the loop, and Terraform used that to output the actual value.
24 | 25 | Accessing any attribute of a block object will return a string containing a Terraform reference to that attribute. This lets you take advantage of Terraform's [implicit resource dependencies](https://learn.hashicorp.com/terraform/getting-started/dependencies.html).
26 | 27 | ## Assign and yield
28 | 29 | The above code contains the pattern `result = yield block(...)`.
30 | 31 | Pretf sends yielded values back to generators. This allows functions to assign block objects to a variable and yield them on the same line.
32 | 33 | ## Reference without yielding
34 | 35 | If something is defined in another file, but you still want to reference it, then create a block object with an empty body. Do not `yield` it, because that would include it in the `*.tf.json` output.
36 | 37 | ```python
38 | yield block("output", name, {
39 | "value": block("resource", "random_integer", name, {}).result,
40 | })
41 | ```
42 | -------------------------------------------------------------------------------- /CHANGELOG.md: --------------------------------------------------------------------------------
1 | # Pretf Changes
2 | 3 | ## [Unreleased]
4 | 5 | ### Added
6 | 7 | * Add support for rendering Jinja2 templates.
8 | 9 | ### Changed 10 | 11 | * Use python-hcl2 for parsing Terraform files (#65) 12 | 13 | ### Fixed 14 | 15 | * Fixed parsing complex types in variable definitions (#29) 16 | * Updated command line argument parsing and variable precedence logic, fixing some issues (#38, #54, #66) 17 | 18 | ### Removed 19 | 20 | * Pretf no longer supports [passing a different configuration directory](https://www.terraform.io/docs/cli/commands/plan.html#passing-a-different-configuration-directory), bringing it in line with Terraform version v0.15 and above. Pretf currently has no specific handling or support for the [-chdir argument](https://www.terraform.io/docs/cli/commands/#switching-working-directory-with-chdir) but it does get passed through to the Terraform command if provided. 21 | 22 | ## 0.7.3 23 | 24 | ### Fixed 25 | 26 | * Prevent simultaneous MFA prompts (#61) 27 | 28 | ## 0.7.2 29 | 30 | ### Added 31 | 32 | * `aws.terraform_remote_state_s3()` function added. 33 | 34 | ## 0.7.1 35 | 36 | ### Changed 37 | 38 | * Added `pytest` dependency which is required for `api.get_outputs()`. 39 | 40 | ## 0.7.0 41 | 42 | ### Added 43 | 44 | * `api.get_outputs()` function added. 45 | * `pretf.blocks` module added. 46 | * More human-friendly way to define Terraform blocks in Python. 47 | * `workflow.delete_links()` function added. 48 | * `workflow.link_files()` function added. 49 | * `workflow.link_module()` function added. 50 | * `workflow.load_parent()` function added. 51 | * More composable workflows. 52 | 53 | ### Changed 54 | 55 | * Use multiple threads to render files. 56 | * Should fix rare race conditions with multiple files referring to each others' variables. 57 | * `log.bad()` and `log.ok()` can now be raised as exceptions to display a message and then exit. 58 | 59 | ### Deprecated 60 | 61 | * `workflow.mirror_files()` function deprecated. 62 | * Use the new `link` functions instead. 63 | 64 | ### Removed 65 | 66 | * `workspaces` example removed. 67 | * It was too much effort to maintain it. I don't recommend using this project structure because the currently selected workspace is not obvious, and it can't have different S3 backends for different environments. 68 | -------------------------------------------------------------------------------- /examples/aws/s3.tf.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | from pretf.api import labels 5 | from pretf.blocks import output, resource, variable 6 | from pretf.collections import collect 7 | 8 | 9 | def pretf_blocks(): 10 | """ 11 | This demonstrates recursively uploading files to an S3 bucket. 12 | 13 | """ 14 | 15 | # Create an S3 bucket. 16 | bucket = yield resource.aws_s3_bucket.test( 17 | bucket="pretf-example-aws-files", 18 | acl="private", 19 | ) 20 | 21 | # Upload all files from the "files" and "more-files" directories. 22 | total_files = 0 23 | total_bytes = 0 24 | for source in ("files", "more-files"): 25 | objects = yield aws_s3_bucket_objects( 26 | bucket=bucket, 27 | source=source, 28 | ) 29 | total_files += objects.total_files 30 | total_bytes += objects.total_bytes 31 | 32 | # Output some stats. 33 | yield output.total_files(value=total_files) 34 | yield output.total_bytes(value=total_bytes) 35 | 36 | 37 | @collect 38 | def aws_s3_bucket_objects(var): 39 | """ 40 | Creates aws_s3_bucket_object resources for all files in the given 41 | source directory. This is using the "collections" API to create 42 | a reusable function that generates resources. 
43 | 44 | """
45 | 46 | # Inputs.
47 | yield variable.bucket()
48 | yield variable.prefix(default="")
49 | yield variable.source()
50 | 51 | # Get the resource name of the bucket,
52 | # to be used in object resource names.
53 | bucket_label = labels.get(var.bucket)
54 | 55 | total_files = 0
56 | total_bytes = 0
57 | 58 | # Resources.
59 | for path in Path(var.source).rglob("*"):
60 | if path.is_file():
61 | key = f"{var.prefix}{path.relative_to(var.source)}"
62 | object_label = labels.clean(f"{bucket_label}/{key}")
63 | yield resource.aws_s3_bucket_object[object_label](
64 | bucket=var.bucket.id,
65 | key=key,
66 | source=path,
67 | )
68 | total_files += 1
69 | total_bytes += os.path.getsize(path)
70 | 71 | # Outputs.
72 | yield output.total_files(value=total_files)
73 | yield output.total_bytes(value=total_bytes)
74 | -------------------------------------------------------------------------------- /docs/api/collections.md: --------------------------------------------------------------------------------
1 | ## collect
2 | 3 | This is a decorator used to create a collection. Collections are similar to Terraform modules except the resources are included in the root module rather than under a named module.
4 | 5 | Decorated functions should:
6 | 7 | * Accept a single argument `var`
8 | * Yield `pretf.api.block` blocks
9 | * Optionally including `variable` blocks to define inputs
10 | * Optionally including `output` blocks to define outputs
11 | 12 | When using a collection, any required inputs defined by variable blocks must be passed in as keyword arguments. Any outputs defined by output blocks can be accessed as attributes of the collection.
13 | 14 | Example:
15 | 16 | ```python
17 | from pretf.api import block
18 | from pretf.collections import collect
19 | 20 | 21 | def pretf_blocks():
22 | web = yield security_group(
23 | name="web",
24 | type="ingress",
25 | cidrs=["10.0.0.0/24", "192.168.0.0/24"],
26 | protocol="tcp",
27 | ports=[80, 443],
28 | )
29 | yield block("output", "web_sg_id", {"value": web.group.id})
30 | 31 | 32 | @collect
33 | def security_group(var):
34 | 35 | # Inputs.
36 | yield block("variable", "name", {})
37 | yield block("variable", "type", {})
38 | yield block("variable", "protocol", {})
39 | yield block("variable", "cidrs", {"default": []})
40 | yield block("variable", "ports", {"default": []})
41 | 42 | # Group resource.
43 | group = yield block("resource", "aws_security_group", var.name, {
44 | "name": var.name,
45 | })
46 | 47 | # Rule resources.
48 | for cidr in sorted(set(var.cidrs)):
49 | cidr_label = cidr.replace(".", "_").replace("/", "_")
50 | for port in var.ports:
51 | rule_label = f"{var.name}_{port}_from_{cidr_label}"
52 | yield block("resource", "aws_security_group_rule", rule_label,
53 | {
54 | "security_group_id": group.id,
55 | "type": var.type,
56 | "protocol": var.protocol,
57 | "from_port": port,
58 | "to_port": port,
59 | "cidr_blocks": [cidr],
60 | },
61 | )
62 | 63 | # Outputs.
64 | yield block("output", "group", {"value": group})
65 | ```
66 | -------------------------------------------------------------------------------- /pretf/pretf/cli.py: --------------------------------------------------------------------------------
1 | import sys
2 | from subprocess import CalledProcessError, CompletedProcess
3 | from typing import Union
4 | 5 | from .
import log, util, workflow 6 | from .exceptions import FunctionNotFoundError, RequiredFilesNotFoundError, VariableError 7 | from .version import __version__ 8 | 9 | 10 | def main() -> None: 11 | try: 12 | result = run() 13 | except CalledProcessError as error: 14 | sys.exit(error.returncode) 15 | if isinstance(result, CompletedProcess): 16 | sys.exit(result.returncode) 17 | elif isinstance(result, int): 18 | sys.exit(result) 19 | raise TypeError(result) 20 | 21 | 22 | def run() -> Union[CompletedProcess, int]: 23 | """ 24 | This is the pretf CLI tool entrypoint. 25 | 26 | """ 27 | 28 | subcommand, options = util.parse_args() 29 | 30 | if subcommand == "version": 31 | print(f"Pretf v{__version__}") 32 | 33 | if subcommand in {"", "0.12upgrade", "fmt", "help", "version"}: 34 | skip = True 35 | elif subcommand == "workspace" and "show" in options: 36 | skip = True 37 | else: 38 | skip = False 39 | 40 | if skip: 41 | return workflow.execute_terraform(verbose=False) 42 | 43 | try: 44 | 45 | workflow_path = util.find_workflow_path() 46 | 47 | if workflow_path: 48 | if workflow_path.name == "pretf.py": 49 | log.bad( 50 | "workflow: pretf.py is deprecated, rename it to pretf.workflow.py" 51 | ) 52 | return workflow.custom(workflow_path) 53 | else: 54 | return workflow.default() 55 | 56 | except (log.bad, log.ok): 57 | pass 58 | 59 | except FunctionNotFoundError as error: 60 | 61 | log.bad(error) 62 | 63 | except RequiredFilesNotFoundError as error: 64 | 65 | log.bad(f"required files not found: {' '.join(error.name_patterns)}") 66 | candidates = error.get_candidates() 67 | if candidates: 68 | log.bad("try changing directory to:") 69 | for path in candidates: 70 | log.bad(f"* {path}") 71 | 72 | except VariableError as error: 73 | 74 | if hasattr(error, "errors"): 75 | for error in error.errors: 76 | log.bad(error) 77 | else: 78 | log.bad(error) 79 | 80 | return CompletedProcess(args=sys.argv, returncode=1) 81 | -------------------------------------------------------------------------------- /tests/test_blocks.py: -------------------------------------------------------------------------------- 1 | from pretf.blocks import data, locals, module, output, provider, resource, variable 2 | from pretf.render import unwrap_yielded 3 | 4 | 5 | def test_data(): 6 | assert str(data) == "" 7 | assert str(data.null_data_source) == "" 8 | assert str(data.null_data_source.test) == "${data.null_data_source.test}" 9 | 10 | # Use index access for dynamic names. 11 | assert str(data.null_data_source["test"]) == "${data.null_data_source.test}" 12 | 13 | 14 | def test_locals(): 15 | assert str(locals) == "" 16 | assert str(locals.test) == "${local.test}" 17 | 18 | # Confirm that the locals block works to generate configuration. 19 | assert list(unwrap_yielded(locals(a=1, b=2, c=3))) == [ 20 | {"locals": {"a": 1, "b": 2, "c": 3}} 21 | ] 22 | 23 | 24 | def test_module(): 25 | assert str(module) == "" 26 | assert str(module.test) == "${module.test}" 27 | 28 | # Pass in a dictionary for the body. 29 | one = module.one({"a": 1, "b": 2, "c": 3}) 30 | assert str(one) == "${module.one}" 31 | 32 | # Pass in multiple dictionaries for the body. 33 | two = module.two({"a": 1, "b": 2}, {"c": 3}) 34 | assert str(two) == "${module.two}" 35 | 36 | # Pass in keyword arguments for the body. 37 | three = module.three(a=1, b=2, c=3) 38 | assert str(three) == "${module.three}" 39 | 40 | # Pass in a dictionary and keyword arguments for the body. 
41 | four = module.four({"a": 1}, b=2, c=3)
42 | assert str(four) == "${module.four}"
43 | 44 | # Pass in multiple dictionaries and keyword arguments for the body.
45 | five = module.five({"a": 1}, {"b": 2}, c=3)
46 | assert str(five) == "${module.five}"
47 | 48 | 49 | def test_output():
50 | assert str(output) == ""
51 | assert str(output.test) == "${output.test}"
52 | 53 | 54 | def test_provider():
55 | assert str(provider) == ""
56 | assert str(provider.aws) == "aws"
57 | 58 | 59 | def test_resource():
60 | assert str(resource) == ""
61 | assert (
62 | str(resource.null_resource) == ""
63 | )
64 | assert str(resource.null_resource.test) == "${null_resource.test}"
65 | 66 | 67 | def test_variable():
68 | assert str(variable) == ""
69 | assert str(variable.test) == "${var.test}"
70 | -------------------------------------------------------------------------------- /docs/tutorial/define-resources.md: --------------------------------------------------------------------------------
1 | The primary feature of Pretf is to read `*.tf.py` files and create matching `*.tf.json` files. These JSON files are supported by Terraform as an alternative to the more commonly used `*.tf` files.
2 | 3 | Start by creating a file named `animals.tf.py` in your Terraform project directory:
4 | 5 | ```python
6 | # animals.tf.py
7 | 8 | from pretf.api import block
9 | 10 | 11 | def pretf_blocks():
12 | yield block("resource", "random_integer", "dog", {
13 | "min": 1,
14 | "max": 10,
15 | })
16 | 17 | yield block("resource", "random_integer", "cat", {
18 | "min": 1,
19 | "max": 10,
20 | })
21 | 22 | yield block("resource", "random_integer", "buffalo", {
23 | "min": 1,
24 | "max": 10,
25 | })
26 | ```
27 | 28 | The function must be named `pretf_blocks()` for Pretf to find it.
29 | 30 | Now run `pretf validate`, which will generate a JSON file, run `terraform validate`, and clean it up afterwards:
31 | 32 | ```shell
33 | $ pretf validate
34 | [pretf] create: animals.tf.json
35 | [pretf] run: terraform validate
36 | Success! The configuration is valid.
37 | ```
38 | 39 | If you saw:
40 | 41 | ```shell
42 | Error: provider.random: no suitable version installed
43 | version requirements: "(any version)"
44 | versions installed: none
45 | ```
46 | 47 | Then you probably need to run `pretf init` to install the Random provider. This is standard Terraform behaviour and nothing to do with Pretf, except that Pretf runs Terraform for you. Run `pretf init` and then `pretf validate` again.
48 | 49 | With Terraform successfully validating your generated JSON file, you can now run `pretf plan` or `pretf apply` to manage the new resources. You can still run `terraform` directly, but using `pretf` as a wrapper will ensure that generated JSON files are always up to date.
50 | 51 | ## The 'yield' keyword
52 | 53 | If you are not familiar with the `yield` keyword in the above code, then read about [Python Generators](https://www.python.org/dev/peps/pep-0255/) introduced in Python 2.2. Normally we might create a list, append elements to that list, and then return the list at the end of the function. Instead, we can just `yield` each element as we go.
54 | 55 | ## Translate HCL to Python
56 | 57 | Translating HCL code into Python block objects is very simple. They are constructed in the same way as the original HCL, only requiring slight adjustments to be valid Python syntax.
58 | 59 | The following HCL resource consists of a block type `resource`, labels `random_integer` and `dog`, and the body `{ min = 1, max = 10 }`.
60 | 61 | ```terraform 62 | resource "random_integer" "dog" { 63 | min = 1 64 | max = 10 65 | } 66 | ``` 67 | 68 | Simply translate each part into a valid Python type and pass it into the `block()` function: 69 | 70 | ```python 71 | block("resource", "random_integer", "dog", { 72 | "min": 1, 73 | "max": 10, 74 | }) 75 | ``` 76 | -------------------------------------------------------------------------------- /docs/tutorial/aws-projects.md: -------------------------------------------------------------------------------- 1 | ## Installation 2 | 3 | AWS utilities are provided in a separate package, to keep dependencies of the core Pretf package minimal. 4 | 5 | To install the Pretf AWS utilities: 6 | 7 | ```shell 8 | pip install pretf.aws 9 | ``` 10 | 11 | Additionally, these utilities will make use of [boto-source-profile-mfa](https://github.com/claranet/boto-source-profile-mfa) if it has been installed: 12 | 13 | ```shell 14 | pip install boto-source-profile-mfa 15 | ``` 16 | 17 | ## AWS providers 18 | 19 | Pretf can generate AWS provider blocks, with full support for MFA prompts. 20 | 21 | ```python 22 | # aws.tf.py 23 | 24 | from pretf.aws import provider_aws 25 | 26 | 27 | def pretf_blocks(var): 28 | yield provider_aws( 29 | profile=var.aws_profile, 30 | region=var.aws_region, 31 | ) 32 | ``` 33 | 34 | ## Terraform S3 backend 35 | 36 | Pretf can dynamically generate the [S3 backend](https://www.terraform.io/docs/backends/types/s3.html) configuration, and even create the resources required for the backend. 37 | 38 | ```python 39 | # terraform.tf.py 40 | 41 | from pretf.aws import terraform_backend_s3 42 | 43 | 44 | def pretf_blocks(var): 45 | # This will check for the existence of the specified S3 bucket 46 | # and DynamoDB table. If they do not exist, you will be prompted 47 | # to create them with a CloudFormation stack. The AWS credentials 48 | # will then be exported as environment variables, and a Terraform 49 | # configuration block for the S3 backend is returned. It is then 50 | # yielded to be included in the generated JSON file. 51 | yield terraform_backend_s3( 52 | bucket=f"pretf-tfstate-{var.envtype}", 53 | dynamodb_table=f"pretf-tfstate-{var.envtype}", 54 | key="terraform.tfstate", 55 | profile=var.aws_profile, 56 | region=var.aws_region, 57 | ) 58 | ``` 59 | 60 | ## Terraform S3 remote state 61 | 62 | Pretf can generate S3 remote state data source blocks, with full support for MFA prompts. 63 | 64 | ```python 65 | # terraform.tf.py 66 | 67 | from pretf.aws import terraform_remote_state_s3 68 | 69 | 70 | def pretf_blocks(var): 71 | yield terraform_remote_state_s3("ecs_cluster", config={ 72 | "bucket": var.ecs_cluster_remote_state["bucket"], 73 | "key": var.ecs_cluster_remote_state["key"], 74 | "profile": var.ecs_cluster_remote_state["profile"], 75 | "region": var.ecs_cluster_remote_state["region"], 76 | }) 77 | ``` 78 | 79 | ## Multiple AWS accounts 80 | 81 | It is easy to work with multiple AWS accounts from the same Terraform stack. This is something that is not possible with Terraform wrappers that rely on environment variables alone, because environment variables can only be used for 1 set of credentials at a time. 
82 | 83 | ```python 84 | # aws.tf.py 85 | 86 | from pretf.aws import provider_aws 87 | 88 | 89 | def pretf_blocks(var): 90 | for alias, profile in var.aws_profiles.items(): 91 | yield provider_aws( 92 | alias=alias, 93 | profile=profile, 94 | region=var.aws_region, 95 | ) 96 | ``` 97 | -------------------------------------------------------------------------------- /pretf/pretf/exceptions.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | from os.path import relpath 3 | from pathlib import Path 4 | from typing import TYPE_CHECKING, Any, Dict, List, Sequence 5 | 6 | if TYPE_CHECKING: 7 | from pretf.variables import VariableDefinition, VariableValue # noqa: F401 8 | 9 | 10 | class FunctionNotFoundError(Exception): 11 | pass 12 | 13 | 14 | class RequiredFilesNotFoundError(Exception): 15 | def __init__(self, name_patterns: Sequence[str], root: Path): 16 | self.name_patterns = name_patterns 17 | self.root = root 18 | 19 | def get_candidates(self) -> List[str]: 20 | 21 | dirs: Dict[Path, List[str]] = defaultdict(list) 22 | for pattern in self.name_patterns: 23 | for path in self.root.rglob(pattern): 24 | dirs[path.parent].append(pattern) 25 | 26 | matching_dirs = [] 27 | for path, patterns in dirs.items(): 28 | if len(patterns) == len(self.name_patterns): 29 | matching_dirs.append(path) 30 | 31 | relative_paths = [] 32 | for path in sorted(matching_dirs): 33 | relative_paths.append(relpath(path)) 34 | 35 | return relative_paths 36 | 37 | 38 | class VariableError(Exception): 39 | def __init__(self) -> None: 40 | self.errors: List[VariableError] = [] 41 | 42 | def add(self, error: "VariableError") -> None: 43 | self.errors.append(error) 44 | 45 | def __str__(self) -> str: 46 | errors = "\n".join(f" {error}" for error in self.errors) 47 | return f"\n{errors}" 48 | 49 | 50 | class VariableAlreadyDefinedError(VariableError): 51 | def __init__( 52 | self, old_var: "VariableDefinition", new_var: "VariableDefinition" 53 | ) -> None: 54 | self.old_var = old_var 55 | self.new_var = new_var 56 | 57 | def __str__(self) -> str: 58 | return f"create: {self.new_var.source} cannot define var.{self.new_var.name} because {self.old_var.source} already defined it" 59 | 60 | 61 | class VariableNotConsistentError(VariableError): 62 | def __init__(self, old_var: "VariableValue", new_var: "VariableValue") -> None: 63 | self.old_var = old_var 64 | self.new_var = new_var 65 | 66 | def __str__(self) -> str: 67 | return f"create: {self.new_var.source} cannot set var.{self.new_var.name}={repr(self.new_var.value)} because {self.old_var.source} set var.{self.old_var.name}={repr(self.old_var.value)}" 68 | 69 | 70 | class VariableNotDefinedError(VariableError): 71 | def __init__(self, name: str, consumer: Any): 72 | self.name = name 73 | self.consumer = consumer 74 | 75 | def __str__(self) -> str: 76 | return f"create: {self.consumer} cannot access var.{self.name} because it has not been defined" 77 | 78 | 79 | class VariableNotPopulatedError(VariableError): 80 | def __init__(self, name: str, consumer: Any): 81 | self.name = name 82 | self.consumer = consumer 83 | 84 | def __str__(self) -> str: 85 | return f"create: {self.consumer} cannot access var.{self.name} because it has no value" 86 | -------------------------------------------------------------------------------- /pretf/pretf/api.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from pathlib import Path 3 | from typing import Any, 
Optional, Union 4 | 5 | from . import labels, log 6 | from .blocks import Block 7 | from .util import is_verbose 8 | 9 | 10 | def block(block_type: str, *args: Any) -> Block: 11 | if args: 12 | body = args[-1] 13 | labels = list(args[:-1]) 14 | else: 15 | body = None 16 | labels = [] 17 | return Block(block_type, labels, body) 18 | 19 | 20 | def get_outputs(cwd: Union[Path, str], verbose: Optional[bool] = None) -> dict: 21 | """ 22 | Runs `pretf output` in the specified directory and returns the values. 23 | If the path is not anchored (i.e. does not start with ./ or ../ or /) 24 | then it will check the current directory and all parent directories 25 | until found. 26 | 27 | """ 28 | 29 | from pretf.command import PretfCommand 30 | 31 | if isinstance(cwd, Path): 32 | # Use path as-is. 33 | path = cwd 34 | elif isinstance(cwd, str): 35 | if cwd.startswith("./") or cwd.startswith("../") or cwd.startswith("/"): 36 | # Use path as-is. 37 | path = Path(cwd) 38 | else: 39 | # Look for this unanchored path in the current directory 40 | # and all parent directories until found. 41 | here = Path.cwd() 42 | while True: 43 | path = here / cwd 44 | if path.is_dir(): 45 | # Use this matching directory. 46 | break 47 | elif here.parent == here: 48 | # Reached the top, use the path as a relative path 49 | # and let Terraform give an error. 50 | path = Path(cwd) 51 | break 52 | else: 53 | # Move up a directory and let it try there. 54 | here = here.parent 55 | else: 56 | raise TypeError(cwd) 57 | 58 | if is_verbose(verbose) or not path.is_dir(): 59 | 60 | # Find the calling directory of this function, usually the directory 61 | # containing the pretf.workflow.py file that has called this function. 62 | frame = inspect.currentframe() 63 | if not frame: 64 | raise Exception("get_outputs() called from unknown frame") 65 | caller_frame = frame.f_back 66 | if not caller_frame: 67 | raise Exception("get_outputs() called from unknown caller") 68 | caller_info = inspect.getframeinfo(caller_frame) 69 | caller_file = caller_info.filename 70 | 71 | if path.is_dir(): 72 | log.ok(f"outputs: {cwd} -> {caller_file}") 73 | else: 74 | if path.exists(): 75 | raise log.bad( 76 | f"get_outputs({cwd!r}) in {caller_file}: {path} is not a directory" 77 | ) 78 | else: 79 | raise log.bad( 80 | f"get_outputs({cwd!r}) in {caller_file}: {path} does not exist" 81 | ) 82 | 83 | outputs = PretfCommand(cwd=path, verbose=False).output() 84 | 85 | values = {} 86 | for name, data in outputs.items(): 87 | values[name] = data["value"] 88 | 89 | return values 90 | 91 | 92 | __all__ = ["block", "get_outputs", "labels", "log"] 93 | -------------------------------------------------------------------------------- /docs/api/projects.md: -------------------------------------------------------------------------------- 1 | Pretf finds and imports Python files from the current directory. Pretf calls specific functions in those files. This page explains the files and functions that you must have in your project for Pretf to work. 2 | 3 | ## \*.tf.py 4 | 5 | By default, Pretf looks for `*.tf.py` files and creates matching `*.tf.json` files. For example, a file named `iam.tf.py` would create `iam.tf.json`. 6 | 7 | These files must contain a `pretf_blocks()` function. This function can optionally accept any of the arguments `path`, `terraform` and `var`, which provide access to the same values as in Terraform. This function must be a generator that yields only blocks and/or dictionaries representing Terraform blocks. 
8 | 9 | Example:
10 | 11 | ```python
12 | # iam.tf.py
13 | 14 | from pretf.api import block
15 | 16 | 17 | def pretf_blocks(var):
18 | for user_name in var.user_names:
19 | 20 | user = yield block("resource", "aws_iam_user", user_name, {
21 | "name": user_name,
22 | })
23 | 24 | label = f"{user_name}_in_{var.group_name}"
25 | yield block("resource", "aws_iam_user_group_membership", label, {
26 | "user": user.name,
27 | "groups": [var.group_name],
28 | })
29 | ```
30 | 31 | ## \*.tfvars.py
32 | 33 | By default, Pretf looks for `*.tfvars.py` files and creates matching `*.tfvars.json` files. For example, a file named `terraform.tfvars.py` would create `terraform.tfvars.json`.
34 | 35 | These files must contain a `pretf_variables()` function. This function can optionally accept any of the arguments `path`, `terraform` and `var`, which provide access to the same values as in Terraform. This function must be a generator that yields dictionaries containing variable values. Each dictionary can contain any number of variables.
36 | 37 | Example:
38 | 39 | ```python
40 | # terraform.tfvars.py
41 | 42 | 43 | def pretf_variables():
44 | yield {"environment": "dev"}
45 | yield {
46 | "group_name": "admin",
47 | "user_names": ["peanut", "cornelius"],
48 | }
49 | ```
50 | 51 | ## pretf.workflow.py
52 | 53 | When Pretf runs, it looks for a `pretf.workflow.py` file in the current or parent directories. If found, Pretf will call the `pretf_workflow()` function from that file. If this file does not exist, then Pretf runs in default mode.
54 | 55 | This function can optionally accept the arguments `path` and `terraform` which provide access to the same values as in Terraform (e.g. `path.module`, `terraform.workspace`). It must return an exit status (e.g. 0 for success, 1 for error) or a `subprocess.CompletedProcess` object.
56 | 57 | The following is a valid `pretf.workflow.py` file that performs the same functionality as default mode. It can be extended with custom logic, or changed entirely.
58 | 59 | Example:
60 | 61 | ```python
62 | # pretf.workflow.py
63 | 64 | from pretf import workflow
65 | 66 | 67 | def pretf_workflow():
68 | # Delete *.tf.json and *.tfvars.json files.
69 | workflow.delete_files()
70 | 71 | # Create *.tf.json and *.tfvars.json files
72 | # from *.tf.py and *.tfvars.py files.
73 | created = workflow.create_files()
74 | 75 | # Execute Terraform, raising an exception if it fails.
76 | proc = workflow.execute_terraform()
77 | 78 | # Clean up created files if successful.
79 | workflow.clean_files(created)
80 | 81 | return proc
82 | ```
83 | -------------------------------------------------------------------------------- /pretf/pretf/collections.py: --------------------------------------------------------------------------------
1 | from functools import wraps
2 | from typing import Any, Callable, Generator, Iterable, Sequence, Union
3 | 4 | from .parser import get_outputs_from_block
5 | from .render import call_pretf_function, unwrap_yielded
6 | from .variables import VariableStore, VariableValue, get_variable_definitions_from_block
7 | 8 | 9 | class Collection(Iterable):
10 | def __init__(
11 | self, blocks: Sequence[Union[dict, "Collection"]], outputs: dict
12 | ) -> None:
13 | self.__blocks = blocks
14 | self.__outputs = outputs
15 | 16 | def __getattr__(self, name: str) -> Any:
17 | if name in self.__outputs:
18 | return self.__outputs[name]
19 | raise AttributeError(f"output not defined: {name}")
20 | 21 | def __iter__(self) -> Generator[dict, Any, None]:
22 | for block in self.__blocks:
23 | if isinstance(block, Collection):
24 | yield from block
25 | else:
26 | yield block
27 | 28 | 29 | def collect(func: Callable) -> Callable:
30 | """
31 | This is a decorator used to create a collection. Collections are similar
32 | to Terraform modules except the resources are included in the root
33 | module rather than under a named module.
34 | 35 | Decorated functions should:
36 | 37 | * Accept a single argument "var"
38 | * Yield pretf.api.block blocks
39 | * Optionally including "variable" blocks to define inputs
40 | * Optionally including "output" blocks to define outputs
41 | 42 | When using a collection, any required inputs defined by variable blocks
43 | must be passed in as keyword arguments. Any outputs defined by output
44 | blocks can be accessed as attributes of the collection.
45 | 46 | """
47 | 48 | @wraps(func)
49 | def wrapped(**kwargs: dict) -> Collection:
50 | 51 | # Create a store to track variables.
52 | var_store = VariableStore()
53 | 54 | # Load variable values from kwargs passed into the collection function.
55 | for key, value in kwargs.items():
56 | var_value = VariableValue(name=key, value=value, source="kwargs")
57 | var_store.add(var_value)
58 | 59 | # Call the collection function, passing in "path", "terraform" and "var" if required.
60 | gen = call_pretf_function(func=func, var=var_store.proxy(func.__name__))
61 | 62 | blocks = []
63 | outputs = {}
64 | 65 | yielded = None
66 | while True:
67 | 68 | try:
69 | yielded = gen.send(yielded)
70 | except StopIteration:
71 | break
72 | 73 | for block in unwrap_yielded(yielded):
74 | 75 | # Use variable blocks to update the variable store.
76 | var_def = None
77 | for var_def in get_variable_definitions_from_block(
78 | block, func.__name__
79 | ):
80 | var_store.add(var_def)
81 | 82 | # Use output blocks to update the output values.
83 | output = None
84 | for output in get_outputs_from_block(block):
85 | name = output["name"]
86 | value = output["value"]
87 | outputs[name] = value
88 | 89 | # Use any other blocks in the resulting JSON.
90 | if not var_def and not output:
91 | blocks.append(block)
92 | 93 | return Collection(blocks, outputs)
94 | 95 | return wrapped
96 | -------------------------------------------------------------------------------- /docs/tutorial/terraform-variables.md: --------------------------------------------------------------------------------
1 | ## Starting code
2 | 3 | Let's start by showing `animals.tf.py` from the previous pages, plus a new file `users.tf.py`. We can see that these files are hardcoding their data: the lists of animals and users.
4 | 5 | ```python
6 | # animals.tf.py
7 | 8 | from pretf.api import block
9 | 10 | 11 | def pretf_blocks():
12 | animals = ["dog", "cat", "buffalo", "rabbit", "badger"] # hardcoded
13 | for name in animals:
14 | animal = yield block("resource", "random_integer", name, {
15 | "min": 1,
16 | "max": 10,
17 | })
18 | yield block("output", name, {"value": animal.result})
19 | ```
20 | 21 | ```python
22 | # users.tf.py
23 | 24 | from pretf.api import block
25 | 26 | 27 | def pretf_blocks():
28 | users = ["ray", "violet"] # hardcoded
29 | for name in users:
30 | yield block("resource", "aws_iam_user", name, {
31 | "name": name,
32 | })
33 | ```
34 | 35 | ## Terraform variables
36 | 37 | Terraform variables can be accessed in Pretf by adding a `var` argument to the `pretf_blocks()` function. Pretf will see this argument in the function signature and pass in a variables object. Let's use that instead of hardcoding values:
38 | 39 | 40 | ```terraform
41 | # variables.tf
42 | 43 | variable "animals" {
44 | type = list(string)
45 | }
46 | 47 | variable "users" {
48 | type = list(string)
49 | }
50 | ```
51 | 52 | ```terraform
53 | # terraform.tfvars
54 | 55 | animals = ["dog", "cat", "buffalo", "rabbit", "badger"]
56 | 57 | users = ["ray", "violet"]
58 | ```
59 | 60 | ```python
61 | # animals.tf.py
62 | 63 | from pretf.api import block
64 | 65 | 66 | def pretf_blocks(var): # var added to the function signature
67 | for name in var.animals: # accessing a variable
68 | animal = yield block("resource", "random_integer", name, {
69 | "min": 1,
70 | "max": 10,
71 | })
72 | yield block("output", name, {
73 | "value": animal.result,
74 | })
75 | ```
76 | 77 | ```python
78 | # users.tf.py
79 | 80 | from pretf.api import block
81 | 82 | 83 | def pretf_blocks(var): # var added to the function signature
84 | for name in var.users: # accessing a variable
85 | yield block("resource", "aws_iam_user", name, {
86 | "name": name
87 | })
88 | ```
89 | 90 | ## Variable definition precedence
91 | 92 | From the [Terraform documentation](https://www.terraform.io/docs/configuration/variables.html#variable-definition-precedence
93 | ):
94 | 95 |
96 | > Terraform loads variables in the following order, with later sources taking precedence over earlier ones:
97 | >
98 | > * Environment variables.
99 | > * The terraform.tfvars file, if present.
100 | > * The terraform.tfvars.json file, if present.
101 | > * Any *.auto.tfvars or *.auto.tfvars.json files, processed in lexical order of their filenames.
102 | > * Any -var and -var-file options on the command line, in the order they are provided.
103 | 104 |
105 | 106 | 107 | Pretf uses the same rules when resolving variable values.
108 | 109 | If a project has `*.tfvars.py` files to generate `*.tfvars.json` files that would change the value of a variable (i.e. one of the above sources has already set the variable to a different value) then Pretf will exit with a descriptive error message. This ensures that Python and Terraform run with consistent variable values.
-------------------------------------------------------------------------------- /docs/index.md: --------------------------------------------------------------------------------
1 | # Pretf
2 | 3 | Pretf is a completely transparent, drop-in Terraform wrapper that generates Terraform configuration with Python. It requires no configuration and no changes to standard Terraform projects to start using it.
4 | 5 | Terraform includes first-class support for configuration in JSON files. Pretf generates those JSON files using your Python functions.
6 | 7 | ## Requirements
8 | 9 | * Python 3.6+
10 | * Terraform 0.12.0+
11 | 12 | ## Installation
13 | 14 | For core functionality:
15 | 16 | ```shell
17 | pip install pretf
18 | ```
19 | 20 | For AWS functionality:
21 | 22 | ```shell
23 | pip install pretf.aws
24 | ```
25 | 26 | Install them both:
27 | 28 | ```shell
29 | pip install pretf[aws]
30 | ```
31 | 32 | ## Overview
33 | 34 | Here is what happens when you run `pretf`:
35 | 36 | 1. `*.tf.json` and `*.tfvars.json` files are deleted.
37 | 2. `*.tf.json` and `*.tfvars.json` files are created from `*.tf.py` and `*.tfvars.py` files.
38 | 3. Terraform is executed, passing through any command line arguments.
39 | 4. Created files are cleaned up.
40 | 41 | For example, with `iam.tf.py`:
42 | 43 | ```python
44 | from pretf.api import block
45 | 46 | 47 | def pretf_blocks(var):
48 | 49 | group = yield block("resource", "aws_iam_group", "admins", {
50 | "name": "admins",
51 | })
52 | 53 | for name in var.admin_user_names:
54 | 55 | user = yield block("resource", "aws_iam_user", name, {
56 | "name": name,
57 | })
58 | 59 | yield block("resource", "aws_iam_user_group_membership", name, {
60 | "user": user.name,
61 | "groups": [
62 | group.name,
63 | ],
64 | })
65 | ```
66 | 67 | It would generate `iam.tf.json`:
68 | 69 | ```json
70 | [
71 | {
72 | "resource": {
73 | "aws_iam_group": {
74 | "admins": {
75 | "name": "admins"
76 | }
77 | }
78 | }
79 | },
80 | {
81 | "resource": {
82 | "aws_iam_user": {
83 | "ray": {
84 | "name": "ray"
85 | }
86 | }
87 | }
88 | },
89 | {
90 | "resource": {
91 | "aws_iam_user_group_membership": {
92 | "ray": {
93 | "user": "${aws_iam_user.ray.name}",
94 | "groups": [
95 | "${aws_iam_group.admins.name}"
96 | ]
97 | }
98 | }
99 | }
100 | },
101 | {
102 | "resource": {
103 | "aws_iam_user": {
104 | "violet": {
105 | "name": "violet"
106 | }
107 | }
108 | }
109 | },
110 | {
111 | "resource": {
112 | "aws_iam_user_group_membership": {
113 | "violet": {
114 | "user": "${aws_iam_user.violet.name}",
115 | "groups": [
116 | "${aws_iam_group.admins.name}"
117 | ]
118 | }
119 | }
120 | }
121 | }
122 | ]
123 | ```
124 | 125 | And then Terraform would manage those resources.
126 | 127 | ## Configuration
128 | 129 | Configuration is completely optional. By default, Pretf will delete `*.tf.json` and `*.tfvars.json` files, create `*.tf.json` and `*.tfvars.json` files from `*.tf.py` and `*.tfvars.py` files, and then execute Terraform.
130 | 131 | To make something else happen when Pretf runs, simply create a `pretf.workflow.py` file containing a `pretf_workflow()` function.
This could include: 132 | 133 | * Using files from outside of the current directory. 134 | * Not running `terraform` after generating files. 135 | * Doing anything, because you configure Pretf by writing a Python function. 136 | 137 | ## Source code 138 | 139 | The source code for Pretf is located at: [https://github.com/raymondbutcher/pretf](https://github.com/raymondbutcher/pretf) 140 | -------------------------------------------------------------------------------- /pretf/pretf/test.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import contextlib 3 | import functools 4 | import inspect 5 | import os 6 | from json import dump as json_dump 7 | from typing import Any, Callable, Dict, Generator, List, Type 8 | 9 | import pytest 10 | 11 | from pretf import command, render 12 | 13 | 14 | class SimpleTestMeta(type): 15 | def __new__( 16 | cls: Type["SimpleTestMeta"], 17 | name: str, 18 | bases: tuple, 19 | dct: dict, 20 | ) -> "SimpleTestMeta": 21 | """ 22 | Wraps all test methods with the pretf_test_function() decorator. 23 | 24 | """ 25 | 26 | for name, value in list(dct.items()): 27 | if name.startswith("test_") and callable(value): 28 | dct[name] = pretf_test_function(value) 29 | 30 | return super().__new__(cls, name, bases, dct) 31 | 32 | def __init__(self, name: str, bases: tuple, dct: dict) -> None: 33 | """ 34 | Adds any test method using the @always decorator to cls._always 35 | so the pretf_test_function() can run it even when previous tests 36 | have failed. 37 | 38 | """ 39 | 40 | super().__init__(name, bases, dct) 41 | 42 | self._always = set() 43 | for name, value in list(dct.items()): 44 | if hasattr(value, "_always"): 45 | self._always.add(value.__name__) 46 | 47 | 48 | class SimpleTest(metaclass=SimpleTestMeta): 49 | 50 | pretf = command.PretfCommand() 51 | tf = command.TerraformCommand() 52 | 53 | @contextlib.contextmanager 54 | def create(self, file_name: str) -> Generator[None, None, None]: 55 | 56 | assert file_name.endswith(".tf.json") or file_name.endswith(".tfvars.json") 57 | assert "/" not in file_name 58 | 59 | if not hasattr(self, "_blocks"): 60 | self._blocks: Dict[str, list] = collections.defaultdict(list) 61 | 62 | if not hasattr(self, "_create"): 63 | self._create: List[str] = [] 64 | 65 | self._create.append(file_name) 66 | 67 | yield 68 | 69 | self._create.pop() 70 | 71 | contents = self._blocks.pop(file_name) 72 | 73 | with open(file_name, "w") as open_file: 74 | json_dump(contents, open_file, indent=2, default=render.json_default) 75 | 76 | 77 | def always(func: Callable) -> Callable: 78 | """ 79 | Marks a test method to run even when previous tests have failed. 80 | 81 | """ 82 | 83 | func._always = True # type: ignore 84 | return func 85 | 86 | 87 | def pretf_test_function(func: Callable) -> Callable: 88 | @functools.wraps(func) 89 | def wrapped(self: Any, *args: tuple, **kwargs: dict) -> Any: 90 | 91 | if hasattr(self.__class__, "_failed"): 92 | if func.__name__ not in self._always: 93 | pytest.xfail(f"{self.__class__} failed") 94 | 95 | # Change the working directory to the test file. 
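# This makes relative paths used by self.create() and the tf/pretf
# commands resolve against the test module's directory rather than
# the directory that pytest was invoked from.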
96 | cwd_before = os.getcwd() 97 | func_file = inspect.getfile(func) 98 | func_dir = os.path.dirname(func_file) 99 | os.chdir(func_dir) 100 | 101 | try: 102 | 103 | if inspect.isgeneratorfunction(func): 104 | 105 | gen = func(self, *args, **kwargs) 106 | 107 | yielded = None 108 | while True: 109 | 110 | try: 111 | yielded = gen.send(yielded) 112 | except StopIteration: 113 | break 114 | 115 | if not hasattr(self, "_create") or not self._create: 116 | raise Exception( 117 | "yield must be called inside a self.create() context" 118 | ) 119 | 120 | file_name = self._create[-1] 121 | for block in render.unwrap_yielded(yielded): 122 | self._blocks[file_name].append(block) 123 | 124 | else: 125 | return func(self, *args, **kwargs) 126 | 127 | except Exception: 128 | 129 | self.__class__._failed = func.__name__ 130 | raise 131 | 132 | finally: 133 | 134 | os.chdir(cwd_before) 135 | 136 | return wrapped 137 | -------------------------------------------------------------------------------- /docs/comparisons.md: -------------------------------------------------------------------------------- 1 | These comparisons are mostly based on impressions of other projects, not necessarily based on experience from using them. 2 | 3 | ## Terraform 4 | 5 | Pretf is not a replacement for Terraform. It is used to generate Terraform configuration files, and then run Terraform. Pretf can be used to generate more complicated Terraform configuration files than the Terraform configuration language normally allows. 6 | 7 | ## Terragrunt 8 | 9 | On the surface, Terragrunt appears to: 10 | 11 | 1. Add extra functions to Terraform (with interpolation syntax, in a place where Terraform will ignore it). 12 | 2. Work with some Terragrunt-specific conventions or patterns to provide extra features on top of Terraform. 13 | 14 | Pretf avoids working within the confines of `*.tf` files or supplied functions, instead letting you write Python code, and write your own functions. This is much more flexible, as long as you are familiar with Python. 15 | 16 | Pretf has fewer conventions and should be easier for newcomers to understand. The extra `*.tf.py` files in projects using Pretf should be fairly self-explanatory to anyone that can read Python code. 17 | 18 | The most complicated part of Pretf projects will be when a `pretf.workflow.py` file is added to customise the workflow. However, this file is completely optional and written by you (or a team member) specifically for your project. It contains the logic of your customised workflow, so it should again be fairly self-explanatory. 19 | 20 | Pretf is more flexible and more obvious than Terragrunt, as long as you are familiar with Python. 21 | 22 | ## Pulumi 23 | 24 | Pulumi is an alternative to Terraform that allows you to create resources with Python code, and other languages. 25 | 26 | The Pulumi SDK includes classes for every resource, while Pretf only uses generic strings and dictionaries. Pulumi's approach allows for IDE features like autocompletion and error checking, but it requires ongoing maintenance and updates to their SDK to support new resources. 27 | 28 | If you want to use Terraform, then Pretf gets you some of the selling points of Pulumi (write the logic in Python) without making you switch products. 29 | 30 | ## Jinjaform 31 | 32 | [Jinjaform](https://github.com/claranet/jinjaform) is another Terraform wrapper by the same author as Pretf. 33 | 34 | Jinjaform has been successful in some respects. However, the mixture of Jinja2 templates and HCL is ugly. 
In an attempt to make those templates cleaner, support for custom Jinja2 filters and functions (written in Python) was added. So now Jinjaform mixes HCL, Jinja2 and Python. Pretf is the next step, just getting out of the way and letting you write some Python code. 35 | 36 | ## Custom wrappers 37 | 38 | Wrappers written in Bash or Make seem to have some or all of these problems: 39 | 40 | 1. Have unclear or unobvious command line arguments. 41 | * Which environment and which account? How many possible places can this code be deployed to? Do I need to read the documentation or source code to figure this out? 42 | * The user can make a mistake. 43 | 2. Restricted subset of Terraform CLI commands, making it hard to run unsupported commands. 44 | * E.g. `terraform apply -target=` not supported, so the user has to figure out how to run that command without losing things that the wrapper was doing such as configuring the backend and passing parameters. 45 | * The user can make a mistake. 46 | 3. Use the same directory for multiple remote backends, so it runs the slow `terraform init` command every time. 47 | 4. Require setting AWS credentials before running. 48 | * Often requires running a separate command, and knowing which credentials to use. 49 | * Making it difficult or impossible to work with multiple AWS accounts in the same stack. 50 | * The user can make a mistake. 51 | 52 | Pretf is a transparent wrapper, meaning it performs its functionality and then executes Terraform, passing along any command line arguments. Transparent wrappers avoid issues 1 and 2. 53 | 54 | Pretf can be used to dynamically generate the backend configuration, and work with files from other directories. This allows for directory-based environments or stacks, which avoids issue 3. 55 | 56 | Pretf also has good support for AWS credentials and backends. AWS credentials can be set within Python programmatically, so the user does not have to set credentials separately. This avoids issue 4. 57 | -------------------------------------------------------------------------------- /docs/api/api.md: -------------------------------------------------------------------------------- 1 | ## block 2 | 3 | This is used to create Terraform configuration blocks from within `pretf_blocks()` functions in `*.tf.py` files. Blocks must be yielded to be included in the generated JSON files. 4 | 5 | Blocks are the most fundamental part of Terraform configuration. Read the [documentation](https://www.terraform.io/docs/configuration/syntax.html) to learn more about blocks. 6 | 7 | Signature: 8 | 9 | ```python 10 | def block(block_type: str, *labels: str, body: Optional[dict] = None) -> Block 11 | 12 | block_type: 13 | block type such as "resource", "variable", "provider" 14 | labels: 15 | labels for the block 16 | body: 17 | the body of the block 18 | 19 | returns: 20 | configuration block 21 | ``` 22 | 23 | Example: 24 | 25 | ```python 26 | from pretf.api import block 27 | 28 | 29 | def pretf_blocks(): 30 | 31 | # The group resource is defined in another file, 32 | # but we want to reference it here, so we can 33 | # create a block with an empty body. It is not 34 | # yielded so it won't be included in the JSON. 35 | group = block("resource", "aws_iam_group", "example", {}) 36 | 37 | # Create and yield a block to include it in the JSON. 38 | user = yield block("resource", "aws_iam_user", "example", { 39 | "name": "example", 40 | }) 41 | 42 | # Create and yield another block, this time demonstrating 43 | # how block attributes can be accessed. 
The resulting JSON 44 | # will contain Terraform references like: 45 | # "users": "${aws_iam_user.example.name}", 46 | # "groups": ["${aws_iam_group.example.name}"] 47 | yield block("resource", "aws_iam_user_group_membership", "example", { 48 | "user": user.name, 49 | "groups": [group.name] 50 | }) 51 | ``` 52 | 53 | ## get_outputs 54 | 55 | Runs `pretf output` in the specified directory and returns the values. If the path is not anchored (i.e. does not start with `./` or `../` or `/`) then it will check the current directory and all parent directories until found. 56 | 57 | Signature: 58 | 59 | ```python 60 | def get_outputs(cwd: Union[Path, str], verbose: Optional[bool] = None) -> dict: 61 | 62 | cwd: 63 | directory where Pretf/Terraform will run 64 | verbose: 65 | whether to print information 66 | 67 | returns: 68 | output values 69 | ``` 70 | 71 | Example: 72 | 73 | ```python 74 | from pretf.api import get_outputs 75 | 76 | 77 | def pretf_variables(): 78 | vpc_outputs = get_outputs("vpc") 79 | yield { 80 | "vpc_id": vpc_outputs["vpc_id"], 81 | "vpc_cidr_block": vpc_outputs["vpc_cidr_block"], 82 | } 83 | ``` 84 | 85 | ## log 86 | 87 |
### log.accept
88 | 89 | Prompts the user to enter "yes" or "no". Returns `True` if the response was "yes", otherwise `False`. Pressing Ctrl-C counts as "no". 90 | 91 | Signature: 92 | 93 | ```python 94 | def accept(message: Any) -> bool: 95 | 96 | message: 97 | the message to display 98 | 99 | returns: 100 | whether the user entered "yes" 101 | ``` 102 | 103 | Example: 104 | 105 | ```python 106 | from pretf.api import log 107 | 108 | 109 | def pretf_workflow(): 110 | if log.accept("do you wish to continue?"): 111 | print("user accepted the prompt") 112 | else: 113 | print("user did not accept the prompt") 114 | ``` 115 | 116 |
### log.bad
117 | 118 | Displays a message prefixed with `[pretf]` in red. Can be raised as an exception to display the message and then exit. 119 | 120 | Signature: 121 | 122 | ```python 123 | def bad(message: Any) -> None: 124 | 125 | message: 126 | the message to display 127 | 128 | returns: 129 | None 130 | ``` 131 | 132 | Example: 133 | 134 | ```python 135 | from pretf.api import log 136 | 137 | 138 | def pretf_workflow(): 139 | log.bad("something bad happened") 140 | ``` 141 | 142 | 
### log.ok
143 | 144 | Displays a message prefixed with `[pretf]` in cyan. Can be raised as an exception to display the message and then exit. 145 | 146 | Signature: 147 | 148 | ```python 149 | def ok(message: Any) -> None: 150 | 151 | message: 152 | the message to display 153 | 154 | returns: 155 | None 156 | ``` 157 | 158 | Example: 159 | 160 | ```python 161 | from pretf.api import log 162 | 163 | 164 | def pretf_workflow(): 165 | log.ok("something normal happened") 166 | ``` 167 | -------------------------------------------------------------------------------- /docs/api/aws.md: -------------------------------------------------------------------------------- 1 | ## get_account_id 2 | 3 | Returns the AWS account ID for the provided [boto3.Session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html), or for a session created from the given keyword arguments. 4 | 5 | Signature: 6 | 7 | ```python 8 | get_account_id(session=None, **kwargs) 9 | 10 | session: 11 | optional boto3.Session 12 | **kwargs: 13 | optional arguments for creating a boto3.Session 14 | 15 | returns: 16 | str 17 | ``` 18 | 19 | Example: 20 | 21 | ```python 22 | from pretf.aws import get_account_id 23 | 24 | 25 | def pretf_workflow(): 26 | account_id = get_account_id(profile_name="example") 27 | 28 | 29 | def pretf_blocks(var): 30 | account_id = get_account_id(profile_name=var.aws_profile) 31 | ``` 32 | 33 | ## get_frozen_credentials 34 | 35 | Returns AWS credentials for the provided [boto3.Session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html), or for a session created from the given keyword arguments. 36 | 37 | Signature: 38 | 39 | ```python 40 | get_frozen_credentials(session=None, **kwargs) 41 | 42 | session: 43 | optional boto3.Session 44 | **kwargs: 45 | optional arguments for creating a boto3.Session 46 | 47 | returns: 48 | botocore.credentials.ReadOnlyCredentials 49 | ``` 50 | 51 | Example: 52 | 53 | ```python 54 | from pretf.aws import get_frozen_credentials 55 | 56 | 57 | def pretf_workflow(): 58 | creds = get_frozen_credentials(profile_name="example") 59 | 60 | 61 | def pretf_blocks(var): 62 | creds = get_frozen_credentials(profile_name=var.aws_profile) 63 | ``` 64 | 65 | ## get_session 66 | 67 | Returns a [boto3.Session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html). Uses [boto-source-profile-mfa](https://github.com/claranet/boto-source-profile-mfa) if installed. 68 | 69 | Signature: 70 | 71 | ```python 72 | get_session(**kwargs) 73 | 74 | **kwargs: 75 | optional arguments for creating a boto3.Session 76 | 77 | returns: 78 | boto3.Session 79 | ``` 80 | 81 | Example: 82 | 83 | ```python 84 | from pretf.aws import get_session 85 | 86 | 87 | def pretf_workflow(): 88 | session = get_session(profile_name="example") 89 | 90 | 91 | def pretf_blocks(var): 92 | session = get_session(profile_name=var.aws_profile) 93 | ``` 94 | 95 | ## provider_aws 96 | 97 | Returns an [AWS provider](https://www.terraform.io/docs/providers/aws/index.html) block. In cases where Terraform might not support the `profile` option, it will be replaced with static credentials so that Terraform can use them instead. This is particularly useful when the `profile` needs to prompt for an MFA token, which Terraform does not support. 
98 | 99 | Signature: 100 | 101 | ```python 102 | provider_aws(**body) 103 | 104 | **body: 105 | required dict of configuration options as per Terraform documentation 106 | 107 | returns: 108 | Block 109 | ``` 110 | 111 | Example: 112 | 113 | ```python 114 | from pretf.aws import provider_aws 115 | 116 | 117 | def pretf_blocks(var): 118 | yield provider_aws( 119 | profile=var.aws_profile, 120 | region=var.aws_region, 121 | ) 122 | ``` 123 | 124 | ## terraform_backend_s3 125 | 126 | Ensures that the S3 backend exists, prompting to create it if necessary, sets the credentials as environment variables in some cases, and returns a Terraform configuration block for it. Accepts the same options as the [S3 backend configuration variables](https://www.terraform.io/docs/backends/types/s3.html#configuration-variables). 127 | 128 | Signature: 129 | 130 | ```python 131 | terraform_backend_s3(bucket, dynamodb_table, **config) 132 | 133 | bucket: 134 | required str for the S3 bucket name to use for storing the Terraform state file 135 | dynamodb_table: 136 | required str for the DynamoDB table to use for locking the Terraform state file 137 | **config: 138 | required dict of other configuration options as per Terraform documentation 139 | 140 | returns: 141 | Block 142 | ``` 143 | 144 | Example: 145 | 146 | ```python 147 | from pretf.aws import terraform_backend_s3 148 | 149 | 150 | def pretf_blocks(var): 151 | yield terraform_backend_s3( 152 | bucket="example-tfstate-bucket", 153 | dynamodb_table="example-tfstate-table", 154 | key="terraform.tfstate", 155 | profile=var.aws_profile, 156 | region="eu-west-1", 157 | ) 158 | ``` 159 | -------------------------------------------------------------------------------- /tests/test_collections.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pretf.api import block, labels 4 | from pretf.blocks import Block 5 | from pretf.collections import collect 6 | from pretf.exceptions import VariableNotPopulatedError 7 | 8 | 9 | @collect 10 | def iam_user(var): 11 | # Inputs. 12 | yield block("variable", "name", {}) 13 | yield block("variable", "path", {"default": "/"}) 14 | 15 | # Resources. 16 | user = yield block( 17 | "resource", "aws_iam_user", var.name, {"name": var.name, "path": var.path} 18 | ) 19 | 20 | # Outputs. 21 | yield block("output", "name", {"value": var.name}) 22 | yield block("output", "resource", {"value": user}) 23 | 24 | 25 | @collect 26 | def iam_group(var): 27 | # Inputs. 28 | yield block("variable", "name", {}) 29 | yield block("variable", "path", {"default": "/"}) 30 | 31 | # Resources. 32 | group = yield block("resource", "aws_iam_group", var.name, {"name": var.name}) 33 | 34 | # Outputs. 35 | yield block("output", "name", {"value": var.name}) 36 | yield block("output", "resource", {"value": group}) 37 | 38 | 39 | @collect 40 | def iam_group_with_users(var): 41 | # Inputs. 42 | yield block("variable", "group_name", {}) 43 | yield block("variable", "user_names", {}) 44 | 45 | # Yield resources from a nested collection. 46 | group = yield iam_group(name=var.group_name) 47 | 48 | # Yield resources from a nested collection. 49 | users = {} 50 | for name in var.user_names: 51 | user = yield iam_user(name=name) 52 | users[name] = user.resource 53 | 54 | # Yield resources from a nested collection, 55 | # using "yield from" this time. 56 | # It can be assigned to a variable this way. 57 | yield from aws_iam_user_group_membership(group=group.resource, users=users) 58 | 59 | # Outputs. 
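# (The outputs yielded below become attributes on the returned
# collection object; that is how group.resource and user.resource
# were accessed above.)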
60 | yield block("output", "group", {"value": group.resource}) 61 | yield block("output", "users", {"value": users}) 62 | 63 | 64 | @collect 65 | def aws_iam_user_group_membership(var): 66 | # Inputs. 67 | yield block("variable", "group", {}) 68 | yield block("variable", "users", {}) 69 | 70 | # Resources. 71 | group_label = labels.get(var.group) 72 | for user_label, user in sorted(var.users.items()): 73 | label = f"{user_label}_in_{group_label}" 74 | yield block( 75 | "resource", 76 | "aws_iam_user_group_membership", 77 | label, 78 | {"user": user.name, "groups": [var.group.name]}, 79 | ) 80 | 81 | 82 | def test_collect(): 83 | 84 | # Create collection with bad inputs. 85 | with pytest.raises(VariableNotPopulatedError): 86 | iam_user() 87 | 88 | # Call collection with valid inputs. 89 | peanut = iam_user(name="peanut") 90 | 91 | # The collection is iterable and contains yielded blocks, 92 | # excluding variables and outputs. 93 | expected = [ 94 | {"resource": {"aws_iam_user": {"peanut": {"name": "peanut", "path": "/"}}}} 95 | ] 96 | assert list(peanut) == expected 97 | 98 | # Yielded outputs can be accessed as attributes. 99 | # This one is a simple string. 100 | assert peanut.name == "peanut" 101 | 102 | # This one is a Block. 103 | user = peanut.resource 104 | assert isinstance(user, Block) 105 | assert user.arn == "${aws_iam_user.peanut.arn}" 106 | 107 | # This one doesn't exist. 108 | with pytest.raises(AttributeError): 109 | peanut.nope 110 | 111 | 112 | def test_nested_collections(): 113 | 114 | # Create a collection that has nested collections. 115 | result = iam_group_with_users(group_name="dogs", user_names=["peanut", "cornelius"]) 116 | 117 | # Check it created the resources from the nested collections. 118 | expected = [ 119 | {"resource": {"aws_iam_group": {"dogs": {"name": "dogs"}}}}, 120 | {"resource": {"aws_iam_user": {"peanut": {"name": "peanut", "path": "/"}}}}, 121 | { 122 | "resource": { 123 | "aws_iam_user": {"cornelius": {"name": "cornelius", "path": "/"}} 124 | } 125 | }, 126 | { 127 | "resource": { 128 | "aws_iam_user_group_membership": { 129 | "cornelius_in_dogs": { 130 | "groups": ["${aws_iam_group.dogs.name}"], 131 | "user": "${aws_iam_user.cornelius.name}", 132 | } 133 | } 134 | } 135 | }, 136 | { 137 | "resource": { 138 | "aws_iam_user_group_membership": { 139 | "peanut_in_dogs": { 140 | "groups": ["${aws_iam_group.dogs.name}"], 141 | "user": "${aws_iam_user.peanut.name}", 142 | } 143 | } 144 | } 145 | }, 146 | ] 147 | assert list(result) == expected 148 | -------------------------------------------------------------------------------- /pretf/pretf/command.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | from json import loads as json_loads 4 | from pathlib import Path 5 | from subprocess import CompletedProcess 6 | from types import TracebackType 7 | from typing import Any, Optional, Type, Union 8 | 9 | from pretf import util, workflow 10 | 11 | 12 | class SensitiveValue: 13 | def __init__(self, value: Any): 14 | self.value = value 15 | 16 | 17 | class TerraformCommand: 18 | def __init__(self, cwd: Union[Path, str] = "", verbose: Optional[bool] = False): 19 | if not isinstance(cwd, Path): 20 | cwd = Path(cwd) 21 | self.cwd = cwd 22 | self.env = os.environ.copy() 23 | self.env["TF_IN_AUTOMATION"] = "1" 24 | self.env["PRETF_VERBOSE"] = "1" if verbose else "0" 25 | self.verbose = verbose 26 | 27 | # Calling the object just returns another object with the specified path.
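# For example (hypothetical usage):
#
#     tf = TerraformCommand()
#     tf("stacks/vpc").init()  # a new command object rooted at stacks/vpc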
28 | 29 | def __call__(self, cwd: Union[Path, str] = "") -> "TerraformCommand": 30 | return self.__class__(cwd or self.cwd) 31 | 32 | # Context manager. 33 | # It doesn't do anything but can make the test code easier to follow. 34 | 35 | def __enter__(self) -> "TerraformCommand": 36 | return self 37 | 38 | def __exit__( 39 | self, 40 | exc_type: Optional[Type[BaseException]], 41 | exc_val: Optional[BaseException], 42 | exc_tb: Optional[TracebackType], 43 | ) -> None: 44 | return None 45 | 46 | # Terraform command. 47 | 48 | def execute(self, *args: str) -> CompletedProcess: 49 | return workflow.execute_terraform( 50 | args=args, 51 | cwd=self.cwd, 52 | env=self.env, 53 | capture=True, 54 | verbose=self.verbose, 55 | ) 56 | 57 | # Terraform shortcuts. 58 | 59 | def apply(self, *args: str) -> dict: 60 | """ 61 | Runs terraform apply, parses the output for output values, 62 | and returns them as a dictionary. 63 | 64 | """ 65 | 66 | apply_args = ["apply", "-json", "-auto-approve=true"] 67 | for arg in args: 68 | if arg not in apply_args: 69 | apply_args.append(arg) 70 | 71 | proc = self.execute(*apply_args) 72 | 73 | outputs = None 74 | for line in proc.stdout.splitlines(): 75 | log = json_loads(line) 76 | if log["type"] == "outputs": 77 | outputs = log["outputs"] 78 | 79 | if outputs is None: 80 | if proc.stderr: 81 | print(proc.stderr, file=sys.stderr) 82 | raise ValueError(f"Could not parse outputs from: {proc.stdout}") 83 | 84 | values = {} 85 | 86 | for name in outputs: 87 | value = outputs[name]["value"] 88 | if outputs[name]["sensitive"]: 89 | value = SensitiveValue(value) 90 | values[name] = value 91 | 92 | return values 93 | 94 | def destroy(self, *args: str) -> str: 95 | """ 96 | Runs terraform destroy and returns the stdout. 97 | 98 | """ 99 | 100 | destroy_args = ["destroy", "-input=false", "-auto-approve=true", "-no-color"] 101 | for arg in args: 102 | if arg not in destroy_args: 103 | destroy_args.append(arg) 104 | return self.execute(*destroy_args).stdout 105 | 106 | def get(self, *args: str) -> str: 107 | """ 108 | Runs terraform get and returns the stdout. 109 | 110 | """ 111 | 112 | get_args = ["get", "-no-color"] 113 | for arg in args: 114 | if arg not in get_args: 115 | get_args.append(arg) 116 | return self.execute(*get_args).stdout 117 | 118 | def init(self, *args: str) -> str: 119 | """ 120 | Runs terraform init and returns the stdout. 121 | 122 | """ 123 | 124 | init_args = ["init", "-input=false", "-no-color"] 125 | for arg in args: 126 | if arg not in init_args: 127 | init_args.append(arg) 128 | return self.execute(*init_args).stdout 129 | 130 | def output(self, *args: str) -> dict: 131 | """ 132 | Runs terraform output and returns the JSON. 133 | 134 | """ 135 | 136 | output_args = ["output", "-json"] 137 | for arg in args: 138 | if arg not in output_args: 139 | output_args.append(arg) 140 | return json_loads(self.execute(*output_args).stdout) 141 | 142 | def plan(self, *args: str) -> str: 143 | """ 144 | Runs terraform plan and returns the stdout. 
145 | 146 | """ 147 | 148 | plan_args = ["plan", "-input=false", "-no-color"] 149 | for arg in args: 150 | if arg not in plan_args: 151 | plan_args.append(arg) 152 | return self.execute(*plan_args).stdout 153 | 154 | 155 | class PretfCommand(TerraformCommand): 156 | def execute(self, *args: str) -> CompletedProcess: 157 | return util.execute( 158 | file="pretf", 159 | args=["pretf"] + list(args), 160 | cwd=self.cwd, 161 | env=self.env, 162 | capture=True, 163 | verbose=self.verbose, 164 | ) 165 | -------------------------------------------------------------------------------- /pretf/pretf/blocks.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterable 2 | from types import ModuleType 3 | from typing import Any, Dict, Generator, List, Optional, Union 4 | 5 | 6 | class BlockModule(ModuleType): 7 | def __init__( 8 | self, block_type: str, labels: Optional[List[str]] = None, needs: int = 0 9 | ): 10 | if labels is None: 11 | labels = [] 12 | 13 | self.block_type = block_type 14 | self.labels = labels 15 | self.needs = needs 16 | 17 | # Also be a module. 18 | name = ".".join([__name__, block_type] + labels) 19 | self.__path__ = name 20 | super().__init__(name) 21 | 22 | def __call__(self, *bodies: Dict[str, Any], **kwargs: Dict[str, Any]) -> "Block": 23 | return Block(self.block_type, self.labels, {})(*bodies, **kwargs) 24 | 25 | def __getattr__(self, name: str) -> Union["BlockModule", "Block"]: 26 | if name.startswith("__"): 27 | raise AttributeError(name) 28 | if self.needs == 0: 29 | return getattr(Block(self.block_type, self.labels, {}), name) 30 | elif self.needs == 1: 31 | return Block(self.block_type, self.labels + [name], {}) 32 | else: 33 | return self.__class__(self.block_type, self.labels + [name], self.needs - 1) 34 | 35 | __getitem__ = __getattr__ 36 | 37 | 38 | class Block(Iterable): 39 | def __init__(self, block_type: str, labels: List[str], body: Dict[str, Any]): 40 | self._block_type = block_type 41 | self._labels = labels 42 | self._body = body 43 | 44 | def __call__(self, *bodies: Dict[str, Any], **kwargs: Dict[str, Any]) -> "Block": 45 | """ 46 | Returns a new block with the specified body. 
47 | 48 | """ 49 | 50 | body: Dict[str, Any] = {} 51 | for each in bodies: 52 | body.update(each) 53 | body.update(kwargs) 54 | return self.__class__(self._block_type, self._labels, body) 55 | 56 | def __iter__(self) -> Generator[tuple, None, None]: 57 | if self._labels: 58 | result: dict = {} 59 | here = result 60 | for label in self._labels[:-1]: 61 | here[label] = {} 62 | here = here[label] 63 | here[self._labels[-1]] = self._body 64 | else: 65 | result = self._body 66 | yield (self._block_type, result) 67 | 68 | def _get_expression(self, name: Optional[str] = None) -> Union["Interpolated", str]: 69 | if self._block_type == "data": 70 | if len(self._labels) < 2: 71 | raise ValueError("data blocks require 2 labels") 72 | parts = [self._block_type] + list(self._labels) 73 | elif self._block_type == "locals": 74 | if len(self._labels) < 1 and not name: 75 | raise ValueError("locals blocks require 1 label") 76 | parts = ["local"] + list(self._labels) 77 | elif self._block_type == "module": 78 | if len(self._labels) < 1: 79 | raise ValueError("module blocks require 1 label") 80 | parts = [self._block_type] + list(self._labels) 81 | elif self._block_type == "output": 82 | if len(self._labels) < 1: 83 | raise ValueError("output blocks require 1 label") 84 | parts = [self._block_type] + list(self._labels) 85 | elif self._block_type == "provider": 86 | if len(self._labels) < 1: 87 | raise ValueError("provider blocks require 1 label") 88 | parts = list(self._labels) 89 | if name == "alias" or not name: 90 | if self._body: 91 | alias = self._body.get("alias") or "default" 92 | if alias != "default": 93 | parts.append(alias) 94 | return ".".join(parts) 95 | elif self._block_type == "resource": 96 | if len(self._labels) < 2: 97 | raise ValueError("resource blocks require 2 labels") 98 | parts = list(self._labels) 99 | elif self._block_type == "variable": 100 | if len(self._labels) < 1: 101 | raise ValueError("variable blocks require 1 label") 102 | parts = ["var"] + self._labels 103 | else: 104 | parts = [self._block_type] + list(self._labels) 105 | 106 | if name: 107 | parts.append(name) 108 | 109 | return Interpolated(".".join(parts)) 110 | 111 | def __getattr__(self, name: str) -> Union["Interpolated", str]: 112 | if name.startswith("__"): 113 | raise AttributeError(name) 114 | return self._get_expression(name) 115 | 116 | __getitem__ = __getattr__ 117 | 118 | def __repr__(self) -> str: 119 | parts: List[Any] = [self._block_type] 120 | parts.extend(self._labels) 121 | if self._body is not None: 122 | parts.append(self._body) 123 | return f"block({', '.join(repr(part) for part in parts)})" 124 | 125 | def __str__(self) -> str: 126 | return str(self._get_expression()) 127 | 128 | 129 | class Interpolated: 130 | def __init__(self, value: str): 131 | self.__value = value 132 | 133 | def __eq__(self, other: Any) -> bool: 134 | return str(self) == other 135 | 136 | def __getattr__(self, attr: str) -> "Interpolated": 137 | return type(self)(self.__value + "." 
+ attr) 138 | 139 | def __getitem__(self, index: int) -> "Interpolated": 140 | return type(self)(f"{self.__value}[{index}]") 141 | 142 | def __repr__(self) -> str: 143 | return f"Interpolated({repr(self.__value)})" 144 | 145 | def __str__(self) -> str: 146 | return "${" + self.__value + "}" 147 | 148 | 149 | data = BlockModule("data", needs=2) 150 | locals = BlockModule("locals", needs=0) 151 | module = BlockModule("module", needs=1) 152 | output = BlockModule("output", needs=1) 153 | provider = BlockModule("provider", needs=1) 154 | resource = BlockModule("resource", needs=2) 155 | variable = BlockModule("variable", needs=1) 156 | 157 | __all__ = ["data", "locals", "module", "output", "provider", "resource", "variable"] 158 | -------------------------------------------------------------------------------- /pretf/pretf/util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shlex 3 | import sys 4 | from contextlib import contextmanager 5 | from fnmatch import fnmatch 6 | from importlib.abc import Loader 7 | from importlib.machinery import ModuleSpec 8 | from importlib.util import module_from_spec, spec_from_file_location 9 | from io import StringIO 10 | from pathlib import Path, PurePath 11 | from subprocess import PIPE, CalledProcessError, CompletedProcess, Popen 12 | from threading import Thread 13 | from types import ModuleType 14 | from typing import ( 15 | IO, 16 | BinaryIO, 17 | Generator, 18 | List, 19 | Optional, 20 | Sequence, 21 | TextIO, 22 | Tuple, 23 | Union, 24 | ) 25 | 26 | from . import log 27 | 28 | 29 | def execute( 30 | file: str, 31 | args: Sequence[str], 32 | cwd: Optional[Union[Path, str]] = None, 33 | env: Optional[dict] = None, 34 | capture: bool = False, 35 | verbose: Optional[bool] = None, 36 | ) -> CompletedProcess: 37 | """ 38 | Executes a command and waits for it to finish. 39 | 40 | The args sequence is passed to the process as its command line. 41 | 42 | If capture is true, stdout and stderr are captured and returned on 43 | the result; stderr is also echoed to this process's stderr, and 44 | stdout is echoed too when running verbosely. 45 | 46 | Raises CalledProcessError if the command exits with a non-zero code. 47 | 48 | Returns a CompletedProcess for the command that is run.
49 | 50 | """ 51 | 52 | if env is None: 53 | env = os.environ.copy() 54 | 55 | if is_verbose(verbose): 56 | log.ok(f"run: {' '.join(shlex.quote(arg) for arg in args)}") 57 | 58 | if capture: 59 | return _execute_and_capture(file, args, cwd, env, verbose) 60 | else: 61 | return _execute(file, args, cwd, env) 62 | 63 | 64 | def _execute( 65 | file: str, args: Sequence[str], cwd: Optional[Union[Path, str]], env: dict 66 | ) -> CompletedProcess: 67 | 68 | proc = Popen(args, executable=file, cwd=cwd, env=env) 69 | 70 | while True: 71 | try: 72 | returncode = proc.wait() 73 | except KeyboardInterrupt: 74 | pass 75 | else: 76 | break 77 | 78 | if returncode != 0: 79 | raise CalledProcessError( 80 | returncode=returncode, 81 | cmd=" ".join(shlex.quote(arg) for arg in args), 82 | ) 83 | 84 | return CompletedProcess(args=args, returncode=returncode) 85 | 86 | 87 | def _execute_and_capture( 88 | file: str, 89 | args: Sequence[str], 90 | cwd: Optional[Union[Path, str]], 91 | env: dict, 92 | verbose: Optional[bool], 93 | ) -> CompletedProcess: 94 | 95 | stdout_buffer = StringIO() 96 | stderr_buffer = StringIO() 97 | 98 | proc = Popen(args, executable=file, stdout=PIPE, stderr=PIPE, cwd=cwd, env=env) 99 | 100 | stdout_args: List[Optional[IO]] = [proc.stdout, stdout_buffer] 101 | if is_verbose(verbose): 102 | stdout_args.append(sys.stdout) 103 | stdout_thread = Thread(target=_fan_out, args=stdout_args) 104 | stdout_thread.start() 105 | 106 | stderr_args = [proc.stderr, stderr_buffer, sys.stderr] 107 | stderr_thread = Thread(target=_fan_out, args=stderr_args) 108 | stderr_thread.start() 109 | 110 | while True: 111 | try: 112 | returncode = proc.wait() 113 | except KeyboardInterrupt: 114 | pass 115 | else: 116 | break 117 | 118 | stdout_thread.join() 119 | stderr_thread.join() 120 | 121 | stdout_buffer.seek(0) 122 | stderr_buffer.seek(0) 123 | 124 | if returncode != 0: 125 | raise CalledProcessError( 126 | returncode=returncode, 127 | cmd=" ".join(shlex.quote(arg) for arg in args), 128 | output=stdout_buffer.read(), 129 | stderr=stderr_buffer.read(), 130 | ) 131 | 132 | return CompletedProcess( 133 | args=args, 134 | returncode=returncode, 135 | stdout=stdout_buffer.read(), 136 | stderr=stderr_buffer.read(), 137 | ) 138 | 139 | 140 | def _fan_out(input_steam: BinaryIO, *output_streams: TextIO) -> None: 141 | while True: 142 | char = input_steam.read(1).decode() 143 | if char: 144 | for output_stream in output_streams: 145 | output_stream.write(char) 146 | else: 147 | break 148 | 149 | 150 | def find_paths( 151 | path_patterns: Sequence[str], 152 | exclude_name_patterns: Sequence[str] = [], 153 | cwd: Optional[Union[Path, str]] = None, 154 | ) -> Generator[Path, None, None]: 155 | 156 | if cwd is None: 157 | cwd = Path.cwd() 158 | elif isinstance(cwd, str): 159 | cwd = Path(cwd) 160 | 161 | for pattern in path_patterns: 162 | for path in cwd.glob(pattern): 163 | for exclude_name_pattern in exclude_name_patterns: 164 | if fnmatch(path.name, exclude_name_pattern): 165 | break 166 | else: 167 | yield path 168 | 169 | 170 | def find_workflow_path(cwd: Optional[Union[Path, str]] = None) -> Optional[Path]: 171 | 172 | if cwd is None: 173 | cwd = Path.cwd() 174 | elif isinstance(cwd, str): 175 | cwd = Path(cwd) 176 | 177 | for name in ("pretf.workflow.py", "pretf.py"): 178 | 179 | path = cwd / name 180 | if path.exists(): 181 | return path 182 | 183 | for dir_path in path.parents: 184 | path = dir_path / name 185 | if path.exists(): 186 | return path 187 | 188 | return None 189 | 190 | 191 | @contextmanager 192 | 
def import_file(path: Union[PurePath, str]) -> Generator[ModuleType, None, None]: 193 | """ 194 | Imports a Python module from any local filesystem path. 195 | Temporarily alters sys.path to allow the imported module 196 | to import other modules in the same directory. 197 | 198 | """ 199 | 200 | pathdir = os.path.dirname(path) 201 | if pathdir in sys.path: 202 | added_to_sys_path = False 203 | else: 204 | sys.path.insert(0, pathdir) 205 | added_to_sys_path = True 206 | try: 207 | name = os.path.basename(path).split(".")[0] 208 | spec = spec_from_file_location(name, str(path)) 209 | assert isinstance(spec, ModuleSpec) 210 | module = module_from_spec(spec) 211 | assert isinstance(spec.loader, Loader) 212 | loader: Loader = spec.loader 213 | try: 214 | loader.exec_module(module) 215 | except Exception as error: 216 | log.bad(error) 217 | raise 218 | yield module 219 | finally: 220 | if added_to_sys_path: 221 | sys.path.remove(pathdir) 222 | 223 | 224 | def is_verbose(verbose: Optional[bool], default: bool = True) -> bool: 225 | if verbose is not None: 226 | return verbose 227 | env_verbose = os.environ.get("PRETF_VERBOSE") 228 | if env_verbose == "1": 229 | return True 230 | elif env_verbose == "0": 231 | return False 232 | else: 233 | return default 234 | 235 | 236 | def parse_args() -> Tuple[str, List[str]]: 237 | 238 | subcommand = "" 239 | options = [] 240 | 241 | help_flags = set(("-h", "-help", "--help")) 242 | version_flags = set(("-v", "-version", "--version")) 243 | 244 | tokens = sys.argv[1:] 245 | while tokens: 246 | token = tokens.pop(0) 247 | 248 | if token in help_flags: 249 | subcommand = "help" 250 | elif token in version_flags: 251 | subcommand = "version" 252 | elif token in ("-out", "-var", "-var-file") and tokens: 253 | token = token + "=" + tokens.pop(0) 254 | options.append(token) 255 | elif not subcommand: 256 | subcommand = token 257 | else: 258 | options.append(token) 259 | 260 | return (subcommand, options) 261 | -------------------------------------------------------------------------------- /pretf/pretf/render.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import os 3 | from collections.abc import Iterable 4 | from functools import lru_cache 5 | from pathlib import Path, PurePath 6 | from threading import Thread 7 | from typing import Any, Callable, Dict, Generator, List, Optional, Union 8 | 9 | import jinja2 10 | 11 | from . import log 12 | from .blocks import Block, Interpolated 13 | from .exceptions import FunctionNotFoundError 14 | from .parser import parse_hcl2 15 | from .util import find_workflow_path, import_file 16 | from .variables import ( 17 | TerraformVariableStore, 18 | VariableProxy, 19 | VariableValue, 20 | get_variable_definitions_from_block, 21 | ) 22 | 23 | 24 | class PathProxy: 25 | def __init__(self) -> None: 26 | self.cwd = Path.cwd() 27 | self.module = Path(".") 28 | self.root = Path(".") 29 | 30 | @property # type: ignore 31 | @lru_cache(maxsize=None) 32 | def top(self) -> Path: 33 | """ 34 | Returns the directory containing the pretf.workflow.py file, 35 | or the current directory if there is none. 
36 | 37 | """ 38 | 39 | workflow_path = find_workflow_path() 40 | if workflow_path: 41 | return workflow_path.parent 42 | else: 43 | return self.cwd 44 | 45 | 46 | def render_files( 47 | files_to_create: Dict[Path, Path], 48 | ) -> Dict[Path, Union[dict, List[dict]]]: 49 | 50 | variables = TerraformVariableStore(files_to_create=files_to_create) 51 | variables.load() 52 | 53 | threads = [] 54 | for target_path, source_path in files_to_create.items(): 55 | thread: RenderThread 56 | if source_path.name.endswith(".j2"): 57 | thread = RenderJinjaThread( 58 | source_path=source_path, 59 | target_path=target_path, 60 | variables=variables, 61 | ) 62 | elif source_path.name.endswith(".py"): 63 | thread = RenderPythonThread( 64 | source_path=source_path, 65 | target_path=target_path, 66 | variables=variables, 67 | ) 68 | else: 69 | raise ValueError(source_path) 70 | thread.start() 71 | threads.append(thread) 72 | 73 | for thread in threads: 74 | try: 75 | thread.join() 76 | except KeyboardInterrupt: 77 | variables.abort() 78 | thread.join() 79 | 80 | results = {} 81 | for thread in threads: 82 | if thread.error: 83 | raise thread.error 84 | results[thread.target_path] = thread.contents() 85 | return results 86 | 87 | 88 | class RenderThread(Thread): 89 | def __init__( 90 | self, 91 | source_path: Path, 92 | target_path: Path, 93 | variables: TerraformVariableStore, 94 | ): 95 | super().__init__() 96 | 97 | self.source_path = source_path 98 | self.target_path = target_path 99 | self.target_name = target_path.name 100 | self.variables = variables 101 | 102 | self.blocks: List[dict] = [] 103 | self.error: Optional[Exception] = None 104 | self.is_tfvars = self.target_name.endswith(".tfvars.json") 105 | 106 | def contents(self) -> Union[dict, List[dict]]: 107 | if self.is_tfvars: 108 | merged = {} 109 | for block in self.blocks: 110 | for name, value in block.items(): 111 | merged[name] = value 112 | return merged 113 | else: 114 | return self.blocks 115 | 116 | def process_tf_block(self, block: dict) -> None: 117 | for var in get_variable_definitions_from_block(block, source=self.source_path): 118 | # Add the variable definition. This doesn't necessarily 119 | # make it available to use, because a tfvars file may 120 | # populate it later. 121 | self.variables.add(var) 122 | 123 | def process_tfvars_dict(self, values: dict) -> None: 124 | # Only populate the variable store with values in this file 125 | # if it is waiting for this file. It is possible to generate 126 | # tfvars files that don't get used as a source for values. 127 | if self.variables.tfvars_waiting_for(self.target_path): 128 | for name, value in values.items(): 129 | var = VariableValue(name=name, value=value, source=self.source_path) 130 | self.variables.add(var) 131 | 132 | def render(self) -> Generator[dict, None, None]: 133 | raise NotImplementedError("subclass should implement this") 134 | 135 | def run(self) -> None: 136 | try: 137 | self.blocks = list(self.render()) 138 | except Exception as error: 139 | log.bad(f"create: {self.target_name} could not be processed") 140 | self.error = error 141 | finally: 142 | # Tell the variable store that the file is done, 143 | # whether it was successful or not, so it can 144 | # unblock other threads if necessary. 
145 | self.variables.file_done(self.target_path) 146 | 147 | 148 | class RenderJinjaThread(RenderThread): 149 | def render(self) -> Generator[dict, None, None]: 150 | 151 | template_string = self.source_path.read_text() 152 | template = jinja2.Template(template_string) 153 | rendered = template.render( 154 | path=PathProxy(), 155 | terraform=TerraformProxy(), 156 | var=self.variables.proxy(consumer=self.source_path), 157 | ) 158 | block = parse_hcl2(rendered) 159 | 160 | if self.is_tfvars: 161 | self.process_tfvars_dict(block) 162 | else: 163 | self.process_tf_block(block) 164 | 165 | yield block 166 | 167 | 168 | class RenderPythonThread(RenderThread): 169 | def render(self) -> Generator[dict, None, None]: 170 | 171 | return_value = None 172 | 173 | # Load the file and start the generator. 174 | with import_file(self.source_path) as module: 175 | 176 | if self.is_tfvars: 177 | func_name = "pretf_variables" 178 | else: 179 | func_name = "pretf_blocks" 180 | 181 | if not hasattr(module, func_name): 182 | raise FunctionNotFoundError( 183 | f"create: {self.source_path} does not have a {repr(func_name)} function" 184 | ) 185 | 186 | # Call the pretf_* function, passing in "path", "terraform" and "var" if required. 187 | var_proxy = self.variables.proxy(consumer=self.source_path) 188 | self.gen = call_pretf_function( 189 | func=getattr(module, func_name), var=var_proxy 190 | ) 191 | 192 | # Process each yielded block. 193 | while True: 194 | 195 | try: 196 | yielded = self.gen.send(return_value) 197 | except StopIteration: 198 | break 199 | 200 | return_value = yielded 201 | 202 | if self.is_tfvars: 203 | if not isinstance(yielded, dict): 204 | raise TypeError( 205 | f"expected dict to be yielded but got {repr(yielded)}" 206 | ) 207 | self.process_tfvars_dict(yielded) 208 | yield yielded 209 | else: 210 | for block in unwrap_yielded(yielded): 211 | self.process_tf_block(block) 212 | yield block 213 | 214 | 215 | class TerraformProxy: 216 | @property # type: ignore 217 | @lru_cache(maxsize=None) 218 | def workspace(self) -> str: 219 | workspace = os.getenv("TF_WORKSPACE") 220 | if not workspace: 221 | cwd = Path.cwd() 222 | try: 223 | workspace = (cwd / ".terraform" / "environment").read_text() 224 | except FileNotFoundError: 225 | workspace = "default" 226 | return workspace 227 | 228 | 229 | def call_pretf_function( 230 | func: Callable, var: Optional[VariableProxy] = None, context: Optional[dict] = None 231 | ) -> Any: 232 | kwargs: Dict[str, Any] = context or {} 233 | sig = inspect.signature(func) 234 | if "path" in sig.parameters: 235 | kwargs["path"] = PathProxy() 236 | if "terraform" in sig.parameters: 237 | kwargs["terraform"] = TerraformProxy() 238 | if "var" in sig.parameters and var is not None: 239 | kwargs["var"] = var 240 | return func(**kwargs) 241 | 242 | 243 | def json_default(obj: Any) -> Any: 244 | if isinstance(obj, (Block, Interpolated, PurePath)): 245 | return str(obj) 246 | raise TypeError(repr(obj)) 247 | 248 | 249 | def unwrap_yielded( 250 | yielded: Union[Block, dict, Iterable], **kwargs: Any 251 | ) -> Generator[dict, None, None]: 252 | if isinstance(yielded, Block): 253 | yield dict(iter(yielded)) 254 | elif isinstance(yielded, dict): 255 | yield yielded 256 | else: 257 | root = kwargs.get("root", yielded) 258 | parent = kwargs.get("parent", object()) 259 | if isinstance(yielded, Iterable) and yielded != parent: 260 | for nested in yielded: 261 | yield from unwrap_yielded(nested, parent=yielded, root=root) 262 | else: 263 | raise TypeError( 264 | f"expected block to 
be yielded but got {repr(kwargs.get('root', yielded))}" 265 | ) 266 | -------------------------------------------------------------------------------- /docs/api/workflow.md: -------------------------------------------------------------------------------- 1 | ## clean_files 2 | 3 | Deletes the specified files. Intended for use after `create_files()`. Use `delete_files()` if wildcards are required. 4 | 5 | Signature: 6 | 7 | ```python 8 | def clean_files(paths: Sequence[Path], verbose: bool = True) -> None: 9 | 10 | paths: 11 | files to delete 12 | 13 | verbose: 14 | whether to print information 15 | ``` 16 | 17 | Example: 18 | 19 | ```python 20 | from pretf import workflow 21 | 22 | 23 | def pretf_workflow(): 24 | created = workflow.create_files() 25 | proc = workflow.execute_terraform() 26 | workflow.clean_files(created) 27 | return proc 28 | ``` 29 | 30 | ## create_files 31 | 32 | Creates `*.tf.json` and `*.tfvars.json` files in `target_dir` from `*.tf.py` and `*.tfvars.py` in source_dirs. 33 | 34 | `target_dir` defaults to the current working directory and `source_dirs` defaults to a list containing `target_dir`. 35 | 36 | If multiple source_dirs are specified, and there are duplicate file names, the files in the latter directories take precedence. 37 | 38 | It is recommended to call create_files() only once. Pass in multiple source_dirs rather than calling it multiple times. Pretf parses variables from files in the current directory and the source_dirs. Calling it multiple times with different source_dirs could give Pretf a different set of files to parse each time it is called, resulting in different variables each time. 39 | 40 | Signature: 41 | 42 | ```python 43 | def create_files( 44 | target_dir: Union[Path, str] = "", 45 | source_dirs: Sequence[Union[Path, str]] = [], 46 | verbose: bool = True, 47 | ) -> List[Path]: 48 | ``` 49 | 50 | Example: 51 | 52 | ```python 53 | from pretf import workflow 54 | 55 | 56 | def pretf_workflow(): 57 | workflow.create_files() 58 | return workflow.execute_terraform() 59 | ``` 60 | 61 | ## custom 62 | 63 | Calls the `pretf_workflow()` function from the specified Python file. This is useful for having a custom workflow that is used by multiple `pretf.workflow.py` files in different directories. 64 | 65 | Signature: 66 | 67 | ```python 68 | def custom(module_path: Union[PurePath, str], context: Optional[dict] = None) -> CompletedProcess: 69 | 70 | module_path: 71 | file path for the Python module 72 | context: 73 | dictionary to pass into the pretf_workflow() function 74 | 75 | returns: 76 | the completed process from the pretf_workflow() function 77 | ``` 78 | 79 | Example: 80 | 81 | ```python 82 | from pretf import workflow 83 | 84 | 85 | def pretf_workflow(): 86 | return workflow.custom("../src/pretf_workflow.py") 87 | ``` 88 | 89 | ## default 90 | 91 | This is the default Pretf workflow. This is automatically used when there is no `pretf.workflow.py` file in the current directory, or it can be called directly from a custom workflow function if it just needs to do something before or after the default workflow. 
92 | 93 | Signature: 94 | 95 | ```python 96 | def default( 97 | clean: bool = True, 98 | created: list = [], 99 | verbose: bool = True, 100 | ) -> CompletedProcess: 101 | 102 | clean: 103 | whether to delete created files afterwards 104 | 105 | created: 106 | extra files to delete afterwards 107 | 108 | verbose: 109 | whether to print information 110 | 111 | returns: 112 | the completed Terraform process 113 | ``` 114 | 115 | Example: 116 | 117 | ```python 118 | from pretf import workflow 119 | 120 | 121 | def pretf_workflow(): 122 | workflow.create_files() 123 | return workflow.default() 124 | ``` 125 | 126 | ## delete_files 127 | 128 | Deletes matching files from the current directory. Defaults to deleting files normally created by the `create_files()` function. Optionally exclude files matching a specified pattern. 129 | 130 | Signature: 131 | 132 | ```python 133 | def delete_files( 134 | *path_patterns: str, 135 | exclude_name_patterns: Sequence[str] = [], 136 | cwd: Optional[Union[Path, str]] = None, 137 | verbose: bool = True, 138 | ) -> List[Path]: 139 | 140 | path_patterns: 141 | path glob patterns to delete from the current directory 142 | defaults to ("*.tf.json", "*.tfvars.json") 143 | exclude_name_patterns: 144 | name glob patterns to exclude 145 | cwd: 146 | current directory 147 | verbose: 148 | whether to print information 149 | 150 | returns: 151 | removed files 152 | ``` 153 | 154 | Example: 155 | 156 | ```python 157 | from pretf import workflow 158 | 159 | 160 | def pretf_workflow(): 161 | workflow.delete_files() 162 | return workflow.execute_terraform() 163 | ``` 164 | 165 | ## delete_links 166 | 167 | Deletes symlinks from the current directory. 168 | 169 | Signature: 170 | 171 | ```python 172 | def delete_links( 173 | cwd: Optional[Union[Path, str]] = None, 174 | verbose: bool = True, 175 | ) -> List[Path]: 176 | 177 | cwd: 178 | current directory 179 | verbose: 180 | whether to print information 181 | 182 | returns: 183 | removed symlinks 184 | ``` 185 | 186 | Example: 187 | 188 | ```python 189 | from pretf import workflow 190 | 191 | 192 | def pretf_workflow(): 193 | workflow.delete_links() 194 | workflow.link_files("*.tf.py") 195 | return workflow.execute_terraform() 196 | ``` 197 | 198 | ## execute_terraform 199 | 200 | Executes Terraform and waits for it to finish. Command line arguments are passed through to Terraform. Returns the completed process from Terraform. 201 | 202 | Signature: 203 | 204 | ```python 205 | def execute_terraform(verbose: bool = True) -> CompletedProcess: 206 | 207 | verbose: 208 | whether to print the command 209 | 210 | returns: 211 | the completed Terraform process 212 | ``` 213 | 214 | Example: 215 | 216 | ```python 217 | from pretf import workflow 218 | 219 | 220 | def pretf_workflow(): 221 | return workflow.execute_terraform() 222 | ``` 223 | 224 | ## load_parent 225 | 226 | Looks for the closest `pretf.workflow.py` file in parent directories and calls the `pretf_workflow()` function. Errors if there are no `pretf.workflow.py` files in any parent directories. 
227 | 228 | Signature: 229 | 230 | ```python 231 | def load_parent(**kwargs: Any) -> CompletedProcess: 232 | 233 | kwargs: 234 | keyword arguments to pass into the pretf_workflow() function 235 | 236 | returns: 237 | the completed process from the parent workflow 238 | ``` 239 | 240 | Example: 241 | 242 | ```python 243 | from pretf import workflow 244 | 245 | 246 | def pretf_workflow(): 247 | workflow.require_files("terraform.tfvars") 248 | return workflow.load_parent() 249 | ``` 250 | 251 | ## link_files 252 | 253 | Creates symlinks from all files and directories matching the source patterns into the current directory. 254 | 255 | Signature: 256 | 257 | ```python 258 | def link_files( 259 | *path_patterns: Union[Path, str], 260 | exclude_name_patterns: Sequence[str] = [".*", "_*", "pretf.workflow.py"], 261 | cwd: Optional[Union[Path, str]] = None, 262 | verbose: bool = True, 263 | ) -> List[Path]: 264 | 265 | path_patterns: 266 | paths or path glob patterns to link into the current directory 267 | exclude_name_patterns: 268 | name glob patterns to exclude 269 | cwd: 270 | current directory 271 | verbose: 272 | whether to print information 273 | 274 | returns: 275 | created symlinks 276 | ``` 277 | 278 | Example: 279 | 280 | ```python 281 | from pretf import workflow 282 | 283 | 284 | def pretf_workflow(): 285 | workflow.delete_links() 286 | workflow.link_files("*.tf.py") 287 | return workflow.execute_terraform() 288 | ``` 289 | 290 | ## link_module 291 | 292 | Creates symlinks from all files and directories in a module into the current directory. Remote modules are first downloaded into a cache directory. 293 | 294 | Signature: 295 | 296 | ```python 297 | def link_module( 298 | source: Union[Path, str], 299 | version: Optional[str] = None, 300 | update: bool = False, 301 | cache_dir: Optional[Union[Path, str]] = None, 302 | cwd: Optional[Union[Path, str]] = None, 303 | verbose: bool = True, 304 | ) -> List[Path]: 305 | 306 | source: 307 | location of the module to link into the current directory 308 | version: 309 | the module version (if using a registry) 310 | update: 311 | whether to fetch the module every time, or use a cached copy 312 | cache_dir: 313 | location to use for caching modules 314 | cwd: 315 | current directory 316 | verbose: 317 | whether to print information 318 | 319 | returns: 320 | created symlinks 321 | ``` 322 | 323 | Example: 324 | 325 | ```python 326 | from pretf import workflow 327 | 328 | 329 | def pretf_workflow(): 330 | workflow.delete_links() 331 | workflow.link_module("claranet/vpc-modules/aws", version="1.1.0") 332 | return workflow.execute_terraform() 333 | ``` 334 | 335 | ## mirror_files 336 | 337 | > This function will be removed in a future version. Use [delete_links](#delete_links) and [link_files](#link_files) instead. 338 | 339 | Creates symlinks from all files and directories matching the source patterns into the current directory. Deletes all pre-existing symlinks in the current directory. 
340 | 341 | Signature: 342 | 343 | ```python 344 | def mirror_files( 345 | *path_patterns: Union[Path, str], 346 | exclude_name_patterns: Sequence[str] = [".*", "_*", "pretf.workflow.py"], 347 | include_directories: bool = True, 348 | cwd: Optional[Union[Path, str]] = None, 349 | verbose: bool = True, 350 | ) -> List[Path]: 351 | 352 | path_patterns: 353 | paths or path glob patterns to mirror into the current directory 354 | exclude_name_patterns: 355 | name glob patterns to exclude 356 | cwd: 357 | current directory 358 | verbose: 359 | whether to print information 360 | 361 | returns: 362 | created symlinks 363 | ``` 364 | 365 | Example: 366 | 367 | ```python 368 | from pretf import workflow 369 | 370 | 371 | def pretf_workflow(): 372 | workflow.mirror_files("../src/*") 373 | return workflow.execute_terraform() 374 | ``` 375 | 376 | ## require_files 377 | 378 | Raises an exception if the specified files are not found in the current directory. Pretf will catch this exception, display an error message, and show other directories that do contain the files. 379 | 380 | This can be used to restrict where Pretf/Terraform can run, while informing users where it can run if they make a mistake. 381 | 382 | If multiple patterns are provided, the directory must contain files that match all patterns (performing an `AND` search). 383 | 384 | ```python 385 | def require_files(*name_patterns: str) -> None: 386 | 387 | name_patterns: 388 | name glob patterns to require 389 | ``` 390 | 391 | Example: 392 | 393 | ```python 394 | from pretf import workflow 395 | 396 | 397 | def pretf_workflow(): 398 | workflow.require_files("*.tfvars") 399 | return workflow.default() 400 | ``` 401 | -------------------------------------------------------------------------------- /pretf.aws/pretf/aws.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from functools import lru_cache, wraps 4 | from threading import RLock 5 | from time import sleep 6 | from typing import Any, Callable, Optional 7 | 8 | from boto3 import Session 9 | 10 | from pretf.api import block, log 11 | from pretf.blocks import Block 12 | 13 | try: 14 | import boto_source_profile_mfa 15 | 16 | use_boto_source_profile_mfa = True 17 | except ImportError: 18 | use_boto_source_profile_mfa = False 19 | 20 | 21 | # Use this lock on anything that might trigger an MFA prompt, 22 | # because otherwise it is possible for multiple threads to 23 | # prompt the user at the same time, resulting in confusing 24 | # and broken prompts for the user. This is a flaw in boto3. 25 | lock = RLock() 26 | 27 | 28 | def locked(func: Callable) -> Callable: 29 | @wraps(func) 30 | def wrapped(*args: Any, **kwargs: Any) -> Any: 31 | with lock: 32 | return func(*args, **kwargs) 33 | 34 | return wrapped 35 | 36 | 37 | @locked 38 | def _assume_role(session: Session, **kwargs: str) -> Session: 39 | 40 | for key, value in list(kwargs.items()): 41 | if not value: 42 | del kwargs[key] 43 | 44 | sts_client = session.client("sts") 45 | response = sts_client.assume_role(**kwargs) 46 | creds = response["Credentials"] 47 | 48 | return Session( 49 | aws_access_key_id=creds["AccessKeyId"], 50 | aws_secret_access_key=creds["SecretAccessKey"], 51 | aws_session_token=creds["SessionToken"], 52 | ) 53 | 54 | 55 | @locked 56 | def _create_s3_backend( 57 | session: Session, bucket: str, table: str, region_name: str 58 | ) -> None: 59 | 60 | # Prompt before creating anything. 
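# The ARNs below are built from the resolved account id and region so
# the user can confirm that the backend will be created in the
# intended AWS account before anything is provisioned.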
61 | account_id = get_account_id(session) 62 | bucket_arn = _get_s3_bucket_arn(region_name, account_id, bucket) 63 | table_arn = _get_dynamodb_table_arn(region_name, account_id, table) 64 | log.ok(f"backend: {bucket_arn}") 65 | log.ok(f"backend: {table_arn}") 66 | if not log.accept("backend: create backend resources"): 67 | log.bad("backend: not created") 68 | raise SystemExit(1) 69 | 70 | # Use the S3 bucket and DynamoDB table name for the CloudFormation stack. 71 | if bucket == table: 72 | stack_name = bucket 73 | else: 74 | stack_name = f"{bucket}-{table}" 75 | stack_arn = _get_cloudformation_stack_arn(region_name, account_id, stack_name) 76 | log.ok(f"backend: creating {stack_arn}") 77 | 78 | # Create the stack. 79 | cloudformation_client = session.client("cloudformation", region_name=region_name) 80 | cloudformation_client.create_stack( 81 | StackName=stack_name, 82 | ResourceTypes=["AWS::DynamoDB::Table", "AWS::S3::Bucket"], 83 | TemplateBody=json.dumps( 84 | { 85 | "Resources": { 86 | "Table": { 87 | "Type": "AWS::DynamoDB::Table", 88 | "Properties": { 89 | "TableName": table, 90 | "AttributeDefinitions": [ 91 | {"AttributeName": "LockID", "AttributeType": "S"} 92 | ], 93 | "KeySchema": [ 94 | {"AttributeName": "LockID", "KeyType": "HASH"} 95 | ], 96 | "BillingMode": "PAY_PER_REQUEST", 97 | }, 98 | }, 99 | "Bucket": { 100 | "Type": "AWS::S3::Bucket", 101 | "Properties": { 102 | "AccessControl": "Private", 103 | "BucketName": bucket, 104 | "VersioningConfiguration": {"Status": "Enabled"}, 105 | }, 106 | }, 107 | } 108 | } 109 | ), 110 | ) 111 | 112 | # Wait for it to complete. 113 | log.ok("backend: please wait...") 114 | while True: 115 | sleep(10) 116 | response = cloudformation_client.describe_stacks(StackName=stack_name) 117 | for stack in response["Stacks"]: 118 | if stack["StackStatus"] == "CREATE_IN_PROGRESS": 119 | pass 120 | elif stack["StackStatus"] == "CREATE_COMPLETE": 121 | log.ok("backend: create complete") 122 | return 123 | else: 124 | log.bad(f"backend: {stack['StackStatus']} {stack.get('StackStatusReason')}") 125 | raise SystemExit(1) 126 | 127 | 128 | def _get_cloudformation_stack_arn( 129 | region_name: str, account_id: str, stack_name: str 130 | ) -> str: 131 | return f"arn:aws:cloudformation:{region_name}:{account_id}:stack/{stack_name}" 132 | 133 | 134 | def _get_dynamodb_table_arn(region_name: str, account_id: str, table: str) -> str: 135 | return f"arn:aws:dynamodb:{region_name}:{account_id}:{table}" 136 | 137 | 138 | def _get_s3_bucket_arn(region_name: str, account_id: str, bucket: str) -> str: 139 | return f"arn:aws:s3:{region_name}:{account_id}:{bucket}" 140 | 141 | 142 | @locked 143 | def _get_s3_backend_status( 144 | session: Session, region_name: str, bucket: str, table: str 145 | ) -> dict: 146 | 147 | s3_client = session.client("s3") 148 | 149 | try: 150 | response = s3_client.get_bucket_versioning(Bucket=bucket) 151 | except s3_client.exceptions.NoSuchBucket: 152 | bucket_exists = False 153 | bucket_versioning_enabled = False 154 | else: 155 | bucket_exists = True 156 | bucket_versioning_enabled = response.get("Status") == "Enabled" 157 | 158 | dynamodb_client = session.client("dynamodb", region_name=region_name) 159 | 160 | try: 161 | dynamodb_client.describe_table(TableName=table) 162 | except dynamodb_client.exceptions.ResourceNotFoundException: 163 | table_exists = False 164 | else: 165 | table_exists = True 166 | 167 | return { 168 | "bucket_exists": bucket_exists, 169 | "bucket_versioning_enabled": bucket_versioning_enabled, 170 | "table_exists":
table_exists, 171 | } 172 | 173 | 174 | def _profile_creds_definitely_supported_by_terraform(creds: Any) -> bool: 175 | if creds.method in ("config-file", "shared-credentials-file"): 176 | # The credentials were in the config file, so Terraform 177 | # will have no trouble finding them using the profile. 178 | return True 179 | else: 180 | # The credentials were more complicated, using the assume-role 181 | # provider, custom-process provider, or something else. Terraform 182 | # does not support as many credential types as Boto (e.g. Terraform 183 | # can't do MFA prompts) so we should remove the "profile" from the 184 | # configuration and expose the actual credentials to Terraform. 185 | return False 186 | 187 | 188 | @locked 189 | def export_environment_variables( 190 | session: Optional[Session] = None, 191 | region_name: Optional[str] = None, 192 | **kwargs: Any, 193 | ) -> None: 194 | """ 195 | Exports AWS credentials as environment variables. 196 | 197 | """ 198 | 199 | if session is None: 200 | session = get_session(**kwargs) 201 | 202 | creds = get_frozen_credentials(session) 203 | 204 | if creds.access_key: 205 | os.environ["AWS_ACCESS_KEY_ID"] = creds.access_key 206 | 207 | if creds.secret_key: 208 | os.environ["AWS_SECRET_ACCESS_KEY"] = creds.secret_key 209 | 210 | if creds.token: 211 | os.environ["AWS_SECURITY_TOKEN"] = creds.token 212 | os.environ["AWS_SESSION_TOKEN"] = creds.token 213 | 214 | if not region_name: 215 | region_name = session.region_name 216 | if region_name: 217 | os.environ["AWS_REGION"] = region_name 218 | os.environ["AWS_DEFAULT_REGION"] = region_name 219 | 220 | 221 | @lru_cache() 222 | @locked 223 | def get_account_id( 224 | session: Optional[Session] = None, 225 | **kwargs: Any, 226 | ) -> str: 227 | if session is None: 228 | session = get_session(**kwargs) 229 | sts_client = session.client("sts") 230 | account_id = sts_client.get_caller_identity()["Account"] 231 | return account_id 232 | 233 | 234 | @locked 235 | def get_frozen_credentials( 236 | session: Optional[Session] = None, 237 | **kwargs: Any, 238 | ) -> Any: 239 | if session is None: 240 | session = get_session(**kwargs) 241 | return session.get_credentials().get_frozen_credentials() 242 | 243 | 244 | @lru_cache() 245 | def get_session(**kwargs: Any) -> Session: 246 | if use_boto_source_profile_mfa: 247 | return boto_source_profile_mfa.get_session(**kwargs) 248 | else: 249 | return Session(**kwargs) 250 | 251 | 252 | @locked 253 | def provider_aws(**body: Any) -> Block: 254 | """ 255 | Returns an AWS provider block. If provided, the `profile` option 256 | may be replaced with temporary credentials for that profile. 257 | 258 | """ 259 | 260 | if body.get("profile"): 261 | session = get_session(profile_name=body["profile"]) 262 | creds = session.get_credentials() 263 | if not _profile_creds_definitely_supported_by_terraform(creds): 264 | 265 | # This profile is using credentials that Terraform may not 266 | # support, so get static/frozen credentials and inject them 267 | # into the configuration. 
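# (Note that the frozen credentials then end up in the generated
# *.tf.json file, so that file should be treated as sensitive and
# kept out of version control.)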


@locked
def terraform_backend_s3(bucket: str, dynamodb_table: str, **config: Any) -> Block:
    """
    This ensures that the S3 backend exists, prompting to create it if
    necessary, sets the credentials as environment variables in some
    cases, and returns a Terraform configuration block for it.

    """

    # Create a session from any AWS credentials options.

    session_kwargs = {}
    session_kwargs_map = {
        "profile": "profile_name",
        "access_key": "aws_access_key_id",
        "secret_key": "aws_secret_access_key",
        "token": "aws_session_token",
    }
    for config_key, session_key in session_kwargs_map.items():
        config_value = config.get(config_key)
        if config_value:
            session_kwargs[session_key] = config_value

    session = get_session(**session_kwargs)

    region = config.get("region") or session.region_name

    # Replace the profile argument with environment variables.

    if config.get("profile"):
        creds = session.get_credentials()
        if not _profile_creds_definitely_supported_by_terraform(creds):

            # This profile is using credentials that Terraform may not
            # support, so get static/frozen credentials and export them
            # as environment variables.

            # Use environment variables for credentials rather than
            # injecting them into the backend configuration because
            # Terraform gets confused when the backend configuration
            # changes, which happens with certain AWS credential types
            # such as assuming roles.

            del config["profile"]

            export_environment_variables(session=session, region_name=region)

    # Assume a role before interacting with backend resources. This is not
    # the same as profiles that assume roles. This is when Terraform has
    # specifically been configured to assume a role. This is more likely
    # to happen when running Terraform on an EC2 instance using instance
    # profile credentials, or using environment variables to set
    # credentials, and then assuming different roles using those
    # credentials.

    if config.get("role_arn"):
        session = _assume_role(
            session,
            RoleArn=config["role_arn"],
            RoleSessionName=config.get("session_name", ""),
            ExternalId=config.get("external_id", ""),
        )

    # Check if the backend resources have been created.

    status = _get_s3_backend_status(
        session=session, region_name=region, bucket=bucket, table=dynamodb_table
    )

    if not all(status.values()):

        if any(status.values()):

            log.bad("backend: incomplete backend setup")

            account_id = get_account_id(session=session)
            bucket_arn = _get_s3_bucket_arn(region, account_id, bucket)
            table_arn = _get_dynamodb_table_arn(region, account_id, dynamodb_table)

            if status["bucket_exists"]:
                log.ok(f"backend: {bucket_arn} found")
            else:
                log.bad(f"backend: {bucket_arn} not found")

            if status["bucket_versioning_enabled"]:
                log.ok(f"backend: {bucket_arn} versioning enabled")
            else:
                log.bad(f"backend: {bucket_arn} versioning disabled")

            if status["table_exists"]:
                log.ok(f"backend: {table_arn} found")
            else:
                log.bad(f"backend: {table_arn} not found")

            raise SystemExit(1)

        _create_s3_backend(
            session=session, bucket=bucket, table=dynamodb_table, region_name=region
        )

    # Return the configuration to use the backend.

    config["bucket"] = bucket
    config.setdefault("encrypt", True)
    config["dynamodb_table"] = dynamodb_table
    config["region"] = region

    return block("terraform", {"backend": {"s3": config}})
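

# Editorial usage sketch (not part of the original module): yielding the
# backend block from a terraform.tf.py file. The bucket, table, key and
# profile names are illustrative:
#
#     from pretf.aws import terraform_backend_s3
#
#     def pretf_blocks():
#         yield terraform_backend_s3(
#             bucket="example-tfstate-bucket",
#             dynamodb_table="example-tfstate-table",
#             key="terraform.tfstate",
#             region="eu-west-1",
#             profile="example",
#         )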


@locked
def terraform_remote_state_s3(name: str, **body: Any) -> Block:
    """
    This returns a Terraform configuration block for a "terraform_remote_state"
    data source, with added support for AWS profiles using MFA prompts.

    """

    body["backend"] = "s3"
    config = body.get("config", {})
    if config.get("profile"):

        session = get_session(profile_name=config["profile"])
        creds = session.get_credentials()
        if not _profile_creds_definitely_supported_by_terraform(creds):

            # This profile is using credentials that Terraform may not
            # support, so get static/frozen credentials and inject them
            # into the configuration.

            del config["profile"]

            frozen_creds = creds.get_frozen_credentials()
            config["access_key"] = frozen_creds.access_key
            config["secret_key"] = frozen_creds.secret_key
            if creds.token:
                config["token"] = frozen_creds.token

    return block("data", "terraform_remote_state", name, body)
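

# Editorial usage sketch (not part of the original module). The data
# source name, bucket and key are illustrative:
#
#     from pretf.aws import terraform_remote_state_s3
#
#     def pretf_blocks():
#         yield terraform_remote_state_s3(
#             "vpc",
#             config={
#                 "bucket": "example-tfstate-bucket",
#                 "key": "vpc/terraform.tfstate",
#                 "region": "eu-west-1",
#                 "profile": "example",
#             },
#         )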

--------------------------------------------------------------------------------
/pretf/pretf/variables.py:
--------------------------------------------------------------------------------
import os
import shlex
from collections import defaultdict
from pathlib import Path
from threading import Event, Lock
from typing import Any, Dict, Generator, List, Set, Union

from . import log, util
from .exceptions import (
    VariableAlreadyDefinedError,
    VariableNotConsistentError,
    VariableNotDefinedError,
    VariableNotPopulatedError,
)
from .parser import (
    parse_environment_variable_for_variables,
    parse_hcl2,
    parse_json_file_for_blocks,
)


class VariableProxy:
    def __init__(self, store: "VariableStore", consumer: Any):
        self._store = store
        self._consumer = consumer

    def __contains__(self, name: str) -> bool:
        return name in self._store

    def __getattr__(self, name: str) -> Any:
        return self._store.get(name, self._consumer)

    __getitem__ = __getattr__


class VariableStore:
    def __init__(self) -> None:
        self._allow_changes = True
        self._allow_defaults = True
        self._definitions: dict = {}
        self._values: dict = {}

    def __contains__(self, name: str) -> bool:
        if name in self._definitions:
            if name in self._values:
                return True
            if self._allow_defaults:
                if self._definitions[name].has_default:
                    return True
        return False

    def add(self, var: Union["VariableDefinition", "VariableValue"]) -> None:
        if isinstance(var, VariableDefinition):
            if var.name in self._definitions:
                old_var = self._definitions[var.name]
                raise VariableAlreadyDefinedError(old_var=old_var, new_var=var)
            self._definitions[var.name] = var
        elif isinstance(var, VariableValue):
            if not self._allow_changes and var.name in self._values:
                old_var = self._values[var.name]
                if var.value != old_var.value:
                    raise VariableNotConsistentError(old_var=old_var, new_var=var)
            self._values[var.name] = var
        else:
            raise TypeError(var)

    def enable_changes(self) -> None:
        self._allow_changes = True

    def enable_defaults(self) -> None:
        self._allow_defaults = True

    def disable_changes(self) -> None:
        self._allow_changes = False

    def disable_defaults(self) -> None:
        self._allow_defaults = False

    def get(self, name: str, consumer: Any) -> Any:
        if name in self._definitions:
            if name in self._values:
                return self._values[name].value
            if self._allow_defaults:
                if self._definitions[name].has_default:
                    return self._definitions[name].default
            raise VariableNotPopulatedError(name, consumer)
        raise VariableNotDefinedError(name, consumer)

    def proxy(self, consumer: Any) -> VariableProxy:
        return VariableProxy(store=self, consumer=consumer)
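

# Editorial sketch of the store's semantics (not part of the original
# module; names are illustrative, and VariableDefinition/VariableValue
# are defined later in this file):
#
#     store = VariableStore()
#     store.add(VariableDefinition(name="env", source="variables.tf", default="dev"))
#     store.get("env", consumer="example")  # -> "dev" (falls back to the default)
#     store.add(VariableValue(name="env", value="prod", source="terraform.tfvars"))
#     store.get("env", consumer="example")  # -> "prod" (explicit values win)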


class TerraformVariableStore(VariableStore):
    def __init__(self, files_to_create: dict) -> None:
        super().__init__()
        self._files_to_create = files_to_create
        self._files_done: Set[Path] = set()
        self._tfvars_waiting: Set[Path] = set()
        self._events: Dict[str, List[Event]] = defaultdict(list)
        self._lock = Lock()
        self._source_priority: List[str] = []

    def _blocked_threads(self) -> int:
        count = 0
        for events in self._events.values():
            for event in events:
                if not event.is_set():
                    count += 1
        return count

    def _threads(self) -> int:
        return len(self._files_to_create) - len(self._files_done)

    def _unblock(self, name: str) -> None:
        events = self._events[name]
        while events:
            event = events.pop()
            event.set()

    def abort(self) -> None:
        with self._lock:
            for name in self._events:
                self._unblock(name)

    def add(self, var: Union["VariableDefinition", "VariableValue"]) -> None:
        with self._lock:

            if var.name in self:
                old_var = self._values[var.name]
                old_priority = self._source_priority.index(old_var.source)
            else:
                old_priority = -1

            new_priority = self._source_priority.index(var.source)

            if new_priority > old_priority:
                super().add(var)

            # If this variable is ready,
            # unblock any threads waiting for it.
            if var.name in self:
                self._unblock(var.name)

    def file_done(self, path: Path) -> None:
        with self._lock:

            self._files_done.add(path)
            self._tfvars_waiting.discard(path)

            # If there are no tfvars files left to be rendered,
            # then allow the default values to be used.
            if not self._tfvars_waiting:
                self.enable_defaults()

                # Unblock other threads waiting for variables with default values.
                for var in self._definitions.copy().values():
                    if var.has_default:
                        self._unblock(var.name)

            # If all other threads are blocked waiting for variables then
            # there is a deadlock. In that case unblock every variable so
            # threads can continue and fail.
            blocked_threads = self._blocked_threads()
            if blocked_threads and blocked_threads >= self._threads():
                for name in self._events:
                    self._unblock(name)

    def get(self, name: str, consumer: Any) -> Any:
        with self._lock:

            # Return the value if the variable is ready.
            if name in self:
                return super().get(name, consumer)

            # If all other threads are blocked and waiting for variables,
            # then having this thread wait for a variable would cause a deadlock.
            # In that case just try to return the value and let it fail.
            if self._blocked_threads() + 1 >= self._threads():
                return super().get(name, consumer)

            # Create an event that can be used to block this thread
            # until the variable is ready.
            event = Event()
            self._events[name].append(event)

        # Block this thread (with the lock released) until another thread
        # makes the variable ready or another thread detects a deadlock
        # and unblocks all threads.
        event.wait()

        # Try to return the value.
        # If there was a deadlock then this will fail.
        return super().get(name, consumer)

    def tfvars_wait_for(self, path: Path) -> None:
        if path not in self._files_done:
            self._tfvars_waiting.add(path)
            self.disable_defaults()

    def tfvars_waiting_for(self, path: Path) -> bool:
        return path in self._tfvars_waiting

    def load(self) -> None:
        """
        Load Terraform variables from various sources.

        From https://www.terraform.io/docs/configuration/variables.html

        Terraform loads variables in the following order,
        with later sources taking precedence over earlier ones:

        * Environment variables
        * The terraform.tfvars file, if present.
        * The terraform.tfvars.json file, if present.
        * Any *.auto.tfvars or *.auto.tfvars.json files, processed in lexical order of their filenames.
        * Any -var and -var-file options on the command line, in the order they are provided.

        """

        self.disable_defaults()
        self.enable_changes()

        auto_tfvars_files = set()
        default_tfvars_files = set()
        tf_files = set()

        target_dir = next(iter(self._files_to_create.keys())).parent

        future_files: Set[Path] = set()
        future_files.update(target_dir.iterdir())
        future_files.update(self._files_to_create.keys())
        for path in future_files:
            name = path.name
            if name.endswith(".auto.tfvars") or name.endswith(".auto.tfvars.json"):
                auto_tfvars_files.add(path)
            elif name in ("terraform.tfvars", "terraform.tfvars.json"):
                default_tfvars_files.add(path)
            elif name.endswith(".tf") or name.endswith(".tf.json"):
                tf_files.add(path)

        # Load variable definitions.
        for path in tf_files:
            if path in self._files_to_create:
                self._source_priority.append(self._files_to_create[path].name)
            else:
                self._source_priority.append(path.name)
                for var in get_variables_from_file(path):
                    self.add(var)

        # Load variable values.
        # 1. Environment variables.
        for key, value in os.environ.items():
            if key.startswith("TF_VAR_"):
                self._source_priority.append(key)
                parsed = parse_environment_variable_for_variables(key, value)
                for name, value in parsed.items():
                    var = VariableValue(name=name, value=value, source=key)
                    self.add(var)

        # 2. The terraform.tfvars file, if present.
        # 3. The terraform.tfvars.json file, if present.
        for path in sorted(default_tfvars_files):
            if path in self._files_to_create:
                self._source_priority.append(self._files_to_create[path].name)
                self.tfvars_wait_for(path)
            else:
                self._source_priority.append(path.name)
                for var in get_variables_from_file(path):
                    self.add(var)

        # 4. Any *.auto.tfvars or *.auto.tfvars.json files,
        # processed in lexical order of their filenames.
        for path in sorted(auto_tfvars_files):
            if path in self._files_to_create:
                self._source_priority.append(self._files_to_create[path].name)
                self.tfvars_wait_for(path)
            else:
                self._source_priority.append(path.name)
                for var in get_variables_from_file(path):
                    self.add(var)

        # 5. Any -var and -var-file options on the command line,
        # in the order they are provided.
        _, options = util.parse_args()
        for option in options:
            if option.startswith("-var="):
                self._source_priority.append(option)
                var_string = shlex.split(option[5:])[0]
                name, value = var_string.split("=", 1)
                var = VariableValue(name=name, value=value, source=option)
                self.add(var)
            elif option.startswith("-var-file="):
                var_file = Path(os.path.abspath(option[10:])).resolve()
                # TODO: could be a minor bug with variable priorities when
                # this specifies a file in another directory and there is a
                # file with the same name in the current directory. Fixing
                # this will require updating all of the var.source code to
                # allow for full paths, but only printing the name if there
                # are errors (except when the source is in another directory,
                # in which case a full path should be displayed).
                for target_path, source_path in self._files_to_create.items():
                    if target_path.resolve() == var_file:
                        self._source_priority.append(source_path.name)
                        self.tfvars_wait_for(target_path)
                        break
                else:
                    self._source_priority.append(var_file.name)
                    for var in get_variables_from_file(var_file):
                        self.add(var)

        self.disable_changes()
        if not self._tfvars_waiting:
            self.enable_defaults()


class VariableDefinition:
    def __init__(self, name: str, source: Any, **kwargs: dict) -> None:
        self.name = name
        self.source = str(source)
        self.has_default = False
        for key, value in kwargs.items():
            if key == "default":
                self.has_default = True
                self.default = value
            else:
                raise TypeError(
                    f"{self.__class__.__name__}() got an unexpected keyword argument {repr(key)}"
                )

    def __iter__(self) -> Generator[tuple, None, None]:
        yield ("name", self.name)
        if hasattr(self, "default"):
            yield ("default", self.default)
        yield ("source", self.source)


class VariableValue:
    def __init__(self, name: str, value: Any, source: Any) -> None:
        self.name = name
        self.value = value
        self.source = str(source)

    def __iter__(self) -> Generator[tuple, None, None]:
        yield ("name", self.name)
        yield ("value", self.value)
        yield ("source", self.source)


def get_variable_definitions_from_block(
    block: dict, source: Any
) -> Generator[VariableDefinition, None, None]:

    if "variable" not in block:
        return

    variable = block["variable"]

    if isinstance(variable, list):
        variables = variable
    elif isinstance(variable, dict):
        variables = [variable]
    elif isinstance(variable, str):
        raise ValueError(
            f"invalid variable, possibly missing body in block() call: {repr(block)}"
        )
    else:
        raise ValueError(f"invalid variable: {repr(block)}")

    for variable in variables:
        for name, block in variable.items():
            kwargs = {"name": name, "source": source}
            if "default" in block:
                kwargs["default"] = block["default"]
            yield VariableDefinition(**kwargs)


def get_variable_values_from_block(
    block: dict, source: Any
) -> Generator[VariableValue, None, None]:
    for name, value in block.items():
        yield VariableValue(name=name, value=value, source=source)


def get_variables_from_file(
    path: Path,
) -> Generator[Union[VariableDefinition, VariableValue], None, None]:
    try:
        if path.name.endswith(".tf"):
            block = parse_hcl2(path.read_text())
            yield from get_variable_definitions_from_block(block, path.name)
        elif path.name.endswith(".tfvars"):
            block = parse_hcl2(path.read_text())
            yield from get_variable_values_from_block(block, path.name)
        elif path.name.endswith(".tf.json"):
            blocks = parse_json_file_for_blocks(path)
            for block in blocks:
                yield from get_variable_definitions_from_block(block, path.name)
        elif path.name.endswith(".tfvars.json"):
            blocks = parse_json_file_for_blocks(path)
            for block in blocks:
                yield from get_variable_values_from_block(block, path.name)
        else:
            raise ValueError(f"Unexpected file extension: {path.name}")
    except Exception:
        log.bad(f"Error loading variables from {path}")
        raise
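

# Editorial sketch of how source priority drives precedence in
# TerraformVariableStore.add() (not part of the original module; it
# reaches into the private _source_priority list purely for illustration):
#
#     store = TerraformVariableStore(files_to_create={})
#     store._source_priority.extend(
#         ["variables.tf", "TF_VAR_env", "env.auto.tfvars"]
#     )
#     store.add(VariableDefinition(name="env", source="variables.tf"))
#     store.add(VariableValue(name="env", value="dev", source="TF_VAR_env"))
#     store.add(VariableValue(name="env", value="prod", source="env.auto.tfvars"))
#     store.get("env", consumer="example")  # -> "prod" (later source wins)
#     store.add(VariableValue(name="env", value="dev", source="TF_VAR_env"))
#     store.get("env", consumer="example")  # -> still "prod" (lower priority ignored)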

--------------------------------------------------------------------------------
/pretf/pretf/workflow.py:
--------------------------------------------------------------------------------
import inspect
import json
import os
import shlex
import sys
from pathlib import Path, PurePath
from subprocess import CalledProcessError, CompletedProcess
from typing import Any, Dict, List, Optional, Sequence, Union

from . import log, util
from .exceptions import RequiredFilesNotFoundError
from .render import call_pretf_function, json_default, render_files
from .util import import_file, is_verbose


def clean_files(
    paths: Sequence[Path],
    verbose: Optional[bool] = None,
) -> None:
    """
    Deletes the specified files. Intended for use after `create_files()`.
    Use `delete_files()` if wildcards are required.

    """

    if paths and is_verbose(verbose):
        names = [path.name for path in paths]
        log.ok(f"clean: {' '.join(sorted(names))}")

    for path in paths:
        try:
            path.unlink()
        except FileNotFoundError:
            pass


def create_files(
    target_dir: Union[Path, str] = "",
    source_dirs: Sequence[Union[Path, str]] = [],
    verbose: Optional[bool] = None,
) -> List[Path]:
    """
    Creates rendered files in target_dir from source files in source_dirs.

    Handles the following:
        filename.tf.j2     -> filename.tf
        filename.tf.py     -> filename.tf.json
        filename.tfvars.j2 -> filename.tfvars
        filename.tfvars.py -> filename.tfvars.json

    Jinja2 files (*.j2) require the Jinja2 package to be installed.

    Both target_dir and source_dirs default to the directory specified
    in the CLI arguments, if specified, otherwise the current working
    directory.

    If multiple source_dirs are specified, and there are duplicate
    file names, the files in the latter directories take precedence.

    It is recommended to call create_files() only once. Pass in multiple
    source_dirs rather than calling it multiple times. Pretf parses
    variables from files in the current directory and the source_dirs.
    Calling it multiple times with different source_dirs could give
    Pretf a different set of files to parse each time it is called,
    resulting in different variables each time.

    """

    if isinstance(target_dir, str):
        target_dir = Path(target_dir)

    if not source_dirs:
        source_dirs = [target_dir]

    # Find all files in the specified source directories.
    files_to_create = {}
    for source_dir in source_dirs:
        if isinstance(source_dir, str):
            source_dir = Path(source_dir)
        for source_path in source_dir.iterdir():
            file_name = source_path.name
            if file_name.endswith(".tf.py") or file_name.endswith(".tfvars.py"):
                # filename.tf.py -> filename.tf.json
                target_path = (target_dir / file_name).with_suffix(".json")
                files_to_create[target_path] = source_path
            elif file_name.endswith(".tf.j2") or file_name.endswith(".tfvars.j2"):
                # filename.tf.j2 -> filename.tf
                target_path = (target_dir / file_name).with_suffix("")
                files_to_create[target_path] = source_path

    # Render the contents of the source files.
    if files_to_create:
        file_contents = render_files(files_to_create)
    else:
        file_contents = {}

    if file_contents and is_verbose(verbose):
        names = [path.name for path in file_contents.keys()]
        log.ok(f"create: {' '.join(sorted(names))}")

    # Write the rendered files.
    created = []
    for output_path, contents in sorted(file_contents.items()):
        if output_path.name.endswith(".json"):
            # Rendered Python files produce JSON data.
            with output_path.open("w") as open_file:
                json.dump(contents, open_file, indent=2, default=json_default)
        else:
            # Rendered Jinja2 files produce raw text.
            output_path.write_text(contents)
        created.append(output_path)

    return created
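

# Editorial usage sketch (not part of the original module): a custom
# workflow step that renders files from a shared directory as well as
# the current one. The directory names are illustrative:
#
#     from pretf import workflow
#
#     def pretf_workflow():
#         workflow.delete_files()
#         created = workflow.create_files(source_dirs=["../shared", "."])
#         proc = workflow.execute_terraform()
#         workflow.clean_files(created)
#         return proc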


def custom(
    path: Union[PurePath, str],
    context: Optional[dict] = None,
) -> CompletedProcess:
    """
    Calls the pretf_workflow() function from the specified Python file.
    This is useful for having a custom workflow that is used by multiple
    pretf.workflow.py files in different directories.

    """

    with import_file(path) as module:

        try:
            func = getattr(module, "pretf_workflow")
        except AttributeError:
            raise log.bad(f"workflow: {path} does not have a 'pretf_workflow' function")

        # Call the pretf_workflow() function,
        # passing in "path" and "terraform" if required.
        result = call_pretf_function(func=func, context=context)

    if isinstance(result, int):
        result = CompletedProcess(args=[str(path)], returncode=result)

    return result


def default(
    clean: bool = True,
    created: list = [],
    verbose: Optional[bool] = None,
) -> CompletedProcess:
    """
    This is the default Pretf workflow. This is automatically used when there
    is no pretf.workflow.py file in the current directory, or it can be called
    directly from a custom workflow function if it just needs to do something
    before or after the default workflow.

    """

    # Delete *.tf.json and *.tfvars.json files.
    delete_files(verbose=verbose)

    # Create *.tf.json and *.tfvars.json files
    # from *.tf.py and *.tfvars.py files.
    created = created + create_files(verbose=verbose)

    # Execute Terraform, raising an exception if it fails.
    proc = execute_terraform(verbose=verbose)

    # Clean up created files.
    if clean:
        clean_files(paths=created, verbose=verbose)

    return proc
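

# Editorial usage sketch (not part of the original module): a directory's
# pretf.workflow.py can delegate to a shared workflow file elsewhere in
# the repository. The relative path is illustrative:
#
#     from pretf import workflow
#
#     def pretf_workflow():
#         return workflow.custom("../pretf.workflow.py")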


def delete_files(
    *path_patterns: str,
    exclude_name_patterns: Sequence[str] = [],
    cwd: Optional[Union[Path, str]] = None,
    verbose: Optional[bool] = None,
) -> List[Path]:
    """
    Deletes matching files from the current directory.
    Defaults to deleting files normally created by the create_files() function.
    Optionally excludes files matching a specified pattern.

    """

    if not path_patterns:
        path_patterns = ("*.tf.json", "*.tfvars.json")

    if cwd is None:
        cwd = Path.cwd()
    elif isinstance(cwd, str):
        cwd = Path(cwd)

    # Find files to delete.
    delete = []
    paths = util.find_paths(
        path_patterns=path_patterns,
        exclude_name_patterns=exclude_name_patterns,
        cwd=cwd,
    )
    for path in paths:
        if not path.is_dir():
            delete.append(path)

    if delete and is_verbose(verbose):
        names = [path.name for path in delete]
        log.ok(f"delete: {' '.join(sorted(names))}")

    # Delete files.
    deleted = []
    for path in delete:
        path.unlink()
        deleted.append(path)

    return deleted


def delete_links(
    cwd: Optional[Union[Path, str]] = None,
    verbose: Optional[bool] = None,
) -> List[Path]:
    """
    Deletes symlinks from the current directory.

    """

    if cwd is None:
        cwd = Path.cwd()
    elif isinstance(cwd, str):
        cwd = Path(cwd)

    # Find links to delete.
    delete = []
    for path in cwd.iterdir():
        if path.is_symlink():
            delete.append(path)

    if delete and is_verbose(verbose):
        names = [path.name for path in delete]
        log.ok(f"unlink: {' '.join(sorted(names))}")

    # Delete links.
    deleted = []
    for path in delete:
        path.unlink()
        deleted.append(path)

    return deleted


def execute_terraform(
    args: Optional[Sequence[str]] = None,
    cwd: Optional[Union[Path, str]] = None,
    env: Optional[dict] = None,
    capture: bool = False,
    verbose: Optional[bool] = None,
) -> CompletedProcess:
    """
    Executes Terraform and waits for it to finish.
    Command line arguments are passed through to Terraform.
    Returns the completed process.

    """

    if args is None:
        args = ["terraform"] + sys.argv[1:]
    else:
        args = ["terraform"] + list(args)

    # Find the Terraform executable in the PATH.
    for path in os.environ["PATH"].split(os.pathsep):

        if sys.platform == "win32":
            terraform_path = os.path.join(path, "terraform.exe")
        else:
            terraform_path = os.path.join(path, "terraform")

        # Skip if it doesn't exist here.
        if not os.path.exists(terraform_path):
            continue

        # Skip if it's not executable.
        if not os.access(terraform_path, os.X_OK):
            continue

        # Skip if it's a symlink to Pretf.
        real_name = os.path.basename(os.path.realpath(terraform_path))
        if real_name == "pretf":
            continue

        # This is a valid executable, run it.
        return util.execute(
            file=terraform_path,
            args=args,
            cwd=cwd,
            env=env,
            capture=capture,
            verbose=verbose,
        )

    log.bad("terraform: command not found")
    raise CalledProcessError(
        returncode=1,
        cmd=" ".join(shlex.quote(arg) for arg in args),
    )


def load_parent(**kwargs: Any) -> CompletedProcess:
    """
    Looks for the closest pretf.workflow.py file in parent directories
    and calls its pretf_workflow() function. Raises an error if there is
    no pretf.workflow.py file in any parent directory.

    """

    # Find the calling directory of this function, usually the directory
    # containing the pretf.workflow.py file that has called this function.
    frame = inspect.currentframe()
    if not frame:
        raise Exception("workflow: load_parent() called from unknown frame")
    caller_frame = frame.f_back
    if not caller_frame:
        raise Exception("workflow: load_parent() called from unknown caller")
    caller_info = inspect.getframeinfo(caller_frame)
    caller_file = caller_info.filename
    caller_directory = Path(caller_file).parent

    # Look for pretf.workflow.py in parent directories.
    parent_directory = caller_directory.parent
    path = util.find_workflow_path(cwd=parent_directory)

    if not path:
        raise log.bad(
            f"workflow: load_parent() called in {caller_file} but pretf.workflow.py not found in parent directories"
        )

    return custom(path, context=kwargs)
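

# Editorial usage sketch (not part of the original module): a nested
# stack directory's pretf.workflow.py can defer entirely to the closest
# parent workflow:
#
#     from pretf import workflow
#
#     def pretf_workflow():
#         return workflow.load_parent()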


def link_files(
    *path_patterns: Union[Path, str],
    exclude_name_patterns: Sequence[str] = [".*", "_*", "pretf.workflow.py"],
    cwd: Optional[Union[Path, str]] = None,
    verbose: Optional[bool] = None,
) -> List[Path]:
    """
    Creates symlinks from all files and directories matching
    the source patterns into the current directory.

    """

    if cwd is None:
        cwd = Path.cwd()
    elif isinstance(cwd, str):
        cwd = Path(cwd)

    # Find the calling directory of this function, usually the directory
    # containing the pretf.workflow.py file that has called this function.
    frame = inspect.currentframe()
    if not frame:
        raise Exception("workflow: link_files() called from unknown frame")
    caller_frame = frame.f_back
    if not caller_frame:
        raise Exception("workflow: link_files() called from unknown caller")
    caller_info = inspect.getframeinfo(caller_frame)
    caller_file = caller_info.filename
    caller_directory = Path(caller_file).parent

    # Start a list of source paths to symlink into the working directory.
    paths: List[Path] = []

    # Separate the path patterns into file name patterns (no slashes)
    # and ones that represent relative paths (can be absolute too).
    name_patterns: List[str] = []
    relative_patterns: List[str] = []
    for value in path_patterns:
        if isinstance(value, Path):
            # Use Path objects directly.
            paths.append(value)
        elif isinstance(value, str):
            if "/" in value:
                relative_patterns.append(value)
            else:
                name_patterns.append(value)
        else:
            raise TypeError(value)

    # Find paths relative to the working directory (can be absolute too).
    if relative_patterns:
        paths.extend(
            util.find_paths(
                path_patterns=relative_patterns,
                exclude_name_patterns=exclude_name_patterns,
                cwd=cwd,
            )
        )

    # Find files in parent directories of the working directory up to
    # the directory with the pretf.workflow.py file that called this.
    if name_patterns:
        here = cwd.parent
        while True:
            try:
                here.relative_to(caller_directory)
            except ValueError:
                break
            else:
                paths.extend(
                    util.find_paths(
                        path_patterns=name_patterns,
                        exclude_name_patterns=exclude_name_patterns,
                        cwd=here,
                    )
                )
                here = here.parent

    # Create a map of symlink paths to original paths.
    create: Dict[Path, str] = {}
    for real_path in paths:

        try:
            cwd.relative_to(os.path.normpath(real_path))
        except ValueError:
            is_parent_directory = False
        else:
            is_parent_directory = True

        if is_parent_directory:
            continue

        link_path = cwd / real_path.name

        if link_path in create:
            continue

        if link_path.exists():
            continue

        relative_path = os.path.relpath(real_path, cwd)

        create[link_path] = relative_path

    if create and is_verbose(verbose):
        names = [path.name for path in create.keys()]
        log.ok(f"link: {' '.join(sorted(names))}")

    # Create new symlinks.
    created = []
    for link_path, relative_path in create.items():
        link_path.symlink_to(relative_path)
        created.append(link_path)

    return created


def link_module(
    source: str,
    version: Optional[str] = None,
    update: bool = False,
    cache_dir: Optional[Union[Path, str]] = None,
    cwd: Optional[Union[Path, str]] = None,
    verbose: Optional[bool] = None,
) -> List[Path]:
    """
    Creates symlinks from all files and directories in a module into
    the current directory. Remote modules are first downloaded into a
    cache directory.

    """

    if is_verbose(verbose):
        if version:
            log.ok(f"module: {source} {version}")
        else:
            log.ok(f"module: {source}")

    if cwd is None:
        cwd = Path.cwd()
    elif isinstance(cwd, str):
        cwd = Path(cwd)

    paths: List[Path] = []

    if source.startswith(".") or source.startswith("/"):

        # Modules already on the filesystem can be used directly.
        paths.extend(
            util.find_paths(
                path_patterns=["*"],
                cwd=source,
            )
        )

    else:

        # Remote modules will be managed by Terraform in a
        # cache directory and then symlinked from there.
        module_name = "mirror-module"

        # Ensure the module cache directory exists.
        if cache_dir is None:
            cache_dir = cwd / ".terraform" / "pretf" / module_name
        elif isinstance(cache_dir, str):
            cache_dir = Path(cache_dir)
        cache_dir.mkdir(parents=True, exist_ok=True)

        # Create a Terraform root module in the cache directory
        # that just references the specified module.
        module_config_path = cache_dir / "main.tf.json"
        module_body = {"source": source}
        if version:
            module_body["version"] = version
        module_json = json.dumps([{"module": {module_name: module_body}}], indent=2)
        module_config_path.write_text(module_json)

        # Run "terraform get" to download the module using Terraform.
        from .command import TerraformCommand

        terraform_get_args = ["-update"] if update else []
        TerraformCommand(cwd=cache_dir).get(*terraform_get_args)

        # Get the path to the module.
        modules_manifest_path = cache_dir / ".terraform" / "modules" / "modules.json"
        modules_manifest = json.loads(modules_manifest_path.read_text())
        for module in modules_manifest["Modules"]:
            if module["Key"] == module_name:
                module_dir = cache_dir / module["Dir"]
                # Use files from the downloaded module directory.
                paths.extend(util.find_paths(path_patterns=["*"], cwd=module_dir))

    return link_files(*paths, cwd=cwd, verbose=verbose)
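

# Editorial usage sketch (not part of the original module): symlinking a
# remote module's files into the current directory as the root module.
# The module source and version are illustrative:
#
#     from pretf import workflow
#
#     def pretf_workflow():
#         workflow.delete_links()
#         created = workflow.link_module("org/example/aws", version="1.0.0")
#         return workflow.default(created=created)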


def mirror_files(
    *path_patterns: str,
    exclude_name_patterns: Sequence[str] = [".*", "_*"],
    include_directories: bool = True,
    cwd: Optional[Union[Path, str]] = None,
    verbose: Optional[bool] = None,
) -> List[Path]:
    """
    Creates symlinks from all files and directories matching
    the source patterns into the current directory. Deletes
    all pre-existing symlinks in the current directory.

    """

    log.bad("workflow: mirror_files() has been deprecated")

    if cwd is None:
        cwd = Path.cwd()
    elif isinstance(cwd, str):
        cwd = Path(cwd)

    # Delete old symlinks.
    for path in cwd.iterdir():
        if path.is_symlink():
            if not include_directories and path.is_dir():
                continue
            path.unlink()

    # Find files to mirror.
    create = {}
    paths = util.find_paths(
        path_patterns=path_patterns,
        exclude_name_patterns=exclude_name_patterns,
        cwd=cwd,
    )
    for real_path in paths:

        try:
            cwd.relative_to(os.path.normpath(real_path))
        except ValueError:
            is_parent_directory = False
        else:
            is_parent_directory = True

        if is_parent_directory:
            continue

        if not include_directories and real_path.is_dir():
            continue

        link_path = cwd / real_path.name

        if link_path.exists():
            continue

        relative_path = os.path.relpath(real_path, cwd)

        create[link_path] = relative_path

    if create and is_verbose(verbose):
        names = [path.name for path in create.keys()]
        log.ok(f"mirror: {' '.join(sorted(names))}")

    # Create new symlinks.
    created = []
    for link_path, relative_path in create.items():
        link_path.symlink_to(relative_path)
        created.append(link_path)

    return created


def require_files(*name_patterns: str) -> None:
    """
    Raises an exception if the specified files are not found in the current
    directory. Pretf will catch this exception, display an error message,
    and show other directories that do contain the files.

    This can be used to restrict where Pretf/Terraform can run,
    while informing users where it can run if they make a mistake.

    If multiple patterns are provided, the directory must contain
    files that match all patterns (performing an AND search).

    """

    cwd = Path.cwd()

    matches = 0
    for pattern in name_patterns:
        if list(cwd.glob(pattern)):
            matches += 1

    if matches == len(name_patterns):
        return

    # Find the calling directory of this function, usually the directory
    # containing the pretf.workflow.py file that has called this function.
    frame = inspect.currentframe()
    if not frame:
        raise Exception("workflow: require_files() called from unknown frame")
    caller_frame = frame.f_back
    if not caller_frame:
        raise Exception("workflow: require_files() called from unknown caller")
    caller_info = inspect.getframeinfo(caller_frame)
    caller_file = caller_info.filename
    caller_directory = Path(caller_file).parent

    raise RequiredFilesNotFoundError(name_patterns=name_patterns, root=caller_directory)


__all__ = [
    "clean_files",
    "create_files",
    "custom",
    "default",
    "delete_files",
    "delete_links",
    "execute_terraform",
    "link_files",
    "link_module",
    "load_parent",
    "mirror_files",
    "require_files",
]
--------------------------------------------------------------------------------
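
Taken together, require_files(), link_files() and default() support the "flatten" project layout from the examples directory. A sketch of such a shared pretf.workflow.py (editorial addition; the file patterns are illustrative):

from pretf import workflow


def pretf_workflow():
    # Only run from an environment directory such as stacks/vpc/dev.
    workflow.require_files("*.auto.tfvars")

    # Replace stale symlinks with the stack's files from parent directories.
    workflow.delete_links()
    created = workflow.link_files("*.tf", "*.tf.py")

    # Render, run Terraform, then clean up the created files.
    return workflow.default(created=created)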