├── .github
│   └── workflows
│       ├── ci.yml
│       └── docker.yml
├── .gitignore
├── .pre-commit-config.yaml
├── Dockerfile
├── justfile
├── poetry.lock
├── pyproject.toml
├── readme.md
├── setup.cfg
├── tests
│   ├── conftest.py
│   ├── functional
│   │   ├── terraform
│   │   │   └── aws_kinesis_stream
│   │   │       └── main.tf
│   │   └── test_fresources.py
│   └── unit
│       ├── data
│       │   ├── app_lb.json
│       │   ├── elasticache.json
│       │   ├── kinesis_stream.json
│       │   ├── schema.elasticache_replication_group.json
│       │   ├── schema.kinesis_stream.json
│       │   └── schema.lb.json
│       └── test_resources.py
└── tfdevops
    ├── __init__.py
    ├── cfn.py
    ├── cli.py
    ├── resource.py
    └── utils.py

/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: "CI"
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |   pull_request:
8 |     branches:
9 |       - main
10 | 
11 | jobs:
12 |   Lint:
13 |     runs-on: ubuntu-latest
14 |     steps:
15 |       - uses: actions/checkout@v2
16 |       - uses: actions/setup-python@v2
17 |         with:
18 |           python-version: 3.8
19 |       - name: Linting
20 |         run: |
21 |           pip install pre-commit
22 |           pre-commit run --all-files
23 | 
24 |   Tests:
25 |     needs: Lint
26 |     runs-on: ubuntu-latest
27 |     strategy:
28 |       matrix:
29 |         python-version: [3.8, 3.9]
30 |     steps:
31 |       - uses: actions/checkout@v2
32 |       - name: Set up Python ${{ matrix.python-version }}
33 |         uses: actions/setup-python@v2
34 |         with:
35 |           python-version: ${{ matrix.python-version }}
36 | 
37 |       - name: Install poetry
38 |         shell: bash
39 |         run: |
40 |           pip install poetry
41 | 
42 |       - name: Configure poetry
43 |         shell: bash
44 |         run: poetry config virtualenvs.in-project true
45 | 
46 |       - name: Set up cache
47 |         uses: actions/cache@v1
48 |         id: cache
49 |         with:
50 |           path: .venv
51 |           key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
52 | 
53 |       - name: Ensure cache is healthy
54 |         if: steps.cache.outputs.cache-hit == 'true'
55 |         shell: bash
56 |         run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv
57 | 
58 |       - name: Install dependencies
59 |         shell: bash
60 |         run: poetry install
61 | 
62 |       - name: Pytest
63 |         shell: bash
64 |         run: poetry run pytest -v tests/unit
65 | 
--------------------------------------------------------------------------------
/.github/workflows/docker.yml:
--------------------------------------------------------------------------------
1 | name: Create and publish a Docker image
2 | 
3 | on:
4 |   push:
5 |     branches: ['main']
6 | 
7 | env:
8 |   REGISTRY: ghcr.io
9 |   IMAGE_NAME: ${{ github.repository }}
10 | 
11 | jobs:
12 |   build-and-push-image:
13 |     runs-on: ubuntu-latest
14 |     permissions:
15 |       contents: read
16 |       packages: write
17 | 
18 |     steps:
19 |       - name: Checkout repository
20 |         uses: actions/checkout@v2
21 | 
22 |       - name: Log in to the Container registry
23 |         uses: docker/login-action@v1.10.0
24 |         with:
25 |           registry: ${{ env.REGISTRY }}
26 |           username: ${{ github.actor }}
27 |           password: ${{ secrets.GITHUB_TOKEN }}
28 | 
29 |       - name: Extract metadata (tags, labels) for Docker
30 |         id: meta
31 |         uses: docker/metadata-action@v3.6.0
32 |         with:
33 |           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
34 | 
35 |       - name: Build and push Docker image
36 |         uses: docker/build-push-action@v2.7.0
37 |         with:
38 |           context: .
39 |           push: true
40 |           tags: ${{ steps.meta.outputs.tags }}
41 |           labels: ${{ steps.meta.outputs.labels }}
42 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .tox/
2 | .coverage
3 | .tfcache/
4 | *pyc
5 | __pycache__
6 | *egg-info
7 | dist/
8 | pytestdebug.log
9 | .venv
10 | 
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | exclude: '\.(json|md)$'
2 | repos:
3 |   - repo: https://github.com/psf/black
4 |     rev: 19.10b0
5 |     hooks:
6 |       - id: black
7 | 
8 |   - repo: https://gitlab.com/pycqa/flake8
9 |     rev: 3.7.8
10 |     hooks:
11 |       - id: flake8
12 | 
13 |   - repo: https://github.com/timothycrosley/isort
14 |     rev: 4.3.21-2
15 |     hooks:
16 |       - id: isort
17 | 
18 |   - repo: https://github.com/pre-commit/pre-commit-hooks
19 |     rev: v2.3.0
20 |     hooks:
21 |       - id: trailing-whitespace
22 |         exclude: ^tests/.*/fixtures/.*
23 |       - id: end-of-file-fixer
24 |         exclude: ^tests/.*/fixtures/.*
25 |       - id: debug-statements
26 | 
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # syntax = docker/dockerfile:1.3
2 | ARG PYTHON_BUILD_IMAGE=python:3.10.0-bullseye
3 | ARG PYTHON_DIST_IMAGE=python:3.10.0-slim-bullseye
4 | 
5 | # Build Container
6 | FROM $PYTHON_BUILD_IMAGE as build
7 | 
8 | RUN adduser --disabled-login --gecos "" tfdevops
9 | 
10 | RUN python3 -m venv /app && chown -R tfdevops: /app
11 | USER tfdevops
12 | 
13 | ENV POETRY_VERSION=1.1.11 \
14 |     VIRTUAL_ENV="/app" \
15 |     PATH="/home/tfdevops/.local/bin:/app/bin:${PATH}"
16 | 
17 | RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py | python3 -
18 | 
19 | COPY pyproject.toml poetry.lock /app/
20 | WORKDIR /app
21 | 
22 | RUN --mount=type=cache,target=/home/tfdevops/.cache,uid=1000 poetry install --no-root --no-dev
23 | 
24 | ADD . /app
25 | RUN --mount=type=cache,target=/home/tfdevops/.cache,uid=1000 poetry install --no-dev
26 | 
27 | FROM $PYTHON_DIST_IMAGE
28 | 
29 | ENV PYTHONFAULTHANDLER=1 \
30 |     PYTHONUNBUFFERED=1 \
31 |     PYTHONHASHSEED=random \
32 |     AWS_RETRY_MODE=adaptive \
33 |     AWS_STS_REGIONAL_ENDPOINTS=regional \
34 |     AWS_MAX_ATTEMPTS=6 \
35 |     LC_ALL="C.UTF-8" LANG="C.UTF-8"
36 | 
37 | COPY --from=build /etc/passwd /etc/passwd
38 | COPY --from=build /etc/group /etc/group
39 | COPY --chown=tfdevops:tfdevops --from=build /app /app
40 | 
41 | USER tfdevops
42 | WORKDIR /app
43 | ENTRYPOINT ["/app/bin/tfdevops"]
44 | CMD ["--help"]
45 | 
--------------------------------------------------------------------------------
/justfile:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | lint:
4 |     pre-commit run --all-files
5 | 
6 | test:
7 |     pytest --cov=tfdevops --cov-report=term-missing -v tests/unit
8 | 
9 | image:
10 |     DOCKER_BUILDKIT=1 docker buildx build \
11 |         --label "org.opencontainers.image.vendor=stacklet" \
12 |         --label "org.opencontainers.image.source=https://github.com/stacklet/tfdevops" \
13 |         --label "org.opencontainers.image.licenses=Apache-2.0" \
14 |         --label "org.opencontainers.image.title=Terraform Devops Guru" \
15 |         -t "tfdevops:latest" \
16 |         --progress plain \
17 |         --load .
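# Example invocations of the recipes defined above:
#   just lint    # run all pre-commit hooks across the repo
#   just test    # run the unit test suite with coverage
#   just image   # build the tfdevops container image with BuildKit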
18 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "atomicwrites" 3 | version = "1.4.0" 4 | description = "Atomic file writes." 5 | category = "dev" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | 9 | [[package]] 10 | name = "attrs" 11 | version = "21.2.0" 12 | description = "Classes Without Boilerplate" 13 | category = "main" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 16 | 17 | [package.extras] 18 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] 19 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 20 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] 21 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] 22 | 23 | [[package]] 24 | name = "black" 25 | version = "21.11b1" 26 | description = "The uncompromising code formatter." 27 | category = "dev" 28 | optional = false 29 | python-versions = ">=3.6.2" 30 | 31 | [package.dependencies] 32 | click = ">=7.1.2" 33 | mypy-extensions = ">=0.4.3" 34 | pathspec = ">=0.9.0,<1" 35 | platformdirs = ">=2" 36 | regex = ">=2021.4.4" 37 | tomli = ">=0.2.6,<2.0.0" 38 | typing-extensions = [ 39 | {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, 40 | {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, 41 | ] 42 | 43 | [package.extras] 44 | colorama = ["colorama (>=0.4.3)"] 45 | d = ["aiohttp (>=3.7.4)"] 46 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 47 | python2 = ["typed-ast (>=1.4.3)"] 48 | uvloop = ["uvloop (>=0.15.2)"] 49 | 50 | [[package]] 51 | name = "boto3" 52 | version = "1.20.10" 53 | description = "The AWS SDK for Python" 54 | category = "main" 55 | optional = false 56 | python-versions = ">= 3.6" 57 | 58 | [package.dependencies] 59 | botocore = ">=1.23.10,<1.24.0" 60 | jmespath = ">=0.7.1,<1.0.0" 61 | s3transfer = ">=0.5.0,<0.6.0" 62 | 63 | [package.extras] 64 | crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] 65 | 66 | [[package]] 67 | name = "botocore" 68 | version = "1.23.10" 69 | description = "Low-level, data-driven core of boto 3." 70 | category = "main" 71 | optional = false 72 | python-versions = ">= 3.6" 73 | 74 | [package.dependencies] 75 | jmespath = ">=0.7.1,<1.0.0" 76 | python-dateutil = ">=2.1,<3.0.0" 77 | urllib3 = ">=1.25.4,<1.27" 78 | 79 | [package.extras] 80 | crt = ["awscrt (==0.12.5)"] 81 | 82 | [[package]] 83 | name = "click" 84 | version = "8.0.3" 85 | description = "Composable command line interface toolkit" 86 | category = "main" 87 | optional = false 88 | python-versions = ">=3.6" 89 | 90 | [package.dependencies] 91 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 92 | 93 | [[package]] 94 | name = "colorama" 95 | version = "0.4.4" 96 | description = "Cross-platform colored terminal text." 
97 | category = "main" 98 | optional = false 99 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 100 | 101 | [[package]] 102 | name = "execnet" 103 | version = "1.9.0" 104 | description = "execnet: rapid multi-Python deployment" 105 | category = "dev" 106 | optional = false 107 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 108 | 109 | [package.extras] 110 | testing = ["pre-commit"] 111 | 112 | [[package]] 113 | name = "iniconfig" 114 | version = "1.1.1" 115 | description = "iniconfig: brain-dead simple config-ini parsing" 116 | category = "dev" 117 | optional = false 118 | python-versions = "*" 119 | 120 | [[package]] 121 | name = "jmespath" 122 | version = "0.10.0" 123 | description = "JSON Matching Expressions" 124 | category = "main" 125 | optional = false 126 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 127 | 128 | [[package]] 129 | name = "jsonschema" 130 | version = "3.2.0" 131 | description = "An implementation of JSON Schema validation for Python" 132 | category = "main" 133 | optional = false 134 | python-versions = "*" 135 | 136 | [package.dependencies] 137 | attrs = ">=17.4.0" 138 | pyrsistent = ">=0.14.0" 139 | six = ">=1.11.0" 140 | 141 | [package.extras] 142 | format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] 143 | format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] 144 | 145 | [[package]] 146 | name = "mypy-extensions" 147 | version = "0.4.3" 148 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 149 | category = "dev" 150 | optional = false 151 | python-versions = "*" 152 | 153 | [[package]] 154 | name = "packaging" 155 | version = "21.3" 156 | description = "Core utilities for Python packages" 157 | category = "dev" 158 | optional = false 159 | python-versions = ">=3.6" 160 | 161 | [package.dependencies] 162 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 163 | 164 | [[package]] 165 | name = "pathspec" 166 | version = "0.9.0" 167 | description = "Utility library for gitignore style pattern matching of file paths." 168 | category = "dev" 169 | optional = false 170 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 171 | 172 | [[package]] 173 | name = "platformdirs" 174 | version = "2.4.0" 175 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
176 | category = "dev" 177 | optional = false 178 | python-versions = ">=3.6" 179 | 180 | [package.extras] 181 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 182 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 183 | 184 | [[package]] 185 | name = "pluggy" 186 | version = "1.0.0" 187 | description = "plugin and hook calling mechanisms for python" 188 | category = "dev" 189 | optional = false 190 | python-versions = ">=3.6" 191 | 192 | [package.extras] 193 | dev = ["pre-commit", "tox"] 194 | testing = ["pytest", "pytest-benchmark"] 195 | 196 | [[package]] 197 | name = "portalocker" 198 | version = "2.3.2" 199 | description = "Wraps the portalocker recipe for easy usage" 200 | category = "dev" 201 | optional = false 202 | python-versions = ">=3.5" 203 | 204 | [package.dependencies] 205 | pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} 206 | 207 | [package.extras] 208 | docs = ["sphinx (>=1.7.1)"] 209 | redis = ["redis"] 210 | tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "sphinx (>=3.0.3)", "pytest-flake8 (>=1.0.5)", "pytest-mypy (>=0.8.0)", "redis"] 211 | 212 | [[package]] 213 | name = "py" 214 | version = "1.11.0" 215 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 216 | category = "dev" 217 | optional = false 218 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 219 | 220 | [[package]] 221 | name = "pyparsing" 222 | version = "3.0.6" 223 | description = "Python parsing module" 224 | category = "dev" 225 | optional = false 226 | python-versions = ">=3.6" 227 | 228 | [package.extras] 229 | diagrams = ["jinja2", "railroad-diagrams"] 230 | 231 | [[package]] 232 | name = "pyrsistent" 233 | version = "0.18.0" 234 | description = "Persistent/Functional/Immutable data structures" 235 | category = "main" 236 | optional = false 237 | python-versions = ">=3.6" 238 | 239 | [[package]] 240 | name = "pytest" 241 | version = "6.2.5" 242 | description = "pytest: simple powerful testing with Python" 243 | category = "dev" 244 | optional = false 245 | python-versions = ">=3.6" 246 | 247 | [package.dependencies] 248 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 249 | attrs = ">=19.2.0" 250 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 251 | iniconfig = "*" 252 | packaging = "*" 253 | pluggy = ">=0.12,<2.0" 254 | py = ">=1.8.2" 255 | toml = "*" 256 | 257 | [package.extras] 258 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 259 | 260 | [[package]] 261 | name = "pytest-forked" 262 | version = "1.3.0" 263 | description = "run tests in isolated forked subprocesses" 264 | category = "dev" 265 | optional = false 266 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 267 | 268 | [package.dependencies] 269 | py = "*" 270 | pytest = ">=3.10" 271 | 272 | [[package]] 273 | name = "pytest-terraform" 274 | version = "0.6.0" 275 | description = "A pytest plugin for using terraform fixtures" 276 | category = "dev" 277 | optional = false 278 | python-versions = ">=3.6,<4.0" 279 | 280 | [package.dependencies] 281 | jmespath = ">=0.10.0" 282 | portalocker = ">=1.7.0" 283 | pytest = ">=6.0" 284 | pytest-xdist = ">=1.31.0" 285 | 286 | [[package]] 287 | name = "pytest-xdist" 288 | version = "2.4.0" 289 | description = "pytest xdist plugin for distributed testing and loop-on-failing modes" 290 | category = "dev" 
291 | optional = false 292 | python-versions = ">=3.6" 293 | 294 | [package.dependencies] 295 | execnet = ">=1.1" 296 | pytest = ">=6.0.0" 297 | pytest-forked = "*" 298 | 299 | [package.extras] 300 | psutil = ["psutil (>=3.0)"] 301 | setproctitle = ["setproctitle"] 302 | testing = ["filelock"] 303 | 304 | [[package]] 305 | name = "python-dateutil" 306 | version = "2.8.2" 307 | description = "Extensions to the standard Python datetime module" 308 | category = "main" 309 | optional = false 310 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 311 | 312 | [package.dependencies] 313 | six = ">=1.5" 314 | 315 | [[package]] 316 | name = "pywin32" 317 | version = "302" 318 | description = "Python for Window Extensions" 319 | category = "dev" 320 | optional = false 321 | python-versions = "*" 322 | 323 | [[package]] 324 | name = "regex" 325 | version = "2021.11.10" 326 | description = "Alternative regular expression module, to replace re." 327 | category = "dev" 328 | optional = false 329 | python-versions = "*" 330 | 331 | [[package]] 332 | name = "s3transfer" 333 | version = "0.5.0" 334 | description = "An Amazon S3 Transfer Manager" 335 | category = "main" 336 | optional = false 337 | python-versions = ">= 3.6" 338 | 339 | [package.dependencies] 340 | botocore = ">=1.12.36,<2.0a.0" 341 | 342 | [package.extras] 343 | crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] 344 | 345 | [[package]] 346 | name = "six" 347 | version = "1.16.0" 348 | description = "Python 2 and 3 compatibility utilities" 349 | category = "main" 350 | optional = false 351 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 352 | 353 | [[package]] 354 | name = "toml" 355 | version = "0.10.2" 356 | description = "Python Library for Tom's Obvious, Minimal Language" 357 | category = "dev" 358 | optional = false 359 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 360 | 361 | [[package]] 362 | name = "tomli" 363 | version = "1.2.2" 364 | description = "A lil' TOML parser" 365 | category = "dev" 366 | optional = false 367 | python-versions = ">=3.6" 368 | 369 | [[package]] 370 | name = "typing-extensions" 371 | version = "4.0.0" 372 | description = "Backported and Experimental Type Hints for Python 3.6+" 373 | category = "dev" 374 | optional = false 375 | python-versions = ">=3.6" 376 | 377 | [[package]] 378 | name = "urllib3" 379 | version = "1.26.7" 380 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
381 | category = "main" 382 | optional = false 383 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 384 | 385 | [package.extras] 386 | brotli = ["brotlipy (>=0.6.0)"] 387 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 388 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 389 | 390 | [metadata] 391 | lock-version = "1.1" 392 | python-versions = "^3.8" 393 | content-hash = "a3029de6d0cfb5f866dcd302dec994779c3244e6d0397fab82a22c70a0492705" 394 | 395 | [metadata.files] 396 | atomicwrites = [ 397 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 398 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 399 | ] 400 | attrs = [ 401 | {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, 402 | {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, 403 | ] 404 | black = [ 405 | {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, 406 | {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, 407 | ] 408 | boto3 = [ 409 | {file = "boto3-1.20.10-py3-none-any.whl", hash = "sha256:e2b5ce2679424a6c2bfc2ee4bb42d9100c8c08b21eff8d74cff85a7243a76d7b"}, 410 | {file = "boto3-1.20.10.tar.gz", hash = "sha256:20a5109a37414a52c55d2048388f02cb7cf46fc0ca7be08b3bf81f4c5c053feb"}, 411 | ] 412 | botocore = [ 413 | {file = "botocore-1.23.10-py3-none-any.whl", hash = "sha256:11670d3ac14eed1122e0154a7e1563c2c270beef43996466f8d11fbf5cf31611"}, 414 | {file = "botocore-1.23.10.tar.gz", hash = "sha256:0adda9a4a95221027312eaaee0ec9fe2239fb2f285fced3ddca54b1310b864ee"}, 415 | ] 416 | click = [ 417 | {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, 418 | {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, 419 | ] 420 | colorama = [ 421 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 422 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 423 | ] 424 | execnet = [ 425 | {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, 426 | {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, 427 | ] 428 | iniconfig = [ 429 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 430 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 431 | ] 432 | jmespath = [ 433 | {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, 434 | {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, 435 | ] 436 | jsonschema = [ 437 | {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, 438 | {file = 
"jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, 439 | ] 440 | mypy-extensions = [ 441 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 442 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 443 | ] 444 | packaging = [ 445 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 446 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 447 | ] 448 | pathspec = [ 449 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 450 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 451 | ] 452 | platformdirs = [ 453 | {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, 454 | {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, 455 | ] 456 | pluggy = [ 457 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 458 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 459 | ] 460 | portalocker = [ 461 | {file = "portalocker-2.3.2-py2.py3-none-any.whl", hash = "sha256:d8c9f7c542e768dbef006a3e49875046ca170d2d41ca712080719110bd066cc4"}, 462 | {file = "portalocker-2.3.2.tar.gz", hash = "sha256:75cfe02f702737f1726d83e04eedfa0bda2cc5b974b1ceafb8d6b42377efbd5f"}, 463 | ] 464 | py = [ 465 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 466 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 467 | ] 468 | pyparsing = [ 469 | {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, 470 | {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, 471 | ] 472 | pyrsistent = [ 473 | {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, 474 | {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, 475 | {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, 476 | {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, 477 | {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, 478 | {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, 479 | {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, 480 | {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, 481 | {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = "sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, 482 | {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, 483 | {file = "pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, 484 | {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, 485 | {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, 486 | {file = "pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, 487 | {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, 488 | {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, 489 | {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, 490 | {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, 491 | {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, 492 | {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, 493 | {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, 494 | ] 495 | pytest = [ 496 | {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, 497 | {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, 498 | ] 499 | pytest-forked = [ 500 | {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, 501 | {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, 502 | ] 503 | pytest-terraform = [ 504 | {file = "pytest-terraform-0.6.0.tar.gz", hash = "sha256:5b694598b15f072f61b731c9f2be5e083b8f21896798e99ad2f43469c0c7db47"}, 505 | {file = "pytest_terraform-0.6.0-py3-none-any.whl", hash = "sha256:f601b509f1f0801b0ff8448468424802f1e1d528902300f7490cfee62fd6afbd"}, 506 | ] 507 | pytest-xdist = [ 508 | {file = "pytest-xdist-2.4.0.tar.gz", hash = "sha256:89b330316f7fc475f999c81b577c2b926c9569f3d397ae432c0c2e2496d61ff9"}, 509 | {file = "pytest_xdist-2.4.0-py3-none-any.whl", hash = "sha256:7b61ebb46997a0820a263553179d6d1e25a8c50d8a8620cd1aa1e20e3be99168"}, 510 | ] 511 | python-dateutil = [ 512 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 513 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 514 | ] 515 | pywin32 = [ 516 | {file = "pywin32-302-cp310-cp310-win32.whl", hash = 
"sha256:251b7a9367355ccd1a4cd69cd8dd24bd57b29ad83edb2957cfa30f7ed9941efa"}, 517 | {file = "pywin32-302-cp310-cp310-win_amd64.whl", hash = "sha256:79cf7e6ddaaf1cd47a9e50cc74b5d770801a9db6594464137b1b86aa91edafcc"}, 518 | {file = "pywin32-302-cp36-cp36m-win32.whl", hash = "sha256:fe21c2fb332d03dac29de070f191bdbf14095167f8f2165fdc57db59b1ecc006"}, 519 | {file = "pywin32-302-cp36-cp36m-win_amd64.whl", hash = "sha256:d3761ab4e8c5c2dbc156e2c9ccf38dd51f936dc77e58deb940ffbc4b82a30528"}, 520 | {file = "pywin32-302-cp37-cp37m-win32.whl", hash = "sha256:48dd4e348f1ee9538dd4440bf201ea8c110ea6d9f3a5010d79452e9fa80480d9"}, 521 | {file = "pywin32-302-cp37-cp37m-win_amd64.whl", hash = "sha256:496df89f10c054c9285cc99f9d509e243f4e14ec8dfc6d78c9f0bf147a893ab1"}, 522 | {file = "pywin32-302-cp38-cp38-win32.whl", hash = "sha256:e372e477d938a49266136bff78279ed14445e00718b6c75543334351bf535259"}, 523 | {file = "pywin32-302-cp38-cp38-win_amd64.whl", hash = "sha256:543552e66936378bd2d673c5a0a3d9903dba0b0a87235ef0c584f058ceef5872"}, 524 | {file = "pywin32-302-cp39-cp39-win32.whl", hash = "sha256:2393c1a40dc4497fd6161b76801b8acd727c5610167762b7c3e9fd058ef4a6ab"}, 525 | {file = "pywin32-302-cp39-cp39-win_amd64.whl", hash = "sha256:af5aea18167a31efcacc9f98a2ca932c6b6a6d91ebe31f007509e293dea12580"}, 526 | ] 527 | regex = [ 528 | {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, 529 | {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, 530 | {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, 531 | {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, 532 | {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, 533 | {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, 534 | {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, 535 | {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, 536 | {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, 537 | {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, 538 | {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, 539 | {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, 540 | {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, 541 | {file = 
"regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, 542 | {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, 543 | {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, 544 | {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, 545 | {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, 546 | {file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, 547 | {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, 548 | {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, 549 | {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, 550 | {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, 551 | {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, 552 | {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, 553 | {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, 554 | {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, 555 | {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = "sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, 556 | {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, 557 | {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, 558 | {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, 559 | {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, 560 | {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, 561 | {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, 562 | {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, 563 | {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, 564 | {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = "sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, 565 | {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, 566 | {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, 567 | {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, 568 | {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, 569 | {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, 570 | {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, 571 | {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, 572 | {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, 573 | {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, 574 | {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, 575 | {file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, 576 | {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, 577 | ] 578 | s3transfer = [ 579 | {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, 580 | {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, 581 | ] 582 | six = [ 583 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 584 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 585 | ] 586 | toml = [ 587 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 588 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 589 | ] 590 | tomli = [ 591 | {file = "tomli-1.2.2-py3-none-any.whl", hash = 
"sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, 592 | {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, 593 | ] 594 | typing-extensions = [ 595 | {file = "typing_extensions-4.0.0-py3-none-any.whl", hash = "sha256:829704698b22e13ec9eaf959122315eabb370b0884400e9818334d8b677023d9"}, 596 | {file = "typing_extensions-4.0.0.tar.gz", hash = "sha256:2cdf80e4e04866a9b3689a51869016d36db0814d84b8d8a568d22781d45d27ed"}, 597 | ] 598 | urllib3 = [ 599 | {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, 600 | {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, 601 | ] 602 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "tfdevops" 3 | version = "0.1.1" 4 | description = "Terraform Support for AWS DevOps Guru" 5 | authors = ["Kapil Thangavelu "] 6 | license = "Apache-2.0" 7 | readme = "readme.md" 8 | 9 | [tool.poetry.scripts] 10 | tfdevops = 'tfdevops.cli:cli' 11 | 12 | [tool.poetry.dependencies] 13 | python = "^3.8" 14 | boto3 = "^1.18.16" 15 | click = "^8.0.1" 16 | jmespath = "^0.10.0" 17 | jsonschema = "^3.2.0" 18 | 19 | [tool.poetry.dev-dependencies] 20 | pytest = "^6.2.4" 21 | black = "^21.7b0" 22 | pytest-terraform = "^0.6.0" 23 | 24 | [build-system] 25 | requires = ["poetry-core>=1.0.0"] 26 | build-backend = "poetry.core.masonry.api" 27 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # tfdevops 2 | 3 | Terraform support for Amazon DevOps Guru. The service natively only supports AWS CloudFormation stacks. 4 | https://aws.amazon.com/devops-guru/features/ 5 | 6 | This project provides support for terraform users by automatically 7 | converting terraform state to an imported CloudFormation stack 8 | and optionally enabling it with DevOps guru. 9 | 10 | Note Amazon DevOps Guru only supports roughly 25 resources. 11 | https://aws.amazon.com/devops-guru/pricing/ 12 | 13 | 14 | ## How it works 15 | 16 | - Translates terraform state into a CloudFormation template with a retain deletion policy 17 | - Creates a CloudFormation stack with imported resources 18 | - Enrolls the stack into Amazon DevOps Guru 19 | 20 | ## Usage 21 | 22 | Install it. 23 | 24 | ``` 25 | pip install tfdevops 26 | ``` 27 | 28 | You've got a deployed terraform root module extant, let's generate a CloudFormation template and a set of importable resources for it 29 | 30 | ``` 31 | tfdevops cfn -d ~/path/to/terraform/module --template mycfn.json --resources importable-ids.json 32 | ``` 33 | 34 | 35 | And now we can go ahead and create a CloudFormation stack, import resources, and activate DevOps Guru on our stack. 36 | 37 | ``` 38 | tfdevops deploy --template mycfn.json --resources importable-ids.json 39 | ... 40 | INFO:tfdevops:Found existing stack, state:IMPORT_COMPLETE 41 | INFO:tfdevops:Creating import change set, 8 resources to import 42 | INFO:tfdevops:Executing change set to import resources 43 | INFO:tfdevops:Waiting for import to complete 44 | INFO:tfdevops:Cloudformation Stack Deployed - Terraform resources imported 45 | ``` 46 | 47 | You can now visit the stack in the DevOps Guru dashboard. 
48 | 
49 | Depending on the level of activity of the resources, it can take DevOps Guru a few hours to generate any actionable insights.
50 | 
51 | 
52 | As a bonus, we can validate the generated template (or any other pure CloudFormation template, i.e. one without intrinsic functions or variables) with the following
53 | command, which downloads the JSON schema for each resource type and validates every template resource against its schema.
54 | 
55 | ```
56 | tfdevops validate --template mycfn.json
57 | ```
58 | 
59 | ## Large Resources/Templates
60 | 
61 | AWS CloudFormation imposes size limits on templates (50k for direct API uploads, 500k via S3). Both the `cfn` and `deploy` subcommands support passing
62 | in an S3 path for the template and for resources with larger configurations (Step Functions workflows, etc.). Note that the S3 path given to `deploy` is the actual template
63 | path.
64 | 
65 | ## FAQ
66 | 
67 | 1. Is this a generic Terraform-to-CloudFormation converter?
68 | 
69 | No. While it has some facilities that resemble one, it is narrowly targeted at producing just enough CloudFormation to make Amazon DevOps Guru work.
70 | 
71 | ## Supported resources
72 | 
73 | 
74 | At the moment tfdevops supports the following resources:
75 | 
76 | - AWS::StepFunctions::StateMachine
77 | - AWS::ECS::Service
78 | - AWS::SQS::Queue
79 | - AWS::SNS::Topic
80 | - AWS::RDS::DBInstance
81 | - AWS::Lambda::Function
82 | - AWS::Events::Rule
83 | - AWS::DynamoDB::Table
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 100
3 | 
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 | 
4 | import boto3
5 | import jsonschema
6 | import pytest
7 | 
8 | 
9 | def load_data(filename, location="unit"):
10 |     path = Path(__file__).parent / location / "data" / filename
11 |     if not path.exists():
12 |         return None
13 |     with open(path) as f:
14 |         return json.load(f)
15 | 
16 | 
17 | def write_data(filename, data, location="unit"):
18 |     fpath = Path(__file__).parent / location / "data" / filename
19 |     if not fpath.exists():
20 |         fpath.write_text(data)
21 | 
22 | 
23 | @pytest.fixture()
24 | def validate():
25 |     def schema_validate(translator, resource):
26 |         schema_path = f"schema.{translator.tf_type}.json"
27 |         schema = load_data(schema_path)
28 |         if schema is None:
29 |             cfn = boto3.client("cloudformation")
30 |             rtype = cfn.describe_type(TypeName=translator.cfn_type, Type="RESOURCE")
31 |             schema = json.loads(rtype["Schema"])
32 |             write_data(schema_path, json.dumps(schema, indent=2))
33 | 
34 |         props = set(resource)
35 |         sprops = set(schema["properties"].keys())
36 |         unknown = props.difference(sprops)
37 |         if unknown:
38 |             raise KeyError("unknown resource keys %s" % (", ".join(unknown)))
39 | 
40 |         validator = jsonschema.Draft7Validator(schema)
41 | 
42 |         errors = list(validator.iter_errors(resource))
43 |         if errors:
44 |             print("%s errors %d" % (translator.cfn_type, len(errors)))
45 | 
46 |         for e in errors:
47 |             print("Resource %s error:\n %s" % (translator.cfn_type, str(e)))
48 | 
49 |         if errors:
50 |             raise ValueError(
51 |                 f"resource type {translator.cfn_type} had translation errors"
52 |             )
53 | 
54 |     return schema_validate
55 | 
--------------------------------------------------------------------------------
/tests/functional/terraform/aws_kinesis_stream/main.tf: -------------------------------------------------------------------------------- 1 | resource "random_pet" "name" { 2 | length = 2 3 | separator = "-" 4 | } 5 | 6 | 7 | resource "aws_kinesis_stream" "test_stream" { 8 | name = "test-${random_pet.name.id}" 9 | shard_count = 1 10 | retention_period = 48 11 | 12 | shard_level_metrics = [ 13 | "IncomingBytes", 14 | "OutgoingBytes", 15 | ] 16 | 17 | tags = { 18 | Environment = "test" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /tests/functional/test_fresources.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import conftest 4 | from pytest_terraform import terraform 5 | from tfdevops.resource import Translator, get_state_resources 6 | 7 | 8 | def get_state_path(tmpdir, tf_resources): 9 | with open(tmpdir / "state.json", "w") as fh: 10 | fh.write(json.dumps(tf_resources.terraform.show(), indent=2)) 11 | return fh.name 12 | 13 | 14 | @terraform("aws_kinesis_stream") 15 | def test_kinesis_stream(tmpdir, aws_kinesis_stream, validate): 16 | resources = get_state_resources(None, get_state_path(tmpdir, aws_kinesis_stream)) 17 | translator = Translator.get_translator("kinesis_stream")() 18 | props = translator.get_properties(resources["aws_kinesis_stream"][0]) 19 | conftest.write_data( 20 | "kinesis_stream.json", json.dumps(resources["aws_kinesis_stream"][0], indent=2) 21 | ) 22 | validate(translator, props) 23 | -------------------------------------------------------------------------------- /tests/unit/data/app_lb.json: -------------------------------------------------------------------------------- 1 | { 2 | "address": "module.sample.aws_lb.s3_proxy", 3 | "mode": "managed", 4 | "type": "aws_lb", 5 | "name": "s3_proxy", 6 | "provider_name": "registry.terraform.io/hashicorp/aws", 7 | "schema_version": 0, 8 | "values": { 9 | "access_logs": [ 10 | { 11 | "bucket": "", 12 | "enabled": false, 13 | "prefix": "" 14 | } 15 | ], 16 | "arn": "arn:aws:elasticloadbalancing:us-east-1:112233445566:loadbalancer/app/dev-stacklet-s3-proxy/f3b1263036fbba6e", 17 | "arn_suffix": "app/dev-stacklet-s3-proxy/f3b1263036fbba6e", 18 | "customer_owned_ipv4_pool": "", 19 | "dns_name": "dev-stacklet-s3-proxy-1492509259.us-east-1.elb.amazonaws.com", 20 | "drop_invalid_header_fields": false, 21 | "enable_cross_zone_load_balancing": null, 22 | "enable_deletion_protection": false, 23 | "enable_http2": true, 24 | "id": "arn:aws:elasticloadbalancing:us-east-1:112233445566:loadbalancer/app/dev-stacklet-s3-proxy/f3b1263036fbba6e", 25 | "idle_timeout": 60, 26 | "internal": false, 27 | "ip_address_type": "ipv4", 28 | "load_balancer_type": "application", 29 | "name": "dev-stacklet-s3-proxy", 30 | "name_prefix": null, 31 | "security_groups": [ 32 | "sg-05a5354352e11fcaf" 33 | ], 34 | "subnet_mapping": [ 35 | { 36 | "allocation_id": "", 37 | "ipv6_address": "", 38 | "outpost_id": "", 39 | "private_ipv4_address": "", 40 | "subnet_id": "subnet-0002702c64ad63187" 41 | }, 42 | { 43 | "allocation_id": "", 44 | "ipv6_address": "", 45 | "outpost_id": "", 46 | "private_ipv4_address": "", 47 | "subnet_id": "subnet-0395f575be6823738" 48 | }, 49 | { 50 | "allocation_id": "", 51 | "ipv6_address": "", 52 | "outpost_id": "", 53 | "private_ipv4_address": "", 54 | "subnet_id": "subnet-03f1645fc355b8886" 55 | }, 56 | { 57 | "allocation_id": "", 58 | "ipv6_address": "", 59 | "outpost_id": "", 60 | "private_ipv4_address": "", 61 | 
"subnet_id": "subnet-0867db2dcb1462488" 62 | }, 63 | { 64 | "allocation_id": "", 65 | "ipv6_address": "", 66 | "outpost_id": "", 67 | "private_ipv4_address": "", 68 | "subnet_id": "subnet-0ba582f1e2af5d98a" 69 | }, 70 | { 71 | "allocation_id": "", 72 | "ipv6_address": "", 73 | "outpost_id": "", 74 | "private_ipv4_address": "", 75 | "subnet_id": "subnet-0f980e725d9272471" 76 | } 77 | ], 78 | "subnets": [ 79 | "subnet-0002702c64ad63187", 80 | "subnet-0395f575be6823738", 81 | "subnet-03f1645fc355b8886", 82 | "subnet-0867db2dcb1462488", 83 | "subnet-0ba582f1e2af5d98a", 84 | "subnet-0f980e725d9272471" 85 | ], 86 | "tags": {}, 87 | "tags_all": { 88 | "stacklet:app": "Stacklet Platform" 89 | }, 90 | "timeouts": null, 91 | "vpc_id": "vpc-029654b93830acd5a", 92 | "zone_id": "Z35SXDOTRQ7X7K" 93 | }, 94 | "sensitive_values": { 95 | "access_logs": [ 96 | {} 97 | ], 98 | "security_groups": [ 99 | false 100 | ], 101 | "subnet_mapping": [ 102 | {}, 103 | {}, 104 | {}, 105 | {}, 106 | {}, 107 | {} 108 | ], 109 | "subnets": [ 110 | false, 111 | false, 112 | false, 113 | false, 114 | false, 115 | false 116 | ], 117 | "tags": {}, 118 | "tags_all": {} 119 | }, 120 | "depends_on": [ 121 | "module.sample.aws_ec2_managed_prefix_list.intra_stacklet_vpc_prefixes", 122 | "module.sample.aws_security_group.customer_inbound_security_group", 123 | "module.sample.data.aws_subnet.stacklet_app_subnet", 124 | "module.sample.data.aws_vpc.stacklet_app_vpc" 125 | ] 126 | } 127 | -------------------------------------------------------------------------------- /tests/unit/data/elasticache.json: -------------------------------------------------------------------------------- 1 | { 2 | "address": "aws_elasticache_replication_group.buffer", 3 | "mode": "managed", 4 | "type": "aws_elasticache_replication_group", 5 | "name": "buffer", 6 | "provider_name": "registry.terraform.io/hashicorp/aws", 7 | "schema_version": 1, 8 | "values": { 9 | "apply_immediately": true, 10 | "arn": "arn:aws:elasticache:us-east-2:112233445566:replicationgroup:stack-sample-buffer", 11 | "at_rest_encryption_enabled": true, 12 | "auth_token": "", 13 | "auto_minor_version_upgrade": true, 14 | "automatic_failover_enabled": false, 15 | "availability_zones": null, 16 | "cluster_enabled": false, 17 | "cluster_mode": [ 18 | { 19 | "num_node_groups": 1, 20 | "replicas_per_node_group": 0 21 | } 22 | ], 23 | "configuration_endpoint_address": null, 24 | "engine": "redis", 25 | "engine_version": "6.x", 26 | "engine_version_actual": "6.0.5", 27 | "final_snapshot_identifier": null, 28 | "global_replication_group_id": null, 29 | "id": "stack-sample-buffer", 30 | "kms_key_id": "arn:aws:kms:us-east-2:112233445566:key/a33e6586-615d-4214-b2cc-17c3d48d7aea", 31 | "maintenance_window": "mon:06:00-mon:07:00", 32 | "member_clusters": [ 33 | "stack-sample-buffer-001" 34 | ], 35 | "multi_az_enabled": false, 36 | "node_type": "cache.m6g.large", 37 | "notification_topic_arn": null, 38 | "number_cache_clusters": 1, 39 | "parameter_group_name": "default.redis6.x", 40 | "port": 6379, 41 | "primary_endpoint_address": "master.stack-sample-buffer.iyyvzj.use2.cache.amazonaws.com", 42 | "reader_endpoint_address": "replica.stack-sample-buffer.iyyvzj.use2.cache.amazonaws.com", 43 | "replication_group_description": "Elasticache cluster with encrypted redis", 44 | "replication_group_id": "stack-sample-buffer", 45 | "security_group_ids": [ 46 | "sg-0168ebe76be6927ce" 47 | ], 48 | "security_group_names": [], 49 | "snapshot_arns": null, 50 | "snapshot_name": null, 51 | "snapshot_retention_limit": 0, 
52 | "snapshot_window": "02:30-03:30", 53 | "subnet_group_name": "stack-sample-buffer", 54 | "tags": {}, 55 | "tags_all": { 56 | "App": "Sample" 57 | }, 58 | "timeouts": null, 59 | "transit_encryption_enabled": true 60 | }, 61 | "sensitive_values": { 62 | "cluster_mode": [ 63 | {} 64 | ], 65 | "member_clusters": [ 66 | false 67 | ], 68 | "security_group_ids": [ 69 | false 70 | ], 71 | "security_group_names": [], 72 | "tags": {}, 73 | "tags_all": {} 74 | }, 75 | "depends_on": [ 76 | "data.aws_region.current", 77 | "aws_elasticache_subnet_group.buffer", 78 | "aws_iam_role.app_role", 79 | "aws_kms_key.cache_kms_encrypt", 80 | "aws_security_group.db", 81 | "data.aws_caller_identity.current", 82 | "data.aws_iam_policy_document.app_role_assume_role_policy", 83 | "data.aws_iam_policy_document.cache_kms_policy" 84 | ] 85 | } 86 | -------------------------------------------------------------------------------- /tests/unit/data/kinesis_stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "address": "aws_kinesis_stream.test_stream", 3 | "mode": "managed", 4 | "type": "aws_kinesis_stream", 5 | "name": "test_stream", 6 | "provider_name": "registry.terraform.io/hashicorp/aws", 7 | "schema_version": 1, 8 | "values": { 9 | "arn": "arn:aws:kinesis:us-east-2:112233445566:stream/test-poetic-marten", 10 | "encryption_type": "NONE", 11 | "enforce_consumer_deletion": false, 12 | "id": "arn:aws:kinesis:us-east-2:112233445566:stream/test-poetic-marten", 13 | "kms_key_id": "", 14 | "name": "test-poetic-marten", 15 | "retention_period": 48, 16 | "shard_count": 1, 17 | "shard_level_metrics": [ 18 | "IncomingBytes", 19 | "OutgoingBytes" 20 | ], 21 | "tags": { 22 | "Environment": "test" 23 | }, 24 | "tags_all": { 25 | "Environment": "test" 26 | }, 27 | "timeouts": null 28 | }, 29 | "sensitive_values": { 30 | "shard_level_metrics": [ 31 | false, 32 | false 33 | ], 34 | "tags": {}, 35 | "tags_all": {} 36 | }, 37 | "depends_on": [ 38 | "random_pet.name" 39 | ] 40 | } -------------------------------------------------------------------------------- /tests/unit/data/schema.elasticache_replication_group.json: -------------------------------------------------------------------------------- 1 | { 2 | "typeName": "AWS::ElastiCache::ReplicationGroup", 3 | "description": "Resource Type definition for AWS::ElastiCache::ReplicationGroup", 4 | "additionalProperties": false, 5 | "properties": { 6 | "PreferredCacheClusterAZs": { 7 | "type": "array", 8 | "uniqueItems": true, 9 | "items": { 10 | "type": "string" 11 | } 12 | }, 13 | "PrimaryEndPointPort": { 14 | "type": "string" 15 | }, 16 | "CacheSecurityGroupNames": { 17 | "type": "array", 18 | "uniqueItems": true, 19 | "items": { 20 | "type": "string" 21 | } 22 | }, 23 | "ReaderEndPointPort": { 24 | "type": "string" 25 | }, 26 | "NodeGroupConfiguration": { 27 | "type": "array", 28 | "uniqueItems": true, 29 | "items": { 30 | "$ref": "#/definitions/NodeGroupConfiguration" 31 | } 32 | }, 33 | "SnapshotArns": { 34 | "type": "array", 35 | "uniqueItems": true, 36 | "items": { 37 | "type": "string" 38 | } 39 | }, 40 | "ConfigurationEndPointPort": { 41 | "type": "string" 42 | }, 43 | "Port": { 44 | "type": "integer" 45 | }, 46 | "ReadEndPointPortsList": { 47 | "type": "array", 48 | "uniqueItems": false, 49 | "items": { 50 | "type": "string" 51 | } 52 | }, 53 | "NumNodeGroups": { 54 | "type": "integer" 55 | }, 56 | "NotificationTopicArn": { 57 | "type": "string" 58 | }, 59 | "SnapshotName": { 60 | "type": "string" 61 | }, 62 | 
"AutomaticFailoverEnabled": { 63 | "type": "boolean" 64 | }, 65 | "ReplicasPerNodeGroup": { 66 | "type": "integer" 67 | }, 68 | "ReplicationGroupDescription": { 69 | "type": "string" 70 | }, 71 | "ReaderEndPointAddress": { 72 | "type": "string" 73 | }, 74 | "MultiAZEnabled": { 75 | "type": "boolean" 76 | }, 77 | "TransitEncryptionEnabled": { 78 | "type": "boolean" 79 | }, 80 | "ReplicationGroupId": { 81 | "type": "string" 82 | }, 83 | "Engine": { 84 | "type": "string" 85 | }, 86 | "Tags": { 87 | "type": "array", 88 | "uniqueItems": false, 89 | "items": { 90 | "$ref": "#/definitions/Tag" 91 | } 92 | }, 93 | "NumCacheClusters": { 94 | "type": "integer" 95 | }, 96 | "PrimaryEndPointAddress": { 97 | "type": "string" 98 | }, 99 | "GlobalReplicationGroupId": { 100 | "type": "string" 101 | }, 102 | "ConfigurationEndPointAddress": { 103 | "type": "string" 104 | }, 105 | "EngineVersion": { 106 | "type": "string" 107 | }, 108 | "KmsKeyId": { 109 | "type": "string" 110 | }, 111 | "CacheSubnetGroupName": { 112 | "type": "string" 113 | }, 114 | "CacheParameterGroupName": { 115 | "type": "string" 116 | }, 117 | "PreferredMaintenanceWindow": { 118 | "type": "string" 119 | }, 120 | "PrimaryClusterId": { 121 | "type": "string" 122 | }, 123 | "ReadEndPointPorts": { 124 | "type": "string" 125 | }, 126 | "AtRestEncryptionEnabled": { 127 | "type": "boolean" 128 | }, 129 | "AutoMinorVersionUpgrade": { 130 | "type": "boolean" 131 | }, 132 | "SecurityGroupIds": { 133 | "type": "array", 134 | "uniqueItems": true, 135 | "items": { 136 | "type": "string" 137 | } 138 | }, 139 | "SnapshotWindow": { 140 | "type": "string" 141 | }, 142 | "CacheNodeType": { 143 | "type": "string" 144 | }, 145 | "SnapshotRetentionLimit": { 146 | "type": "integer" 147 | }, 148 | "ReadEndPointAddressesList": { 149 | "type": "array", 150 | "uniqueItems": false, 151 | "items": { 152 | "type": "string" 153 | } 154 | }, 155 | "SnapshottingClusterId": { 156 | "type": "string" 157 | }, 158 | "UserGroupIds": { 159 | "type": "array", 160 | "uniqueItems": true, 161 | "items": { 162 | "type": "string" 163 | } 164 | }, 165 | "AuthToken": { 166 | "type": "string" 167 | }, 168 | "LogDeliveryConfigurations": { 169 | "type": "array", 170 | "uniqueItems": true, 171 | "items": { 172 | "$ref": "#/definitions/LogDeliveryConfigurationRequest" 173 | } 174 | }, 175 | "ReadEndPointAddresses": { 176 | "type": "string" 177 | } 178 | }, 179 | "definitions": { 180 | "LogDeliveryConfigurationRequest": { 181 | "type": "object", 182 | "additionalProperties": false, 183 | "properties": { 184 | "LogType": { 185 | "type": "string" 186 | }, 187 | "LogFormat": { 188 | "type": "string" 189 | }, 190 | "DestinationType": { 191 | "type": "string" 192 | }, 193 | "DestinationDetails": { 194 | "$ref": "#/definitions/DestinationDetails" 195 | } 196 | }, 197 | "required": [ 198 | "LogFormat", 199 | "LogType", 200 | "DestinationType", 201 | "DestinationDetails" 202 | ] 203 | }, 204 | "KinesisFirehoseDestinationDetails": { 205 | "type": "object", 206 | "additionalProperties": false, 207 | "properties": { 208 | "DeliveryStream": { 209 | "type": "string" 210 | } 211 | }, 212 | "required": [ 213 | "DeliveryStream" 214 | ] 215 | }, 216 | "CloudWatchLogsDestinationDetails": { 217 | "type": "object", 218 | "additionalProperties": false, 219 | "properties": { 220 | "LogGroup": { 221 | "type": "string" 222 | } 223 | }, 224 | "required": [ 225 | "LogGroup" 226 | ] 227 | }, 228 | "NodeGroupConfiguration": { 229 | "type": "object", 230 | "additionalProperties": false, 231 | "properties": { 232 | 
"Slots": { 233 | "type": "string" 234 | }, 235 | "PrimaryAvailabilityZone": { 236 | "type": "string" 237 | }, 238 | "ReplicaAvailabilityZones": { 239 | "type": "array", 240 | "uniqueItems": true, 241 | "items": { 242 | "type": "string" 243 | } 244 | }, 245 | "NodeGroupId": { 246 | "type": "string" 247 | }, 248 | "ReplicaCount": { 249 | "type": "integer" 250 | } 251 | } 252 | }, 253 | "Tag": { 254 | "type": "object", 255 | "additionalProperties": false, 256 | "properties": { 257 | "Value": { 258 | "type": "string" 259 | }, 260 | "Key": { 261 | "type": "string" 262 | } 263 | }, 264 | "required": [ 265 | "Value", 266 | "Key" 267 | ] 268 | }, 269 | "DestinationDetails": { 270 | "type": "object", 271 | "additionalProperties": false, 272 | "properties": { 273 | "CloudWatchLogsDetails": { 274 | "$ref": "#/definitions/CloudWatchLogsDestinationDetails" 275 | }, 276 | "KinesisFirehoseDetails": { 277 | "$ref": "#/definitions/KinesisFirehoseDestinationDetails" 278 | } 279 | } 280 | } 281 | }, 282 | "required": [ 283 | "ReplicationGroupDescription" 284 | ], 285 | "createOnlyProperties": [ 286 | "/properties/KmsKeyId", 287 | "/properties/Port", 288 | "/properties/SnapshotArns", 289 | "/properties/SnapshotName", 290 | "/properties/TransitEncryptionEnabled", 291 | "/properties/CacheSubnetGroupName", 292 | "/properties/AtRestEncryptionEnabled", 293 | "/properties/ReplicationGroupId", 294 | "/properties/GlobalReplicationGroupId", 295 | "/properties/ReplicasPerNodeGroup", 296 | "/properties/Engine", 297 | "/properties/PreferredCacheClusterAZs" 298 | ], 299 | "primaryIdentifier": [ 300 | "/properties/ReplicationGroupId" 301 | ], 302 | "readOnlyProperties": [ 303 | "/properties/ConfigurationEndPoint.Address", 304 | "/properties/PrimaryEndPoint.Address", 305 | "/properties/PrimaryEndPoint.Port", 306 | "/properties/ReaderEndPoint.Address", 307 | "/properties/ConfigurationEndPoint.Port", 308 | "/properties/ReadEndPoint.Addresses.List", 309 | "/properties/ReadEndPoint.Ports.List", 310 | "/properties/ReaderEndPoint.Port", 311 | "/properties/ReadEndPoint.Addresses", 312 | "/properties/ReadEndPoint.Ports", 313 | "/properties/ReplicationGroupId" 314 | ] 315 | } -------------------------------------------------------------------------------- /tests/unit/data/schema.kinesis_stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "typeName": "AWS::Kinesis::Stream", 3 | "description": "Resource Type definition for AWS::Kinesis::Stream", 4 | "sourceUrl": "https://github.com/aws-cloudformation/aws-cloudformation-resource-providers-kinesis.git", 5 | "definitions": { 6 | "StreamEncryption": { 7 | "description": "When specified, enables or updates server-side encryption using an AWS KMS key for a specified stream. Removing this property from your stack template and updating your stack disables encryption.", 8 | "type": "object", 9 | "additionalProperties": false, 10 | "properties": { 11 | "EncryptionType": { 12 | "description": "The encryption type to use. The only valid value is KMS. ", 13 | "type": "string", 14 | "enum": [ 15 | "KMS" 16 | ] 17 | }, 18 | "KeyId": { 19 | "description": "The GUID for the customer-managed AWS KMS key to use for encryption. 
This value can be a globally unique identifier, a fully specified Amazon Resource Name (ARN) to either an alias or a key, or an alias name prefixed by \"alias/\".You can also use a master key owned by Kinesis Data Streams by specifying the alias aws/kinesis.", 20 | "type": "string", 21 | "minLength": 1, 22 | "maxLength": 2048 23 | } 24 | }, 25 | "required": [ 26 | "EncryptionType", 27 | "KeyId" 28 | ] 29 | }, 30 | "Tag": { 31 | "description": "An arbitrary set of tags (key-value pairs) to associate with the Kinesis stream.", 32 | "type": "object", 33 | "additionalProperties": false, 34 | "properties": { 35 | "Key": { 36 | "description": "The key name of the tag. You can specify a value that is 1 to 128 Unicode characters in length and cannot be prefixed with aws:. You can use any of the following characters: the set of Unicode letters, digits, whitespace, _, ., /, =, +, and -.", 37 | "type": "string", 38 | "minLength": 1, 39 | "maxLength": 128 40 | }, 41 | "Value": { 42 | "description": "The value for the tag. You can specify a value that is 0 to 255 Unicode characters in length and cannot be prefixed with aws:. You can use any of the following characters: the set of Unicode letters, digits, whitespace, _, ., /, =, +, and -.", 43 | "type": "string", 44 | "minLength": 0, 45 | "maxLength": 255 46 | } 47 | }, 48 | "required": [ 49 | "Key", 50 | "Value" 51 | ] 52 | } 53 | }, 54 | "properties": { 55 | "Arn": { 56 | "description": "The Amazon resource name (ARN) of the Kinesis stream", 57 | "type": "string" 58 | }, 59 | "Name": { 60 | "description": "The name of the Kinesis stream.", 61 | "type": "string", 62 | "minLength": 1, 63 | "maxLength": 128, 64 | "pattern": "^[a-zA-Z0-9_.-]+$" 65 | }, 66 | "RetentionPeriodHours": { 67 | "description": "The number of hours for the data records that are stored in shards to remain accessible.", 68 | "type": "integer", 69 | "minimum": 24 70 | }, 71 | "ShardCount": { 72 | "description": "The number of shards that the stream uses.", 73 | "type": "integer", 74 | "minimum": 1 75 | }, 76 | "StreamEncryption": { 77 | "description": "When specified, enables or updates server-side encryption using an AWS KMS key for a specified stream.", 78 | "$ref": "#/definitions/StreamEncryption" 79 | }, 80 | "Tags": { 81 | "description": "An arbitrary set of tags (key\u2013value pairs) to associate with the Kinesis stream.", 82 | "type": "array", 83 | "uniqueItems": false, 84 | "insertionOrder": false, 85 | "items": { 86 | "$ref": "#/definitions/Tag" 87 | } 88 | } 89 | }, 90 | "additionalProperties": false, 91 | "required": [ 92 | "ShardCount" 93 | ], 94 | "readOnlyProperties": [ 95 | "/properties/Arn" 96 | ], 97 | "createOnlyProperties": [ 98 | "/properties/Name" 99 | ], 100 | "primaryIdentifier": [ 101 | "/properties/Name" 102 | ], 103 | "handlers": { 104 | "create": { 105 | "permissions": [ 106 | "kinesis:DescribeStreamSummary", 107 | "kinesis:CreateStream", 108 | "kinesis:IncreaseStreamRetentionPeriod", 109 | "kinesis:StartStreamEncryption", 110 | "kinesis:AddTagsToStream", 111 | "kinesis:ListTagsForStream" 112 | ] 113 | }, 114 | "read": { 115 | "permissions": [ 116 | "kinesis:DescribeStreamSummary", 117 | "kinesis:ListTagsForStream" 118 | ] 119 | }, 120 | "update": { 121 | "permissions": [ 122 | "kinesis:DescribeStreamSummary", 123 | "kinesis:UpdateShardCount", 124 | "kinesis:IncreaseStreamRetentionPeriod", 125 | "kinesis:DecreaseStreamRetentionPeriod", 126 | "kinesis:StartStreamEncryption", 127 | "kinesis:StopStreamEncryption", 128 | "kinesis:AddTagsToStream", 129 | 
"kinesis:RemoveTagsFromStream", 130 | "kinesis:ListTagsForStream" 131 | ] 132 | }, 133 | "delete": { 134 | "permissions": [ 135 | "kinesis:DescribeStreamSummary", 136 | "kinesis:DeleteStream", 137 | "kinesis:RemoveTagsFromStream" 138 | ] 139 | }, 140 | "list": { 141 | "permissions": [ 142 | "kinesis:ListStreams" 143 | ] 144 | } 145 | } 146 | } -------------------------------------------------------------------------------- /tests/unit/data/schema.lb.json: -------------------------------------------------------------------------------- 1 | { 2 | "typeName": "AWS::ElasticLoadBalancingV2::LoadBalancer", 3 | "description": "Resource Type definition for AWS::ElasticLoadBalancingV2::LoadBalancer", 4 | "additionalProperties": false, 5 | "properties": { 6 | "IpAddressType": { 7 | "type": "string" 8 | }, 9 | "SecurityGroups": { 10 | "type": "array", 11 | "uniqueItems": true, 12 | "items": { 13 | "type": "string" 14 | } 15 | }, 16 | "LoadBalancerAttributes": { 17 | "type": "array", 18 | "uniqueItems": true, 19 | "items": { 20 | "$ref": "#/definitions/LoadBalancerAttribute" 21 | } 22 | }, 23 | "Scheme": { 24 | "type": "string" 25 | }, 26 | "DNSName": { 27 | "type": "string" 28 | }, 29 | "Name": { 30 | "type": "string" 31 | }, 32 | "LoadBalancerName": { 33 | "type": "string" 34 | }, 35 | "Subnets": { 36 | "type": "array", 37 | "uniqueItems": true, 38 | "items": { 39 | "type": "string" 40 | } 41 | }, 42 | "Type": { 43 | "type": "string" 44 | }, 45 | "CanonicalHostedZoneID": { 46 | "type": "string" 47 | }, 48 | "Id": { 49 | "type": "string" 50 | }, 51 | "Tags": { 52 | "type": "array", 53 | "uniqueItems": false, 54 | "items": { 55 | "$ref": "#/definitions/Tag" 56 | } 57 | }, 58 | "LoadBalancerFullName": { 59 | "type": "string" 60 | }, 61 | "SubnetMappings": { 62 | "type": "array", 63 | "uniqueItems": true, 64 | "items": { 65 | "$ref": "#/definitions/SubnetMapping" 66 | } 67 | } 68 | }, 69 | "definitions": { 70 | "SubnetMapping": { 71 | "type": "object", 72 | "additionalProperties": false, 73 | "properties": { 74 | "IPv6Address": { 75 | "type": "string" 76 | }, 77 | "SubnetId": { 78 | "type": "string" 79 | }, 80 | "AllocationId": { 81 | "type": "string" 82 | }, 83 | "PrivateIPv4Address": { 84 | "type": "string" 85 | } 86 | }, 87 | "required": [ 88 | "SubnetId" 89 | ] 90 | }, 91 | "LoadBalancerAttribute": { 92 | "type": "object", 93 | "additionalProperties": false, 94 | "properties": { 95 | "Value": { 96 | "type": "string" 97 | }, 98 | "Key": { 99 | "type": "string" 100 | } 101 | } 102 | }, 103 | "Tag": { 104 | "type": "object", 105 | "additionalProperties": false, 106 | "properties": { 107 | "Value": { 108 | "type": "string" 109 | }, 110 | "Key": { 111 | "type": "string" 112 | } 113 | }, 114 | "required": [ 115 | "Value", 116 | "Key" 117 | ] 118 | } 119 | }, 120 | "createOnlyProperties": [ 121 | "/properties/Name", 122 | "/properties/Type", 123 | "/properties/Scheme" 124 | ], 125 | "primaryIdentifier": [ 126 | "/properties/Id" 127 | ], 128 | "readOnlyProperties": [ 129 | "/properties/LoadBalancerName", 130 | "/properties/CanonicalHostedZoneID", 131 | "/properties/Id", 132 | "/properties/LoadBalancerFullName", 133 | "/properties/DNSName" 134 | ] 135 | } -------------------------------------------------------------------------------- /tests/unit/test_resources.py: -------------------------------------------------------------------------------- 1 | from conftest import load_data 2 | from tfdevops.resource import Translator 3 | from tfdevops.utils import filter_empty 4 | 5 | 6 | def 
test_elasticache_replication_group(validate): 7 | translator = Translator.get_translator("elasticache_replication_group")() 8 | resource = load_data("elasticache.json") 9 | props = translator.get_properties(resource) 10 | validate(translator, filter_empty(props)) 11 | 12 | 13 | def test_app_lb(validate): 14 | translator = Translator.get_translator("lb")() 15 | resource = load_data("app_lb.json") 16 | props = translator.get_properties(resource) 17 | validate(translator, props) 18 | 19 | 20 | def test_kinesis(validate): 21 | translator = Translator.get_translator("kinesis_stream")() 22 | resource = load_data("kinesis_stream.json") 23 | validate(translator, translator.get_properties(resource)) 24 | -------------------------------------------------------------------------------- /tfdevops/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stacklet/tfdevops/27b9686a7c7f58860fd3ae8a060d3d608590988c/tfdevops/__init__.py -------------------------------------------------------------------------------- /tfdevops/cfn.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import boto3 4 | from botocore.exceptions import ClientError, WaiterError 5 | from botocore.waiter import WaiterModel, create_waiter_with_client 6 | 7 | from .resource import Translator 8 | from .utils import filter_empty, get_state_resources, log 9 | 10 | # manually construct waiter models for change sets since the service 11 | # team didn't bother to publish one in their smithy models, perhaps 12 | # understandably since one only needs these for unhappy paths. 13 | # re smithy https://awslabs.github.io/smithy/ 14 | 15 | ChangeSetWaiters = { 16 | "version": 2, 17 | "waiters": { 18 | "ChangeSetDeleteComplete": { 19 | "operation": "DescribeChangeSet", 20 | "delay": 10, 21 | "maxAttempts": 40, 22 | "acceptors": [ 23 | { 24 | "expected": "DELETE_FAILED", 25 | "matcher": "path", 26 | "state": "failure", 27 | "argument": "Status", 28 | }, 29 | { 30 | "expected": "DELETE_COMPLETE", 31 | "matcher": "path", 32 | "argument": "Status", 33 | "state": "success", 34 | }, 35 | ], 36 | }, 37 | "ChangeSetExecuteComplete": { 38 | "operation": "DescribeChangeSet", 39 | "delay": 10, 40 | "maxAttempts": 40, 41 | "acceptors": [ 42 | { 43 | "expected": "EXECUTE_FAILED", 44 | "matcher": "path", 45 | "state": "failure", 46 | "argument": "ExecutionStatus", 47 | }, 48 | { 49 | "expected": "EXECUTE_COMPLETE", 50 | "matcher": "path", 51 | "argument": "ExecutionStatus", 52 | "state": "success", 53 | }, 54 | ], 55 | }, 56 | }, 57 | } 58 | 59 | 60 | def get_cfn_template(s3_client, s3_path, module, state_file, types): 61 | state = get_state_resources(module, state_file) 62 | 63 | ctemplate = { 64 | "AWSTemplateFormatVersion": "2010-09-09", 65 | "Description": "TF to CFN Guru Meditation Ops", 66 | "Resources": {}, 67 | } 68 | translators = Translator.get_translator_map() 69 | ids = [] 70 | 71 | for k, v in state.items(): 72 | provider, k = k.split("_", 1) 73 | if types and k not in types: 74 | continue 75 | if k not in translators: 76 | log.debug("no cfn type for tf %s" % k) 77 | continue 78 | 79 | translator_class = translators.get(k) 80 | if not translator_class: 81 | log.debug("no translator for %s" % k) 82 | continue 83 | 84 | cfn_type = translator_class.cfn_type 85 | translator = translator_class({"s3_path": s3_path, "s3": s3_client}) 86 | 87 | for r in v: 88 | rname = translator.get_name(r) 89 | if rname in
ctemplate["Resources"]: 90 | log.debug("resource override %s" % rname) 91 | rname = "%s%s" % (rname, cfn_type.split("::")[-1]) 92 | props = translator.get_properties(r) 93 | if props is None: 94 | continue 95 | props = filter_empty(props) 96 | ctemplate["Resources"][rname] = { 97 | "Type": cfn_type, 98 | "DeletionPolicy": "Retain", 99 | "Properties": props, 100 | } 101 | 102 | ids.append( 103 | { 104 | "ResourceType": cfn_type, 105 | "LogicalResourceId": rname, 106 | "ResourceIdentifier": translator.get_identity(r), 107 | } 108 | ) 109 | return ctemplate, ids 110 | 111 | 112 | def deploy(stack_name, stack_content, template_url, import_resources, change_name): 113 | client = boto3.client("cloudformation") 114 | 115 | try: 116 | stack_info = client.describe_stacks(StackName=stack_name)["Stacks"][0] 117 | log.info("Found existing stack, state:%s", stack_info["StackStatus"]) 118 | except ClientError: 119 | # somewhat annoying the service team hasn't put a proper customized 120 | # exception in place for a common error case. e.g. they have one for 121 | # client.exceptions.StackNotFoundException but didn't bother 122 | # to actually use it for this, or it's kept for hysterical-raisins (historical reasons) compatibility. 123 | # This unfortunately means we have to catch a very generic client error. 124 | # ie. we're trying to catch errors like this. 125 | # botocore.exceptions.ClientError: An error occurred (ValidationError) when 126 | # calling the DescribeStacks operation: Stack with id GuruStack does not exist 127 | stack_info = None 128 | 129 | # so for each stack and each resource we have to deal with the complexity 130 | # of cfn's underlying state workflow, as outlined by the internal state 131 | # machine enumerated below. 132 | # 133 | # This is a great example of why terraform represents sanity, as well as how 134 | # customer feedback driven product development (aka we want rollback) can lead 135 | # to a worse experience for customers, if one doesn't keep the bigger picture in mind. 136 | # 137 | # It also leads to brittleness and complexity for any tool building on 138 | # cloudformation, exhibit A being the unusability of stacksets in the 139 | # real world. 140 | # 141 | # It gets worse when you consider the compatibility complexity matrix 142 | # on the various versions and bugs, like the lack of a proper error code 143 | # for stack not found above. 144 | # 145 | # Nonetheless, we persevere and try to present a humane interface.
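# (The deploy flow below collapses the state enumeration that follows into a few actionable cases: delete and recreate on ROLLBACK_COMPLETE, treat DELETE_COMPLETE as no stack, bail out while any *_IN_PROGRESS transition is underway, and otherwise skip resources the stack has already imported.)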
146 | # 147 | # Stack State Enumeration: 148 | # CREATE_COMPLETE 149 | # CREATE_FAILED 150 | # CREATE_IN_PROGRESS 151 | # DELETE_COMPLETE 152 | # DELETE_FAILED 153 | # DELETE_IN_PROGRESS 154 | # IMPORT_COMPLETE 155 | # IMPORT_IN_PROGRESS 156 | # IMPORT_ROLLBACK_COMPLETE 157 | # IMPORT_ROLLBACK_FAILED 158 | # IMPORT_ROLLBACK_IN_PROGRESS 159 | # REVIEW_IN_PROGRESS 160 | # ROLLBACK_COMPLETE 161 | # ROLLBACK_FAILED 162 | # ROLLBACK_IN_PROGRESS 163 | # UPDATE_COMPLETE 164 | # UPDATE_COMPLETE_CLEANUP_IN_PROGRESS 165 | # UPDATE_IN_PROGRESS 166 | # UPDATE_ROLLBACK_COMPLETE 167 | # UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS 168 | # UPDATE_ROLLBACK_FAILED 169 | # UPDATE_ROLLBACK_IN_PROGRESS 170 | 171 | if stack_info and stack_info["StackStatus"] == "ROLLBACK_COMPLETE": 172 | log.info("Deleting failed stack") 173 | client.delete_stack(StackName=stack_name) 174 | waiter = client.get_waiter("stack_delete_complete") 175 | waiter.wait(StackName=stack_name) 176 | stack_info = None 177 | elif stack_info and stack_info["StackStatus"] == "REVIEW_IN_PROGRESS": 178 | pass 179 | elif stack_info and stack_info["StackStatus"].endswith("IN_PROGRESS"): 180 | log.info( 181 | "Cloudformation stack undergoing change %s, please try again later", 182 | stack_info["StackStatus"], 183 | ) 184 | return 185 | elif stack_info and stack_info["StackStatus"] == "DELETE_COMPLETE": 186 | stack_info = None 187 | elif stack_info: 188 | stack_resources = { 189 | sr["LogicalResourceId"] 190 | for sr in client.describe_stack_resources(StackName=stack_name).get( 191 | "StackResources", [] 192 | ) 193 | } 194 | import_resources = [ 195 | i for i in import_resources if i["LogicalResourceId"] not in stack_resources 196 | ] 197 | if not import_resources: 198 | log.info("All resources have already been imported") 199 | return 200 | 201 | # Check for an extant change set 202 | try: 203 | cinfo = client.describe_change_set( 204 | StackName=stack_name, ChangeSetName=change_name 205 | ) 206 | except (client.exceptions.ChangeSetNotFoundException, ClientError): 207 | cinfo = None 208 | 209 | if cinfo and cinfo["Status"] == "FAILED": 210 | log.warning( 211 | "Previous change set failed with reason %s", cinfo.get("StatusReason", "") 212 | ) 213 | client.delete_change_set(StackName=stack_name, ChangeSetName=change_name) 214 | waiter = create_waiter_with_client( 215 | "ChangeSetDeleteComplete", WaiterModel(ChangeSetWaiters), client 216 | ) 217 | try: 218 | waiter.wait( 219 | StackName=stack_name, 220 | ChangeSetName=change_name, 221 | WaiterConfig={"Delay": 10, "MaxAttempts": 60}, 222 | ) 223 | except WaiterError as e: 224 | if ( 225 | "Error" in e.last_response 226 | and e.last_response["Error"]["Code"] == "ChangeSetNotFound" 227 | ): 228 | # happy path instant delete 229 | pass 230 | else: 231 | raise 232 | 233 | log.info( 234 | "Creating import change set, %d resources to import", len(import_resources) 235 | ) 236 | params = dict( 237 | StackName=stack_name, 238 | ChangeSetType="IMPORT", 239 | Capabilities=["CAPABILITY_NAMED_IAM"], 240 | ChangeSetName=change_name, 241 | ResourcesToImport=import_resources, 242 | ) 243 | if template_url: 244 | params["TemplateURL"] = template_url 245 | elif stack_content: 246 | params["TemplateBody"] = json.dumps(stack_content) 247 | 248 | # returns ids which are mostly useless, because we have to use unique-at-the-moment names in the API 249 | client.create_change_set(**params) 250 | 251 | # Change Set States 252 | # CREATE_COMPLETE 253 | # CREATE_IN_PROGRESS 254 | # CREATE_PENDING 255 | # DELETE_COMPLETE 256 | # 
DELETE_FAILED 257 | # DELETE_IN_PROGRESS 258 | # DELETE_PENDING 259 | # FAILED 260 | 261 | waiter = client.get_waiter("change_set_create_complete") 262 | try: 263 | waiter.wait( 264 | StackName=stack_name, 265 | ChangeSetName=change_name, 266 | WaiterConfig={"Delay": 10, "MaxAttempts": 60}, 267 | ) 268 | except WaiterError as e: 269 | log.error( 270 | "Changeset creation failed status: %s reason: %s", 271 | e.last_response["Status"], 272 | e.last_response["StatusReason"], 273 | ) 274 | return 275 | 276 | log.info("Executing change set to import resources") 277 | client.execute_change_set(ChangeSetName=change_name, StackName=stack_name) 278 | 279 | # Aha changesets have another state workflow representing execution progress 280 | # AVAILABLE 281 | # EXECUTE_COMPLETE 282 | # EXECUTE_FAILED 283 | # EXECUTE_IN_PROGRESS 284 | # OBSOLETE 285 | # UNAVAILABLE 286 | 287 | waiter = create_waiter_with_client( 288 | "ChangeSetExecuteComplete", WaiterModel(ChangeSetWaiters), client 289 | ) 290 | try: 291 | waiter.wait( 292 | StackName=stack_name, 293 | ChangeSetName=change_name, 294 | WaiterConfig={"Delay": 10, "MaxAttempts": 60}, 295 | ) 296 | except WaiterError as e: 297 | # the happy path is a changeset that executes really quickly and disappears while the status of 298 | # the stack itself reflects the actual async progress. lulz, we do a waiter because 299 | # who knows about the other 1% of the time, since the cfn exposed model of change sets 300 | # suggests they may have other states, rather than instantly disappearing on execution. 301 | if ( 302 | "Error" in e.last_response 303 | and e.last_response["Error"]["Code"] == "ChangeSetNotFound" 304 | ): 305 | # common happy path, change set disappears before change is complete :/ 306 | pass 307 | else: 308 | raise 309 | 310 | # but now we have to wait for the stack status to reflect back on steady state 311 | waiter = client.get_waiter("stack_import_complete") 312 | log.info("Waiting for import to complete") 313 | waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 15, "MaxAttempts": 100}) 314 | 315 | log.info("Cloudformation Stack Deployed - Terraform resources imported") 316 | -------------------------------------------------------------------------------- /tfdevops/cli.py: -------------------------------------------------------------------------------- 1 | # Copyright Stacklet, Inc. 2 | # SPDX-License-Identifier: Apache-2.0 3 | # 4 | import json 5 | import logging 6 | 7 | import boto3 8 | import click 9 | import jsonschema 10 | 11 | from . 
import cfn 12 | from .utils import format_s3_path, format_template_url, log, write_s3_key 13 | 14 | __author__ = "Kapil Thangavelu " 15 | 16 | 17 | DEFAULT_STACK_NAME = "GuruStack" 18 | DEFAULT_CHANGESET_NAME = "GuruImport" 19 | 20 | 21 | @click.group() 22 | @click.option("-v", "--verbose", is_flag=True) 23 | def cli(verbose): 24 | """Terraform to Cloudformation and AWS DevOps Guru""" 25 | logging.basicConfig(level=verbose and logging.DEBUG or logging.INFO) 26 | if verbose: 27 | logging.getLogger("botocore").setLevel(logging.INFO) 28 | logging.getLogger("urllib3").setLevel(logging.INFO) 29 | 30 | 31 | @cli.command() 32 | @click.option("-t", "--template", type=click.File("r")) 33 | @click.option("-r", "--resources", type=click.File("r"), required=True) 34 | @click.option("-u", "--template-url", help="s3 path to template") 35 | @click.option("-s", "--stack-name", default=DEFAULT_STACK_NAME) 36 | @click.option("--change-name", default=DEFAULT_CHANGESET_NAME) 37 | @click.option("--no-guru", is_flag=True, default=False) 38 | def deploy(template, resources, stack_name, no_guru, template_url, change_name): 39 | """Deploy a cloudformation stack with imported resources 40 | 41 | Imports terraform resources into a cloudformation stack. 42 | 43 | Consumes outputs of cfn generation subcommand. 44 | 45 | Amazon DevOps Guru enrollment is enabled by default; pass --no-guru to skip it. 46 | """ 47 | if not template and not template_url: 48 | raise SyntaxError("Either template or template_url parameter must be passed") 49 | stack_content = template and json.load(template) or None 50 | 51 | import_resources = json.load(resources) 52 | cfn.deploy(stack_name, stack_content, template_url, import_resources, change_name) 53 | 54 | if no_guru is False: 55 | ensure_devops_guru(stack_name) 56 | 57 | 58 | def ensure_devops_guru(stack_name): 59 | log.info("Enrolling terraform stack into devops guru") 60 | guru = boto3.client("devops-guru") 61 | guru.update_resource_collection( 62 | Action="ADD", 63 | ResourceCollection={"CloudFormation": {"StackNames": [stack_name]}}, 64 | ) 65 | 66 | 67 | @cli.command() 68 | @click.option("-t", "--template", type=click.File("r"), required=True) 69 | def validate(template): 70 | """validate resources in a template per their jsonschema def""" 71 | data = json.load(template) 72 | rtypes = set() 73 | for logical_id, resource in data.get("Resources", {}).items(): 74 | rtypes.add(resource["Type"]) 75 | 76 | type_schema_map = {} 77 | client = boto3.client("cloudformation") 78 | for r in rtypes: 79 | rinfo = client.describe_type(TypeName=r, Type="RESOURCE") 80 | schema = json.loads(rinfo["Schema"]) 81 | type_schema_map[r] = { 82 | "validator": jsonschema.Draft7Validator(schema), 83 | "schema": schema, 84 | } 85 | 86 | template_error = False 87 | for logical_id, resource in data.get("Resources", {}).items(): 88 | rmeta = type_schema_map[resource["Type"]] 89 | props = set(resource["Properties"]) 90 | sprops = set(rmeta["schema"]["properties"].keys()) 91 | unknown = props.difference(sprops) 92 | if unknown: 93 | log.warning( 94 | "%s -> %s unknown props %s" % (logical_id, resource["Type"], unknown) 95 | ) 96 | 97 | errors = list(rmeta["validator"].iter_errors(resource["Properties"])) 98 | if errors: 99 | log.warning( 100 | "%s -> %s errors %d" % (logical_id, resource["Type"], len(errors)) 101 | ) 102 | template_error = True 103 | for e in errors: 104 | log.warning("Resource %s error:\n %s" % (logical_id, str(e))) 105 | if template_error is False: 106 | log.info("Congratulations! 
- the template validates") 107 | 108 | 109 | @cli.command(name="cfn") 110 | @click.option("-d", "--module", help="Terraform root module directory") 111 | @click.option( 112 | "-t", 113 | "--template", 114 | type=click.File("w"), 115 | default="-", 116 | help="Cloudformation template output path", 117 | ) 118 | @click.option( 119 | "-r", 120 | "--resources", 121 | type=click.File("w"), 122 | help="Output file for resources to import", 123 | ) 124 | @click.option( 125 | "--s3-path", 126 | help="S3 Bucket and Prefix (s3://bucket/pre/fix) for oversized templates and resources", 127 | ) 128 | @click.option( 129 | "--state-file", help="Terraform state file - output of terraform show -json", 130 | ) 131 | @click.option("--types", multiple=True, help="Only consider these terraform types") 132 | def gen_cfn(module, template, resources, types, s3_path, state_file): 133 | """Export a cloudformation template and importable resources 134 | 135 | s3 path only needs to be specified when handling resources with verbose 136 | definitions (step functions) or a large cardinality of resources which would 137 | overflow cloudformation's api limits on templates (50k). 138 | """ 139 | s3_client = s3_path and boto3.client("s3") 140 | ctemplate, ids = cfn.get_cfn_template(s3_client, s3_path, module, state_file, types) 141 | # overflow to s3 for actual deployment on large templates 142 | serialized_template = json.dumps(ctemplate).encode("utf8") 143 | 144 | if s3_path: # and len(serialized_template) > 49000: 145 | s3_url = format_template_url( 146 | s3_client, 147 | format_s3_path( 148 | write_s3_key( 149 | s3_client, s3_path, "%s.json" % DEFAULT_STACK_NAME, ctemplate 150 | ) 151 | ), 152 | ) 153 | log.info("wrote s3 template url: %s", s3_url) 154 | elif len(serialized_template) > 49000: 155 | log.warning( 156 | "template too large for local deploy, pass --s3-path to deploy from s3" 157 | ) 158 | 159 | template.write(json.dumps(ctemplate, indent=2)) 160 | 161 | if resources: 162 | resources.write(json.dumps(ids, indent=2)) 163 | 164 | 165 | if __name__ == "__main__": 166 | cli() 167 | -------------------------------------------------------------------------------- /tfdevops/resource.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from .utils import write_s3_key 4 | 5 | 6 | class Translator: 7 | 8 | id = None 9 | tf_type = None 10 | strip = () 11 | rename = {} 12 | flatten = () 13 | 14 | def __init__(self, config=None): 15 | self.config = config 16 | 17 | @classmethod 18 | def get_translator(cls, tf_type): 19 | return cls.get_translator_map()[tf_type] 20 | 21 | @classmethod 22 | def get_translator_map(cls): 23 | d = {} 24 | for scls in cls.__subclasses__(): 25 | if scls.tf_type: 26 | d[scls.tf_type] = scls 27 | return d 28 | 29 | def get_name(self, r): 30 | return self._camel_str(r["name"]) 31 | 32 | def get_identity(self, r): 33 | return {self.id: r["values"]["id"]} 34 | 35 | def get_properties(self, tf): 36 | tfv = self.filter_empty(tf["values"]) 37 | tfv.pop("id", None) 38 | tfv.pop("arn", None) 39 | tfv.pop("tags_all", None) 40 | for s in self.strip: 41 | tfv.pop(s, None) 42 | 43 | for f in self.flatten: 44 | if f in tfv and isinstance(tfv[f], list) and len(tfv[f]) >= 1: 45 | tfv[f] = tfv[f][0] 46 | 47 | renamed = {} 48 | for src, tgt in self.rename.items(): 49 | if src not in tfv: 50 | continue 51 | v = tfv.pop(src) 52 | renamed[tgt] = v 53 | cf = self.camel(tfv) 54 | cf.update(renamed) 55 | return cf 56 | 57 | def filter_empty(self, d): 58 | r = {} 
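# note: this shallow filter keeps only truthy values, so every falsy entry (None, "", 0, False, [], {}) is dropped; e.g. an elasticache automatic_failover_enabled of False simply disappears from the emitted properties instead of rendering as an empty CloudFormation field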
59 | for k, v in d.items(): 60 | if v: 61 | r[k] = v 62 | return r 63 | 64 | def _camel_str(self, k): 65 | parts = [p.capitalize() for p in k.split("_")] 66 | return "".join(parts) 67 | 68 | def get_tags(self, tag_map): 69 | return [{"Key": k, "Value": v} for k, v in tag_map.items()] 70 | 71 | def camel(self, d): 72 | r = {} 73 | 74 | for k, v in d.items(): 75 | if isinstance(v, dict): 76 | v = self.camel(v) 77 | if isinstance(v, list) and v and isinstance(v[0], dict): 78 | v = [self.camel(i) for i in v] 79 | r[self._camel_str(k)] = v 80 | return r 81 | 82 | 83 | class EventRuleTranslator(Translator): 84 | 85 | tf_type = "cloudwatch_event_rule" 86 | cfn_type = "AWS::Events::Rule" 87 | 88 | id = "Name" 89 | 90 | def get_properties(self, r): 91 | cfr = super().get_properties(r) 92 | cfr["State"] = cfr.pop("IsEnabled", None) is True and "ENABLED" or "DISABLED" 93 | 94 | if cfr.get("EventBusName") != "default": 95 | return None 96 | 97 | return cfr 98 | 99 | 100 | class DbInstance(Translator): 101 | 102 | tf_type = "db_instance" 103 | cfn_type = "AWS::RDS::DBInstance" 104 | id = "DBInstanceIdentifier" 105 | strip = ( 106 | "hosted_zone_id", 107 | "apply_immediately", 108 | "skip_final_snapshot", 109 | "backup_window", 110 | "maintenance_window", 111 | "resource_id", 112 | "address", 113 | "ca_cert_identifier", 114 | "status", 115 | "latest_restorable_time", 116 | "endpoint", 117 | "performance_insights_kms_key_id", # tf allows key set when insights false 118 | "monitoring_interval", # tf allows 0 value cfn does not 119 | "monitoring_role_arn", 120 | "timeouts", 121 | "engine_version_actual", 122 | ) 123 | rename = { 124 | "username": "MasterUsername", 125 | "name": "DBName", 126 | "multi_az": "MultiAZ", 127 | "identifier": "DBInstanceIdentifier", 128 | "password": "MasterUserPassword", 129 | "instance_class": "DBInstanceClass", 130 | "vpc_security_group_ids": "VPCSecurityGroups", 131 | "db_subnet_group_name": "DBSubnetGroupName", 132 | "parameter_group_name": "DBParameterGroupName", 133 | "iam_database_authentication_enabled": "EnableIAMDatabaseAuthentication", 134 | } 135 | 136 | def get_identity(self, r): 137 | return {self.id: r["values"]["identifier"]} 138 | 139 | def get_properties(self, tf): 140 | cfr = super().get_properties(tf) 141 | cfr["Port"] = str(cfr["Port"]) 142 | cfr["AllocatedStorage"] = str(cfr["AllocatedStorage"]) 143 | return cfr 144 | 145 | 146 | class ElasticacheReplicationGroup(Translator): 147 | 148 | tf_type = "elasticache_replication_group" 149 | cfn_type = "AWS::ElastiCache::ReplicationGroup" 150 | 151 | id = "ReplicationGroupId" 152 | rename = { 153 | "subnet_group_name": "CacheSubnetGroupName", 154 | "maintenance_window": "PreferredMaintenanceWindow", 155 | "number_cache_clusters": "NumCacheClusters", 156 | "node_type": "CacheNodeType", 157 | "parameter_group_name": "CacheParameterGroupName", 158 | } 159 | strip = ( 160 | "primary_endpoint_address", 161 | "reader_endpoint_address", 162 | "member_clusters", 163 | "engine_version_actual", 164 | "apply_immediately", 165 | "cluster_mode", 166 | ) 167 | 168 | 169 | class EcsService(Translator): 170 | 171 | tf_type = "ecs_service" 172 | cfn_type = "AWS::ECS::Service" 173 | 174 | id = "ServiceName" 175 | flatten = ("network_configuration", "deployment_controller") 176 | rename = { 177 | "iam_role": "Role", 178 | "enable_ecs_managed_tags": "EnableECSManagedTags", 179 | "cluster": "Cluster", 180 | } 181 | strip = ( 182 | "deployment_circuit_breaker", 183 | "propagate_tags", 184 | "deployment_maximum_percent", 185 | 
"deployment_minimum_healthy_percent", 186 | ) 187 | 188 | def get_identity(self, r): 189 | return {"ServiceArn": r["values"]["id"], "Cluster": r["values"]["cluster"]} 190 | 191 | def get_properties(self, tf): 192 | cfr = super().get_properties(tf) 193 | network = cfr.pop("NetworkConfiguration") 194 | network["AssignPublicIp"] = ( 195 | network.pop("AssignPublicIp") is True and "ENABLED" or "DISABLED" 196 | ) 197 | cfr["NetworkConfiguration"] = {"AwsvpcConfiguration": network} 198 | return cfr 199 | 200 | 201 | class Sqs(Translator): 202 | 203 | tf_type = "sqs_queue" 204 | cfn_type = "AWS::SQS::Queue" 205 | 206 | id = "QueueUrl" 207 | strip = ("url", "policy", "fifo_throughput_limit", "deduplication_scope") 208 | rename = { 209 | "max_message_size": "MaximumMessageSize", 210 | "name": "QueueName", 211 | "message_retention_seconds": "MessageRetentionPeriod", 212 | "visibility_timeout_seconds": "VisibilityTimeout", 213 | "receive_wait_time_seconds": "ReceiveMessageWaitTimeSeconds", 214 | } 215 | 216 | def get_identity(self, r): 217 | return {self.id: r["values"]["url"]} 218 | 219 | def get_properties(self, tf): 220 | cfr = super().get_properties(tf) 221 | if "RedrivePolicy" in cfr: 222 | cfr["RedrivePolicy"] = json.loads(cfr["RedrivePolicy"]) 223 | return cfr 224 | 225 | 226 | class Topic(Translator): 227 | 228 | tf_type = "sns_topic" 229 | cfn_type = "AWS::SNS::Topic" 230 | 231 | id = "TopicArn" 232 | strip = ("policy", "owner") 233 | rename = {"name": "TopicName"} 234 | 235 | def get_identity(self, r): 236 | return {self.id: r["values"]["arn"]} 237 | 238 | 239 | class KinesisStream(Translator): 240 | 241 | tf_type = "kinesis_stream" 242 | cfn_type = "AWS::Kinesis::Stream" 243 | id = "Name" 244 | strip = ("shard_level_metrics", "encryption_type") 245 | rename = {"retention_period": "RetentionPeriodHours"} 246 | 247 | def get_properties(self, tfr): 248 | cfr = super().get_properties(tfr) 249 | cfr["Tags"] = self.get_tags(cfr.get("Tags", {})) 250 | return cfr 251 | 252 | 253 | class Lambda(Translator): 254 | 255 | tf_type = "lambda_function" 256 | cfn_type = "AWS::Lambda::Function" 257 | 258 | id = "FunctionName" 259 | flatten = ("environment", "tracing_config", "vpc_config") 260 | strip = ( 261 | "version", 262 | "policy", 263 | "source_code_size", 264 | "source_code_hash", 265 | "qualified_arn", 266 | "filename", 267 | "invoke_arn", 268 | "last_modified", 269 | "timeouts", 270 | ) 271 | 272 | def get_identity(self, r): 273 | return {self.id: r["values"]["function_name"]} 274 | 275 | def get_properties(self, tfr): 276 | cfr = super().get_properties(tfr) 277 | if cfr["ReservedConcurrentExecutions"] == -1: 278 | cfr.pop("ReservedConcurrentExecutions") 279 | if tfr["values"].get("environment"): 280 | cfr["Environment"]["Variables"] = tfr["values"]["environment"][0][ 281 | "variables" 282 | ] 283 | cfr["Code"] = {"ZipFile": tfr["values"]["filename"]} 284 | cfr["Tags"] = self.get_tags(tfr["values"].get("Tags", {})) 285 | if "VpcConfig" in cfr: 286 | cfr["VpcConfig"].pop("VpcId") 287 | return cfr 288 | 289 | 290 | class Elbv2(Translator): 291 | 292 | tf_type = "lb" 293 | cfn_type = "AWS::ElasticLoadBalancingV2::LoadBalancer" 294 | id = "LoadBalancerArn" 295 | rename = {"subnet_mapping": "SubnetMappings", "load_balancer_type": "Type"} 296 | strip = ("dns_name", "arn_suffix", "access_logs", "vpc_id", "zone_id") 297 | 298 | attributes = { 299 | "IdleTimeout": "idle_timeout.timeout_seconds", 300 | "EnableHttp2": "routing.http2.enabled", 301 | } 302 | 303 | def get_identity(self, r): 304 | return 
{self.id: r["values"]["id"]} 305 | 306 | def get_properties(self, tfr): 307 | cfr = super().get_properties(tfr) 308 | for k, v in self.attributes.items(): 309 | cv = cfr.pop(k, None) 310 | if cv is None: 311 | continue 312 | cfr.setdefault("LoadBalancerAttributes", []).append( 313 | {"Key": v, "Value": cv and "true" or "false"} 314 | ) 315 | 316 | subs = [] 317 | for sub in cfr.get("SubnetMappings", ()): 318 | sub = self.filter_empty(sub) 319 | subs.append(self.camel(sub)) 320 | cfr["SubnetMappings"] = subs 321 | return cfr 322 | 323 | 324 | class StateMachine(Translator): 325 | 326 | tf_type = "sfn_state_machine" 327 | cfn_type = "AWS::StepFunctions::StateMachine" 328 | 329 | id = "Arn" 330 | strip = ( 331 | "definition", 332 | "creation_date", 333 | "status", 334 | "logging_configuration", 335 | "tracing_configuration", 336 | ) 337 | rename = { 338 | "name": "StateMachineName", 339 | "definition": "DefinitionString", 340 | "type": "StateMachineType", 341 | } 342 | 343 | def get_identity(self, r): 344 | return {self.id: r["values"]["arn"]} 345 | 346 | def get_properties(self, tf): 347 | cfr = super().get_properties(tf) 348 | if self.config["s3_path"]: 349 | kinfo = write_s3_key( 350 | self.config["s3"], 351 | self.config["s3_path"], 352 | "%s.json" % tf["name"], 353 | tf["values"]["definition"], 354 | ) 355 | cfr["DefinitionS3Location"] = loc = { 356 | "Bucket": kinfo["Bucket"], 357 | "Key": kinfo["Key"], 358 | } 359 | if kinfo.get("Version"): 360 | loc["Version"] = kinfo["Version"] 361 | else: 362 | cfr["Definition"] = json.loads(tf["values"]["definition"]) 363 | return cfr 364 | 365 | 366 | class DynamodbTable(Translator): 367 | 368 | tf_type = "dynamodb_table" 369 | cfn_type = "AWS::DynamoDB::Table" 370 | 371 | id = "TableName" 372 | rename = {"name": "TableName"} 373 | strip = ( 374 | "ttl", 375 | "point_in_time_recovery", 376 | "stream_enabled", 377 | "server_side_encryption", 378 | "hash_key", 379 | "range_key", 380 | "stream_arn", 381 | "stream_label", 382 | "attribute", 383 | "timeouts", 384 | ) 385 | 386 | def get_properties(self, tf): 387 | cfr = super().get_properties(tf) 388 | if tf["values"]["hash_key"]: 389 | cfr.setdefault("KeySchema", []).append( 390 | {"AttributeName": tf["values"]["hash_key"], "KeyType": "HASH"} 391 | ) 392 | if tf["values"]["range_key"]: 393 | cfr.setdefault("KeySchema", []).append( 394 | {"AttributeName": tf["values"]["range_key"], "KeyType": "RANGE"} 395 | ) 396 | if cfr.get("GlobalSecondaryIndex"): 397 | idxs = [] 398 | for idx in cfr.pop("GlobalSecondaryIndex"): 399 | cidx = {"IndexName": idx["Name"]} 400 | cidx["Projection"] = { 401 | "NonKeyAttributes": idx["NonKeyAttributes"], 402 | "ProjectionType": idx["ProjectionType"], 403 | } 404 | cidx["KeySchema"] = [ 405 | {"KeyType": "RANGE", "AttributeName": idx["RangeKey"]}, 406 | {"KeyType": "HASH", "AttributeName": idx["HashKey"]}, 407 | ] 408 | 409 | idxs.append(cidx) 410 | cfr["GlobalSecondaryIndexes"] = idxs 411 | attrs = [] 412 | for a in tf["values"]["attribute"]: 413 | attrs.append({"AttributeName": a["name"], "AttributeType": a["type"]}) 414 | cfr["AttributeDefinitions"] = attrs 415 | if cfr.get("StreamViewType"): 416 | cfr["StreamSpecification"] = {"StreamViewType": cfr.pop("StreamViewType")} 417 | 418 | if tf["values"].get("server_side_encryption"): 419 | sse = tf["values"]["server_side_encryption"][0] 420 | cfr["SSESpecification"] = { 421 | "SSEEnabled": sse["enabled"], 422 | "KMSMasterKeyId": sse["kms_key_arn"], 423 | } 424 | return cfr 425 | 
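# Illustrative usage sketch (mirrors tests/unit/test_resources.py); the sample
# dict below is a hypothetical, trimmed-down resource entry from the output of
# `terraform show -json`, shaped like tests/unit/data/kinesis_stream.json.
if __name__ == "__main__":
    sample = {
        "name": "test_stream",
        "values": {
            "id": "arn:aws:kinesis:us-east-2:112233445566:stream/test-poetic-marten",
            "arn": "arn:aws:kinesis:us-east-2:112233445566:stream/test-poetic-marten",
            "name": "test-poetic-marten",
            "retention_period": 48,
            "shard_count": 1,
            "tags": {"Environment": "test"},
            "tags_all": {"Environment": "test"},
        },
    }
    translator = Translator.get_translator("kinesis_stream")()
    # pops id/arn/tags_all, renames retention_period -> RetentionPeriodHours,
    # camel-cases the remaining keys, and converts the tag map to a Key/Value list
    print(translator.get_properties(sample))
    # {'Name': 'test-poetic-marten', 'ShardCount': 1,
    #  'Tags': [{'Key': 'Environment', 'Value': 'test'}], 'RetentionPeriodHours': 48}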
-------------------------------------------------------------------------------- /tfdevops/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import os 4 | import subprocess 5 | from urllib import parse 6 | 7 | import jmespath 8 | 9 | DEFAULT_S3_ENCRYPT = os.environ.get("TFDEVOPS_S3_ENCRYPT", "AES256") 10 | 11 | log = logging.getLogger("tfdevops") 12 | 13 | 14 | def get_state_resources(tf_dir, tf_state): 15 | if tf_dir: 16 | output = subprocess.check_output(["terraform", "show", "-json"], cwd=tf_dir) 17 | state = json.loads(output) 18 | elif tf_state: 19 | state = json.load(open(tf_state)) 20 | else: 21 | raise SyntaxError("either --module or --state-file needs to be passed") 22 | 23 | state_resources = {} 24 | 25 | resources = jmespath.search("values.root_module.resources", state) or [] 26 | mod_resources = ( 27 | jmespath.search("values.root_module.child_modules[].resources[]", state) or () 28 | ) 29 | resources.extend(mod_resources) 30 | 31 | if not resources: 32 | log.info("empty state") 33 | 34 | for r in resources: 35 | if r["mode"] != "managed": 36 | continue 37 | tresources = state_resources.setdefault(r["type"], []) 38 | tresources.append(r) 39 | return state_resources 40 | 41 | 42 | def filter_empty(d): 43 | if isinstance(d, list): 44 | for v in list(d): 45 | if isinstance(v, dict): 46 | filter_empty(v) 47 | elif isinstance(d, dict): 48 | for k, v in list(d.items()): 49 | if not v: 50 | del d[k] 51 | elif isinstance(v, (dict, list)): 52 | filter_empty(v) 53 | return d 54 | 55 | 56 | def write_s3_key(client, s3_path, key, content): 57 | kinfo = {} 58 | parsed = parse.urlparse(s3_path) 59 | kinfo["Bucket"] = parsed.netloc 60 | prefix = parsed.path.strip("/") 61 | kinfo["Key"] = "%s/%s" % (prefix, key) 62 | if not isinstance(content, str): 63 | content = json.dumps(content) 64 | result = client.put_object( 65 | Bucket=kinfo["Bucket"], 66 | Key=kinfo["Key"], 67 | # this is the default but i've seen some orgs try to force this via request policy checks 68 | ACL="private", 69 | ServerSideEncryption=DEFAULT_S3_ENCRYPT, 70 | Body=content, 71 | ) 72 | if result.get("VersionId"): 73 | kinfo["Version"] = result["VersionId"] 74 | return kinfo 75 | 76 | 77 | def format_s3_path(kinfo): 78 | t = "s3://{Bucket}/{Key}" 79 | if "Version" in kinfo: 80 | t += "?versionId={Version}" 81 | return t.format(**kinfo) 82 | 83 | 84 | def format_template_url(client, s3_path): 85 | parsed = parse.urlparse(s3_path) 86 | bucket = parsed.netloc 87 | key = parsed.path.strip("/") 88 | version_id = None 89 | if parsed.query: 90 | query = parse.parse_qs(parsed.query) 91 | version_id = query.get("versionId", (None,))[0] 92 | region = ( 93 | client.get_bucket_location(Bucket=bucket).get("LocationConstraint") 94 | or "us-east-1" 95 | ) 96 | url = "https://{bucket}.s3.{region}.amazonaws.com/{key}" 97 | if version_id: 98 | url += "?versionId={version_id}" 99 | return url.format(bucket=bucket, key=key, version_id=version_id, region=region) 100 | --------------------------------------------------------------------------------