├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ ├── feature-request---new-table.md │ └── feature_request.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── golangci-lint.yml │ ├── registry-publish.yml │ ├── stale.yml │ ├── steampipe-anywhere.yml │ └── sync-labels.yml ├── .gitignore ├── .goreleaser.yml ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── config └── terraform.spc ├── docs ├── LICENSE ├── index.md └── tables │ ├── terraform_data_source.md │ ├── terraform_local.md │ ├── terraform_module.md │ ├── terraform_output.md │ ├── terraform_provider.md │ ├── terraform_resource.md │ └── terraform_variable.md ├── go.mod ├── go.sum ├── main.go └── terraform ├── connection_config.go ├── parse_tfplan.go ├── plugin.go ├── table_terraform_data_source.go ├── table_terraform_local.go ├── table_terraform_module.go ├── table_terraform_output.go ├── table_terraform_provider.go ├── table_terraform_resource.go ├── table_terraform_variable.go └── utils.go /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **Steampipe version (`steampipe -v`)** 14 | Example: v0.3.0 15 | 16 | **Plugin version (`steampipe plugin list`)** 17 | Example: v0.5.0 18 | 19 | **To reproduce** 20 | Steps to reproduce the behavior (please include relevant code and/or commands). 21 | 22 | **Expected behavior** 23 | A clear and concise description of what you expected to happen. 24 | 25 | **Additional context** 26 | Add any other context about the problem here. 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Questions 4 | url: https://turbot.com/community/join 5 | about: GitHub issues in this repository are only intended for bug reports and feature requests. Other issues will be closed. Please ask and answer questions through the Steampipe Slack community. 6 | - name: Steampipe CLI Bug Reports and Feature Requests 7 | url: https://github.com/turbot/steampipe/issues/new/choose 8 | about: Steampipe CLI has its own codebase. Bug reports and feature requests for those pieces of functionality should be directed to that repository. -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request---new-table.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request - New table 3 | about: Suggest a new table for this project 4 | title: Add table terraform__ 5 | labels: enhancement, new table 6 | assignees: '' 7 | 8 | --- 9 | 10 | **References** 11 | Add any related links that will help us understand the resource, including vendor documentation, related GitHub issues, and Go SDK documentation. 
12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | # Integration test logs 2 |
3 | Logs 4 | 5 | ``` 6 | Add passing integration test logs here 7 | ``` 8 |
9 | 10 | # Example query results 11 |
12 | Results 13 | 14 | ``` 15 | Add example SQL query results here (please include the input queries as well) 16 | ``` 17 |
18 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "gomod" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | pull-request-branch-name: 13 | separator: "-" 14 | assignees: 15 | - "misraved" 16 | - "madhushreeray30" 17 | labels: 18 | - "dependencies" 19 | -------------------------------------------------------------------------------- /.github/workflows/golangci-lint.yml: -------------------------------------------------------------------------------- 1 | name: golangci-lint 2 | on: 3 | push: 4 | tags: 5 | - v* 6 | branches: 7 | - main 8 | pull_request: 9 | 10 | jobs: 11 | golangci_lint_workflow: 12 | uses: turbot/steampipe-workflows/.github/workflows/golangci-lint.yml@main 13 | -------------------------------------------------------------------------------- /.github/workflows/registry-publish.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy OCI Image 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | jobs: 9 | registry_publish_workflow_ghcr: 10 | uses: turbot/steampipe-workflows/.github/workflows/registry-publish-ghcr.yml@main 11 | secrets: inherit 12 | with: 13 | releaseTimeout: 60m 14 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Stale Issues and PRs 2 | on: 3 | schedule: 4 | - cron: "30 23 * * *" 5 | workflow_dispatch: 6 | inputs: 7 | dryRun: 8 | description: Set to true for a dry run 9 | required: false 10 | default: "false" 11 | type: string 12 | 13 | jobs: 14 | stale_workflow: 15 | uses: turbot/steampipe-workflows/.github/workflows/stale.yml@main 16 | with: 17 | dryRun: ${{ github.event.inputs.dryRun }} 18 | -------------------------------------------------------------------------------- /.github/workflows/steampipe-anywhere.yml: -------------------------------------------------------------------------------- 1 | name: Release Steampipe Anywhere Components 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | 9 | jobs: 10 | anywhere_publish_workflow: 11 | uses: turbot/steampipe-workflows/.github/workflows/steampipe-anywhere.yml@main 12 | secrets: inherit 13 | -------------------------------------------------------------------------------- /.github/workflows/sync-labels.yml: -------------------------------------------------------------------------------- 1 | name: Sync Labels 2 | on: 3 | schedule: 4 | - cron: "30 22 * * 1" 5 | workflow_dispatch: 6 | 7 | jobs: 8 | sync_labels_workflow: 9 | uses: turbot/steampipe-workflows/.github/workflows/sync-labels.yml@main 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Binaries for programs and plugins 2 | *.exe 3 | *.exe~ 4 | *.dll 5 | *.so 6 | *.dylib 7 | 8 | # Test binary, built with 
`go test -c` 9 | *.test 10 | 11 | # Output of the go coverage tool, specifically when used with LiteIDE 12 | *.out 13 | 14 | # Dependency directories (remove the comment below to include it) 15 | # vendor/ 16 | -------------------------------------------------------------------------------- /.goreleaser.yml: -------------------------------------------------------------------------------- 1 | # This is an example goreleaser.yaml file with some sane defaults. 2 | # Make sure to check the documentation at http://goreleaser.com 3 | before: 4 | hooks: 5 | - go mod tidy 6 | builds: 7 | - env: 8 | - CGO_ENABLED=0 9 | - GO111MODULE=on 10 | - GOPRIVATE=github.com/turbot 11 | goos: 12 | - linux 13 | - darwin 14 | 15 | goarch: 16 | - amd64 17 | - arm64 18 | 19 | id: "steampipe" 20 | binary: "{{ .ProjectName }}.plugin" 21 | flags: 22 | - -tags=netgo 23 | 24 | archives: 25 | - format: gz 26 | name_template: "{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}" 27 | files: 28 | - none* 29 | checksum: 30 | name_template: "{{ .ProjectName }}_{{ .Version }}_SHA256SUMS" 31 | algorithm: sha256 32 | changelog: 33 | sort: asc 34 | filters: 35 | exclude: 36 | - "^docs:" 37 | - "^test:" 38 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## v1.1.1 [2025-04-18] 2 | 3 | _Bug fixes_ 4 | 5 | - Fixed Linux AMD64 plugin build failures for `Postgres 14 FDW`, `Postgres 15 FDW`, and `SQLite Extension` by upgrading GitHub Actions runners from `ubuntu-20.04` to `ubuntu-22.04`. 6 | 7 | ## v1.1.0 [2025-04-17] 8 | 9 | _Dependencies_ 10 | 11 | - Recompiled plugin with Go version `1.23.1`. ([#125](https://github.com/turbot/steampipe-plugin-terraform/pull/125)) 12 | - Recompiled plugin with [steampipe-plugin-sdk v5.11.5](https://github.com/turbot/steampipe-plugin-sdk/blob/v5.11.5/CHANGELOG.md#v5115-2025-03-31) that addresses critical and high vulnerabilities in dependent packages. ([#125](https://github.com/turbot/steampipe-plugin-terraform/pull/125)) 13 | 14 | ## v1.0.1 [2025-02-12] 15 | 16 | _Bug fixes_ 17 | 18 | - Fixed the `terraform_resource` table to correctly return data instead of `index out of range` error when the `state_file_paths` config argument is set in the `terraform.spc` file. ([#109](https://github.com/turbot/steampipe-plugin-terraform/pull/109)) 19 | 20 | ## v1.0.0 [2024-10-22] 21 | 22 | There are no significant changes in this plugin version; it has been released to align with [Steampipe's v1.0.0](https://steampipe.io/changelog/steampipe-cli-v1-0-0) release. This plugin adheres to [semantic versioning](https://semver.org/#semantic-versioning-specification-semver), ensuring backward compatibility within each major version. 23 | 24 | _Dependencies_ 25 | 26 | - Recompiled plugin with Go version `1.22`. ([#107](https://github.com/turbot/steampipe-plugin-terraform/pull/107)) 27 | - Recompiled plugin with [steampipe-plugin-sdk v5.10.4](https://github.com/turbot/steampipe-plugin-sdk/blob/develop/CHANGELOG.md#v5104-2024-08-29) that fixes logging in the plugin export tool. 
([#107](https://github.com/turbot/steampipe-plugin-terraform/pull/107)) 28 | 29 | ## v0.12.0 [2024-06-21] 30 | 31 | _What's new?_ 32 | 33 | - New tables added 34 | - [terraform_variable](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_variable) ([#97](https://github.com/turbot/steampipe-plugin-terraform/pull/97)) 35 | 36 | _Enhancements_ 37 | 38 | - The Plugin and the Steampipe Anywhere binaries are now built with the `netgo` package. ([#101](https://github.com/turbot/steampipe-plugin-terraform/pull/101)) 39 | - Added the `version` flag to the plugin's Export tool. ([#65](https://github.com/turbot/steampipe-export/pull/65)) 40 | 41 | _Bug fixes_ 42 | 43 | - Fixed the `arguments` column of `terraform_resource` table to correctly return the `type` field. ([#99](https://github.com/turbot/steampipe-plugin-terraform/pull/99)) ([#92](https://github.com/turbot/steampipe-plugin-terraform/pull/92)) 44 | 45 | _Dependencies_ 46 | 47 | - Recompiled plugin with [steampipe-plugin-sdk v5.10.1](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v5100-2024-04-10) that adds support for connection key columns. ([#92](https://github.com/turbot/steampipe-plugin-terraform/pull/92)) 48 | 49 | ## v0.11.2 [2023-12-12] 50 | 51 | _Bug fixes_ 52 | 53 | - Fix the missing optional tag on the plugin's connection config arguments. [#84](https://github.com/turbot/steampipe-plugin-terraform/pull/84) 54 | 55 | ## v0.11.1 [2023-12-12] 56 | 57 | _Bug fixes_ 58 | 59 | - Fixed the plugin's connection config variables to use the `hcl` syntax. ([#82](https://github.com/turbot/steampipe-plugin-terraform/pull/82)) 60 | 61 | ## v0.11.0 [2023-12-12] 62 | 63 | _What's new?_ 64 | 65 | - The plugin can now be downloaded and used with the [Steampipe CLI](https://steampipe.io/docs), as a [Postgres FDW](https://steampipe.io/docs/steampipe_postgres/overview), as a [SQLite extension](https://steampipe.io/docs//steampipe_sqlite/overview) and as a standalone [exporter](https://steampipe.io/docs/steampipe_export/overview). ([#80](https://github.com/turbot/steampipe-plugin-terraform/pull/80)) 66 | - The table docs have been updated to provide corresponding example queries for Postgres FDW and SQLite extension. ([#80](https://github.com/turbot/steampipe-plugin-terraform/pull/80)) 67 | - Docs license updated to match Steampipe [CC BY-NC-ND license](https://github.com/turbot/steampipe-plugin-terraform/blob/main/docs/LICENSE). ([#80](https://github.com/turbot/steampipe-plugin-terraform/pull/80)) 68 | 69 | _Dependencies_ 70 | 71 | - Recompiled plugin with [steampipe-plugin-sdk v5.8.0](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v580-2023-12-11) that includes plugin server encapsulation for in-process and GRPC usage, adding Steampipe Plugin SDK version to `_ctx` column, and fixing connection and potential divide-by-zero bugs. ([#79](https://github.com/turbot/steampipe-plugin-terraform/pull/79)) 72 | 73 | ## v0.10.0 [2023-10-03] 74 | 75 | _Breaking changes_ 76 | 77 | - Removed `instances` column from `terraform_resource` table. ([#64](https://github.com/turbot/steampipe-plugin-terraform/pull/64)) 78 | - All `arguments` and `lifecycle` columns now return `null` instead of `{}` if empty. ([#64](https://github.com/turbot/steampipe-plugin-terraform/pull/64)) 79 | 80 | _Enhancements_ 81 | 82 | - Added `address`, `attributes`, and `attributes_std` columns to `terraform_resource` table. 
([#64](https://github.com/turbot/steampipe-plugin-terraform/pull/64)) 83 | 84 | _Bug fixes_ 85 | 86 | - Fixed the `start_line`, `end_line` and `source` column values in the `terraform_resource` table to return correct values regardless of file indentation. ([#64](https://github.com/turbot/steampipe-plugin-terraform/pull/64)) 87 | - Fixed the plugin to check all files even if a non-existent file name is provided in any `file_paths` config arg. ([#67](https://github.com/turbot/steampipe-plugin-terraform/pull/67)) 88 | 89 | _Dependencies_ 90 | 91 | - Recompiled plugin with [steampipe-plugin-sdk v5.6.2](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v562-2023-10-03) which prevents nil pointer reference errors for implicit hydrate configs. ([#63](https://github.com/turbot/steampipe-plugin-terraform/pull/63)) 92 | 93 | ## v0.9.0 [2023-10-02] 94 | 95 | _Dependencies_ 96 | 97 | - Upgraded to [steampipe-plugin-sdk v5.6.1](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v561-2023-09-29) with support for rate limiters. ([#63](https://github.com/turbot/steampipe-plugin-terraform/pull/63)) 98 | - Recompiled plugin with Go version `1.21`. ([#63](https://github.com/turbot/steampipe-plugin-terraform/pull/63)) 99 | 100 | ## v0.8.1 [2023-09-15] 101 | 102 | _Bug fixes_ 103 | 104 | - Fixed the `invalid memory address or nil pointer dereference` errors when querying Terraform configuration or plan or state files that included `null` valued arguments. ([#56](https://github.com/turbot/steampipe-plugin-terraform/pull/56)) 105 | 106 | _Dependencies_ 107 | 108 | - Recompiled plugin with [steampipe-plugin-sdk v5.5.1](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v551-2023-07-26). ([#49](https://github.com/turbot/steampipe-plugin-terraform/pull/49)) 109 | - Recompiled plugin with `github.com/turbot/go-kit v0.7.0`. ([#52](https://github.com/turbot/steampipe-plugin-terraform/pull/52)) 110 | 111 | ## v0.8.0 [2023-09-07] 112 | 113 | _What's new?_ 114 | 115 | - Added support to parse Terraform plan and state files. This can be set using the `plan_file_paths` and `state_file_paths` config arguments in the `terraform.spc` file. ([#40](https://github.com/turbot/steampipe-plugin-terraform/pull/40)) 116 | 117 | _Deprecated_ 118 | 119 | - The `paths` argument in the `terraform.spc` file has been deprecated and replaced with the `configuration_file_paths` argument. ([#40](https://github.com/turbot/steampipe-plugin-terraform/pull/40)) 120 | 121 | ## v0.7.0 [2023-06-20] 122 | 123 | _Dependencies_ 124 | 125 | - Recompiled plugin with [steampipe-plugin-sdk v5.5.0](https://github.com/turbot/steampipe-plugin-sdk/blob/v5.5.0/CHANGELOG.md#v550-2023-06-16) which significantly reduces API calls and boosts query performance, resulting in faster data retrieval. This update significantly lowers the plugin initialization time of dynamic plugins by avoiding recursing into child folders when not necessary. ([#41](https://github.com/turbot/steampipe-plugin-terraform/pull/41)) 126 | 127 | ## v0.6.0 [2023-06-07] 128 | 129 | _Bug fixes_ 130 | 131 | - Fixed the `arguments` column of `terraform_module` table to correctly return data instead of `null`. ([#36](https://github.com/turbot/steampipe-plugin-terraform/pull/36)) (Thanks [@rollwagen](https://github.com/rollwagen) for the contribution!!) 
132 | 133 | _Dependencies_ 134 | 135 | - Recompiled plugin with [steampipe-plugin-sdk v5.4.1](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v541-2023-05-05) which fixes increased plugin initialization time due to multiple connections causing the schema to be loaded repeatedly. ([#38](https://github.com/turbot/steampipe-plugin-terraform/pull/38)) 136 | 137 | ## v0.5.0 [2023-04-11] 138 | 139 | _Dependencies_ 140 | 141 | - Recompiled plugin with [steampipe-plugin-sdk v5.3.0](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v530-2023-03-16) which includes fixes for query cache pending item mechanism and aggregator connections not working for dynamic tables. ([#33](https://github.com/turbot/steampipe-plugin-terraform/pull/33)) 142 | 143 | ## v0.4.0 [2023-02-16] 144 | 145 | _What's new?_ 146 | 147 | - New tables added 148 | - [terraform_module](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_module) ([#28](https://github.com/turbot/steampipe-plugin-terraform/pull/28)) (Thanks [@rollwagen](https://github.com/rollwagen) for the contribution!!) 149 | 150 | ## v0.3.0 [2022-11-16] 151 | 152 | _What's new?_ 153 | 154 | - Added support for retrieving Terraform configuration files from remote Git repositories and S3 buckets. For more information, please see [Supported Path Formats](https://hub.steampipe.io/plugins/turbot/terraform#supported-path-formats). ([#25](https://github.com/turbot/steampipe-plugin-terraform/pull/25)) 155 | - Added file watching support for files included in the `paths` config argument. ([#25](https://github.com/turbot/steampipe-plugin-terraform/pull/25)) 156 | 157 | _Enhancements_ 158 | 159 | - Added `end_line` and `source` columns to all tables. ([#25](https://github.com/turbot/steampipe-plugin-terraform/pull/25)) 160 | 161 | _Dependencies_ 162 | 163 | - Recompiled plugin with [steampipe-plugin-sdk v5.0.0](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v500-2022-11-16) which includes support for fetching remote files with go-getter and file watching. ([#25](https://github.com/turbot/steampipe-plugin-terraform/pull/25)) 164 | 165 | ## v0.2.0 [2022-09-09] 166 | 167 | _Dependencies_ 168 | 169 | - Recompiled plugin with [steampipe-plugin-sdk v4.1.6](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v416-2022-09-02) which includes several caching and memory management improvements. ([#21](https://github.com/turbot/steampipe-plugin-terraform/pull/21)) 170 | - Recompiled plugin with Go version `1.19`. ([#21](https://github.com/turbot/steampipe-plugin-terraform/pull/21)) 171 | 172 | ## v0.1.0 [2022-04-28] 173 | 174 | _Enhancements_ 175 | 176 | - Added support for native Linux ARM and Mac M1 builds. ([#17](https://github.com/turbot/steampipe-plugin-terraform/pull/17)) 177 | - Recompiled plugin with [steampipe-plugin-sdk v3.1.0](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v310--2022-03-30) and Go version `1.18`. 
([#16](https://github.com/turbot/steampipe-plugin-terraform/pull/16)) 178 | 179 | ## v0.0.5 [2022-02-10] 180 | 181 | _What's new?_ 182 | 183 | - File loading and matching through the `paths` argument has been updated to make the plugin easier to use: 184 | - The `paths` argument is no longer commented out by default for new plugin installations and now defaults to the current working directory 185 | - Home directory expansion (`~`) is now supported 186 | - Recursive directory searching (`**`) is now supported 187 | - Previously, when using wildcard matching (`*`), non-Terraform configuration files were automatically excluded to prevent parsing errors. These files are no longer automatically excluded to allow for a wider range of matches. If your current configuration uses wildcard matching, e.g., `paths = [ "/path/to/my/files/*" ]`, please update it to include the file extension, e.g., `paths = [ "/path/to/my/files/*.tf" ]`. 188 | 189 | ## v0.0.4 [2022-02-01] 190 | 191 | _Bug fixes_ 192 | 193 | - Fixed: Add lock to file parsing function to prevent concurrent map read/write errors ([#9](https://github.com/turbot/steampipe-plugin-terraform/pull/9)) 194 | 195 | ## v0.0.3 [2022-01-14] 196 | 197 | _Enhancements_ 198 | 199 | - Recompiled plugin with [steampipe-plugin-sdk v1.8.3](https://github.com/turbot/steampipe-plugin-sdk/blob/main/CHANGELOG.md#v183--2021-12-23) 200 | 201 | _Bug fixes_ 202 | 203 | - Fixed `terraform_local` queries intermittently failing due to a value conversion error 204 | 205 | ## v0.0.2 [2021-12-16] 206 | 207 | _Enhancements_ 208 | 209 | - Recompiled plugin with Go version 1.17 ([#4](https://github.com/turbot/steampipe-plugin-terraform/pull/4)) 210 | 211 | ## v0.0.1 [2021-12-02] 212 | 213 | _What's new?_ 214 | 215 | - New tables added 216 | 217 | - [terraform_data_source](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_data_source) 218 | - [terraform_local](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_local) 219 | - [terraform_output](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_output) 220 | - [terraform_provider](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_provider) 221 | - [terraform_resource](https://hub.steampipe.io/plugins/turbot/terraform/tables/terraform_resource) 222 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | STEAMPIPE_INSTALL_DIR ?= ~/.steampipe 2 | BUILD_TAGS = netgo 3 | install: 4 | go build -o $(STEAMPIPE_INSTALL_DIR)/plugins/hub.steampipe.io/plugins/turbot/terraform@latest/steampipe-plugin-terraform.plugin -tags "${BUILD_TAGS}" *.go -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![image](https://hub.steampipe.io/images/plugins/turbot/terraform-social-graphic.png) 2 | 3 | # Terraform Plugin for Steampipe 4 | 5 | Use SQL to query data from Terraform configuration files. 6 | 7 | - **[Get started →](https://hub.steampipe.io/plugins/turbot/terraform)** 8 | - Documentation: [Table definitions & examples](https://hub.steampipe.io/plugins/turbot/terraform/tables) 9 | - Community: [Join #steampipe on Slack →](https://turbot.com/community/join) 10 | - Get involved: [Issues](https://github.com/turbot/steampipe-plugin-terraform/issues) 11 | 12 | ## Quick start 13 | 14 | Install the plugin with [Steampipe](https://steampipe.io): 15 | 16 | ```shell 17 | steampipe plugin install terraform 18 | ``` 19 | 20 | Configure your [config file](https://hub.steampipe.io/plugins/turbot/terraform#configuration) to include directories with Terraform configuration files. If no directory is specified, the current working directory will be used. 21 | 22 | Run steampipe: 23 | 24 | ```shell 25 | steampipe query 26 | ``` 27 | 28 | Query all resources in your Terraform files: 29 | 30 | ```sql 31 | select 32 | name, 33 | type, 34 | jsonb_pretty(arguments) as args 35 | from 36 | terraform_resource; 37 | ``` 38 | 39 | ``` 40 | > select name, type, jsonb_pretty(arguments) as args from terraform_resource; 41 | +------------+----------------+--------------------------------------------+ 42 | | name | type | args | 43 | +------------+----------------+--------------------------------------------+ 44 | | app_server | aws_instance | { | 45 | | | | "ami": "ami-830c94e3", | 46 | | | | "tags": { | 47 | | | | "Name": "ExampleAppServerInstance" | 48 | | | | }, | 49 | | | | "instance_type": "t2.micro" | 50 | | | | } | 51 | | app_volume | aws_ebs_volume | { | 52 | | | | "size": 40, | 53 | | | | "tags": { | 54 | | | | "Name": "HelloWorld" | 55 | | | | }, | 56 | | | | "availability_zone": "us-west-2a" | 57 | | | | } | 58 | | app_bucket | aws_s3_bucket | { | 59 | | | | "acl": "private", | 60 | | | | "tags": { | 61 | | | | "Name": "Test bucket", | 62 | | | | "Environment": "Dev" | 63 | | | | }, | 64 | | | | "bucket": "my-app-bucket" | 65 | | | | } | 66 | +------------+----------------+--------------------------------------------+ 67 | ``` 68 | 69 | ## Engines 70 | 71 | This plugin is available for the following engines: 72 | 73 | | Engine | Description 74 | |---------------|------------------------------------------ 75 | | [Steampipe](https://steampipe.io/docs) | The Steampipe CLI exposes APIs and services as a high-performance relational database, giving you the ability to write SQL-based queries to explore dynamic data. Mods extend Steampipe's capabilities with dashboards, reports, and controls built with simple HCL. The Steampipe CLI is a turnkey solution that includes its own Postgres database, plugin management, and mod support. 
76 | | [Postgres FDW](https://steampipe.io/docs/steampipe_postgres/overview) | Steampipe Postgres FDWs are native Postgres Foreign Data Wrappers that translate APIs to foreign tables. Unlike Steampipe CLI, which ships with its own Postgres server instance, the Steampipe Postgres FDWs can be installed in any supported Postgres database version. 77 | | [SQLite Extension](https://steampipe.io/docs/steampipe_sqlite/overview) | Steampipe SQLite Extensions provide SQLite virtual tables that translate your queries into API calls, transparently fetching information from your API or service as you request it. 78 | | [Export](https://steampipe.io/docs/steampipe_export/overview) | Steampipe Plugin Exporters provide a flexible mechanism for exporting information from cloud services and APIs. Each exporter is a stand-alone binary that allows you to extract data using Steampipe plugins without a database. 79 | | [Turbot Pipes](https://turbot.com/pipes/docs) | Turbot Pipes is the only intelligence, automation & security platform built specifically for DevOps. Pipes provide hosted Steampipe database instances, shared dashboards, snapshots, and more. 80 | 81 | ## Developing 82 | 83 | Prerequisites: 84 | 85 | - [Steampipe](https://steampipe.io/downloads) 86 | - [Golang](https://golang.org/doc/install) 87 | 88 | Clone: 89 | 90 | ```sh 91 | git clone https://github.com/turbot/steampipe-plugin-terraform.git 92 | cd steampipe-plugin-terraform 93 | ``` 94 | 95 | Build, which automatically installs the new version to your `~/.steampipe/plugins` directory: 96 | 97 | ``` 98 | make 99 | ``` 100 | 101 | Configure the plugin: 102 | 103 | ``` 104 | cp config/* ~/.steampipe/config 105 | vi ~/.steampipe/config/terraform.spc 106 | ``` 107 | 108 | Try it! 109 | 110 | ``` 111 | steampipe query 112 | > .inspect terraform 113 | ``` 114 | 115 | Further reading: 116 | 117 | - [Writing plugins](https://steampipe.io/docs/develop/writing-plugins) 118 | - [Writing your first table](https://steampipe.io/docs/develop/writing-your-first-table) 119 | 120 | ## Open Source & Contributing 121 | 122 | This repository is published under the [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) (source code) and [CC BY-NC-ND](https://creativecommons.org/licenses/by-nc-nd/2.0/) (docs) licenses. Please see our [code of conduct](https://github.com/turbot/.github/blob/main/CODE_OF_CONDUCT.md). We look forward to collaborating with you! 123 | 124 | [Steampipe](https://steampipe.io) is a product produced from this open source software, exclusively by [Turbot HQ, Inc](https://turbot.com). It is distributed under our commercial terms. Others are allowed to make their own distribution of the software, but cannot use any of the Turbot trademarks, cloud services, etc. You can learn more in our [Open Source FAQ](https://turbot.com/open-source). 125 | 126 | ## Get Involved 127 | 128 | **[Join #steampipe on Slack →](https://turbot.com/community/join)** 129 | 130 | Want to help but don't know where to start? 
Pick up one of the `help wanted` issues: 131 | 132 | - [Steampipe](https://github.com/turbot/steampipe/labels/help%20wanted) 133 | - [Terraform Plugin](https://github.com/turbot/steampipe-plugin-terraform/labels/help%20wanted) 134 | -------------------------------------------------------------------------------- /config/terraform.spc: -------------------------------------------------------------------------------- 1 | connection "terraform" { 2 | plugin = "terraform" 3 | 4 | # Configuration file paths is a list of locations to search for Terraform configuration files 5 | # Plan File Paths is a list of locations to search for Terraform plan files 6 | # State File Paths is a list of locations to search for Terraform state files 7 | # Configuration, plan or state file paths can be configured with a local directory, a remote Git repository URL, or an S3 bucket URL 8 | # Wildcard based searches are supported, including recursive searches 9 | # Local paths are resolved relative to the current working directory (CWD) 10 | 11 | # For example: 12 | # - "*.tf" matches all Terraform configuration files in the CWD 13 | # - "**/*.tf" matches all Terraform configuration files in the CWD and all sub-directories 14 | # - "../*.tf" matches all Terraform configuration files in the CWD's parent directory 15 | # - "steampipe*.tf" matches all Terraform configuration files starting with "steampipe" in the CWD 16 | # - "/path/to/dir/*.tf" matches all Terraform configuration files in a specific directory 17 | # - "/path/to/dir/main.tf" matches a specific file 18 | 19 | # If paths includes "*", all files (including non-Terraform configuration files) in 20 | # the CWD will be matched, which may cause errors if incompatible file types exist 21 | 22 | # Defaults to CWD 23 | configuration_file_paths = ["*.tf"] 24 | plan_file_paths = ["tfplan.json", "*.tfplan.json"] 25 | state_file_paths = ["*.tfstate"] 26 | } 27 | -------------------------------------------------------------------------------- /docs/LICENSE: -------------------------------------------------------------------------------- 1 | Attribution-NonCommercial-NoDerivatives 4.0 International 2 | 3 | ======================================================================= 4 | 5 | Creative Commons Corporation ("Creative Commons") is not a law firm and 6 | does not provide legal services or legal advice. Distribution of 7 | Creative Commons public licenses does not create a lawyer-client or 8 | other relationship. Creative Commons makes its licenses and related 9 | information available on an "as-is" basis. Creative Commons gives no 10 | warranties regarding its licenses, any material licensed under their 11 | terms and conditions, or any related information. Creative Commons 12 | disclaims all liability for damages resulting from their use to the 13 | fullest extent possible. 14 | 15 | Using Creative Commons Public Licenses 16 | 17 | Creative Commons public licenses provide a standard set of terms and 18 | conditions that creators and other rights holders may use to share 19 | original works of authorship and other material subject to copyright 20 | and certain other rights specified in the public license below. The 21 | following considerations are for informational purposes only, are not 22 | exhaustive, and do not form part of our licenses. 
23 | 24 | Considerations for licensors: Our public licenses are 25 | intended for use by those authorized to give the public 26 | permission to use material in ways otherwise restricted by 27 | copyright and certain other rights. Our licenses are 28 | irrevocable. Licensors should read and understand the terms 29 | and conditions of the license they choose before applying it. 30 | Licensors should also secure all rights necessary before 31 | applying our licenses so that the public can reuse the 32 | material as expected. Licensors should clearly mark any 33 | material not subject to the license. This includes other CC- 34 | licensed material, or material used under an exception or 35 | limitation to copyright. More considerations for licensors: 36 | wiki.creativecommons.org/Considerations_for_licensors 37 | 38 | Considerations for the public: By using one of our public 39 | licenses, a licensor grants the public permission to use the 40 | licensed material under specified terms and conditions. If 41 | the licensor's permission is not necessary for any reason--for 42 | example, because of any applicable exception or limitation to 43 | copyright--then that use is not regulated by the license. Our 44 | licenses grant only permissions under copyright and certain 45 | other rights that a licensor has authority to grant. Use of 46 | the licensed material may still be restricted for other 47 | reasons, including because others have copyright or other 48 | rights in the material. A licensor may make special requests, 49 | such as asking that all changes be marked or described. 50 | Although not required by our licenses, you are encouraged to 51 | respect those requests where reasonable. More considerations 52 | for the public: 53 | wiki.creativecommons.org/Considerations_for_licensees 54 | 55 | ======================================================================= 56 | 57 | Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 58 | International Public License 59 | 60 | By exercising the Licensed Rights (defined below), You accept and agree 61 | to be bound by the terms and conditions of this Creative Commons 62 | Attribution-NonCommercial-NoDerivatives 4.0 International Public 63 | License ("Public License"). To the extent this Public License may be 64 | interpreted as a contract, You are granted the Licensed Rights in 65 | consideration of Your acceptance of these terms and conditions, and the 66 | Licensor grants You such rights in consideration of benefits the 67 | Licensor receives from making the Licensed Material available under 68 | these terms and conditions. 69 | 70 | 71 | Section 1 -- Definitions. 72 | 73 | a. Adapted Material means material subject to Copyright and Similar 74 | Rights that is derived from or based upon the Licensed Material 75 | and in which the Licensed Material is translated, altered, 76 | arranged, transformed, or otherwise modified in a manner requiring 77 | permission under the Copyright and Similar Rights held by the 78 | Licensor. For purposes of this Public License, where the Licensed 79 | Material is a musical work, performance, or sound recording, 80 | Adapted Material is always produced where the Licensed Material is 81 | synched in timed relation with a moving image. 82 | 83 | b. 
Copyright and Similar Rights means copyright and/or similar rights 84 | closely related to copyright including, without limitation, 85 | performance, broadcast, sound recording, and Sui Generis Database 86 | Rights, without regard to how the rights are labeled or 87 | categorized. For purposes of this Public License, the rights 88 | specified in Section 2(b)(1)-(2) are not Copyright and Similar 89 | Rights. 90 | 91 | c. Effective Technological Measures means those measures that, in the 92 | absence of proper authority, may not be circumvented under laws 93 | fulfilling obligations under Article 11 of the WIPO Copyright 94 | Treaty adopted on December 20, 1996, and/or similar international 95 | agreements. 96 | 97 | d. Exceptions and Limitations means fair use, fair dealing, and/or 98 | any other exception or limitation to Copyright and Similar Rights 99 | that applies to Your use of the Licensed Material. 100 | 101 | e. Licensed Material means the artistic or literary work, database, 102 | or other material to which the Licensor applied this Public 103 | License. 104 | 105 | f. Licensed Rights means the rights granted to You subject to the 106 | terms and conditions of this Public License, which are limited to 107 | all Copyright and Similar Rights that apply to Your use of the 108 | Licensed Material and that the Licensor has authority to license. 109 | 110 | g. Licensor means the individual(s) or entity(ies) granting rights 111 | under this Public License. 112 | 113 | h. NonCommercial means not primarily intended for or directed towards 114 | commercial advantage or monetary compensation. For purposes of 115 | this Public License, the exchange of the Licensed Material for 116 | other material subject to Copyright and Similar Rights by digital 117 | file-sharing or similar means is NonCommercial provided there is 118 | no payment of monetary compensation in connection with the 119 | exchange. 120 | 121 | i. Share means to provide material to the public by any means or 122 | process that requires permission under the Licensed Rights, such 123 | as reproduction, public display, public performance, distribution, 124 | dissemination, communication, or importation, and to make material 125 | available to the public including in ways that members of the 126 | public may access the material from a place and at a time 127 | individually chosen by them. 128 | 129 | j. Sui Generis Database Rights means rights other than copyright 130 | resulting from Directive 96/9/EC of the European Parliament and of 131 | the Council of 11 March 1996 on the legal protection of databases, 132 | as amended and/or succeeded, as well as other essentially 133 | equivalent rights anywhere in the world. 134 | 135 | k. You means the individual or entity exercising the Licensed Rights 136 | under this Public License. Your has a corresponding meaning. 137 | 138 | 139 | Section 2 -- Scope. 140 | 141 | a. License grant. 142 | 143 | 1. Subject to the terms and conditions of this Public License, 144 | the Licensor hereby grants You a worldwide, royalty-free, 145 | non-sublicensable, non-exclusive, irrevocable license to 146 | exercise the Licensed Rights in the Licensed Material to: 147 | 148 | a. reproduce and Share the Licensed Material, in whole or 149 | in part, for NonCommercial purposes only; and 150 | 151 | b. produce and reproduce, but not Share, Adapted Material 152 | for NonCommercial purposes only. 153 | 154 | 2. Exceptions and Limitations. 
For the avoidance of doubt, where 155 | Exceptions and Limitations apply to Your use, this Public 156 | License does not apply, and You do not need to comply with 157 | its terms and conditions. 158 | 159 | 3. Term. The term of this Public License is specified in Section 160 | 6(a). 161 | 162 | 4. Media and formats; technical modifications allowed. The 163 | Licensor authorizes You to exercise the Licensed Rights in 164 | all media and formats whether now known or hereafter created, 165 | and to make technical modifications necessary to do so. The 166 | Licensor waives and/or agrees not to assert any right or 167 | authority to forbid You from making technical modifications 168 | necessary to exercise the Licensed Rights, including 169 | technical modifications necessary to circumvent Effective 170 | Technological Measures. For purposes of this Public License, 171 | simply making modifications authorized by this Section 2(a) 172 | (4) never produces Adapted Material. 173 | 174 | 5. Downstream recipients. 175 | 176 | a. Offer from the Licensor -- Licensed Material. Every 177 | recipient of the Licensed Material automatically 178 | receives an offer from the Licensor to exercise the 179 | Licensed Rights under the terms and conditions of this 180 | Public License. 181 | 182 | b. No downstream restrictions. You may not offer or impose 183 | any additional or different terms or conditions on, or 184 | apply any Effective Technological Measures to, the 185 | Licensed Material if doing so restricts exercise of the 186 | Licensed Rights by any recipient of the Licensed 187 | Material. 188 | 189 | 6. No endorsement. Nothing in this Public License constitutes or 190 | may be construed as permission to assert or imply that You 191 | are, or that Your use of the Licensed Material is, connected 192 | with, or sponsored, endorsed, or granted official status by, 193 | the Licensor or others designated to receive attribution as 194 | provided in Section 3(a)(1)(A)(i). 195 | 196 | b. Other rights. 197 | 198 | 1. Moral rights, such as the right of integrity, are not 199 | licensed under this Public License, nor are publicity, 200 | privacy, and/or other similar personality rights; however, to 201 | the extent possible, the Licensor waives and/or agrees not to 202 | assert any such rights held by the Licensor to the limited 203 | extent necessary to allow You to exercise the Licensed 204 | Rights, but not otherwise. 205 | 206 | 2. Patent and trademark rights are not licensed under this 207 | Public License. 208 | 209 | 3. To the extent possible, the Licensor waives any right to 210 | collect royalties from You for the exercise of the Licensed 211 | Rights, whether directly or through a collecting society 212 | under any voluntary or waivable statutory or compulsory 213 | licensing scheme. In all other cases the Licensor expressly 214 | reserves any right to collect such royalties, including when 215 | the Licensed Material is used other than for NonCommercial 216 | purposes. 217 | 218 | 219 | Section 3 -- License Conditions. 220 | 221 | Your exercise of the Licensed Rights is expressly made subject to the 222 | following conditions. 223 | 224 | a. Attribution. 225 | 226 | 1. If You Share the Licensed Material, You must: 227 | 228 | a. retain the following if it is supplied by the Licensor 229 | with the Licensed Material: 230 | 231 | i. 
identification of the creator(s) of the Licensed 232 | Material and any others designated to receive 233 | attribution, in any reasonable manner requested by 234 | the Licensor (including by pseudonym if 235 | designated); 236 | 237 | ii. a copyright notice; 238 | 239 | iii. a notice that refers to this Public License; 240 | 241 | iv. a notice that refers to the disclaimer of 242 | warranties; 243 | 244 | v. a URI or hyperlink to the Licensed Material to the 245 | extent reasonably practicable; 246 | 247 | b. indicate if You modified the Licensed Material and 248 | retain an indication of any previous modifications; and 249 | 250 | c. indicate the Licensed Material is licensed under this 251 | Public License, and include the text of, or the URI or 252 | hyperlink to, this Public License. 253 | 254 | For the avoidance of doubt, You do not have permission under 255 | this Public License to Share Adapted Material. 256 | 257 | 2. You may satisfy the conditions in Section 3(a)(1) in any 258 | reasonable manner based on the medium, means, and context in 259 | which You Share the Licensed Material. For example, it may be 260 | reasonable to satisfy the conditions by providing a URI or 261 | hyperlink to a resource that includes the required 262 | information. 263 | 264 | 3. If requested by the Licensor, You must remove any of the 265 | information required by Section 3(a)(1)(A) to the extent 266 | reasonably practicable. 267 | 268 | 269 | Section 4 -- Sui Generis Database Rights. 270 | 271 | Where the Licensed Rights include Sui Generis Database Rights that 272 | apply to Your use of the Licensed Material: 273 | 274 | a. for the avoidance of doubt, Section 2(a)(1) grants You the right 275 | to extract, reuse, reproduce, and Share all or a substantial 276 | portion of the contents of the database for NonCommercial purposes 277 | only and provided You do not Share Adapted Material; 278 | 279 | b. if You include all or a substantial portion of the database 280 | contents in a database in which You have Sui Generis Database 281 | Rights, then the database in which You have Sui Generis Database 282 | Rights (but not its individual contents) is Adapted Material; and 283 | 284 | c. You must comply with the conditions in Section 3(a) if You Share 285 | all or a substantial portion of the contents of the database. 286 | 287 | For the avoidance of doubt, this Section 4 supplements and does not 288 | replace Your obligations under this Public License where the Licensed 289 | Rights include other Copyright and Similar Rights. 290 | 291 | 292 | Section 5 -- Disclaimer of Warranties and Limitation of Liability. 293 | 294 | a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE 295 | EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS 296 | AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF 297 | ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, 298 | IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, 299 | WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR 300 | PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, 301 | ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT 302 | KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT 303 | ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. 304 | 305 | b. 
TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE 306 | TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, 307 | NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, 308 | INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, 309 | COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR 310 | USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN 311 | ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR 312 | DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR 313 | IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. 314 | 315 | c. The disclaimer of warranties and limitation of liability provided 316 | above shall be interpreted in a manner that, to the extent 317 | possible, most closely approximates an absolute disclaimer and 318 | waiver of all liability. 319 | 320 | 321 | Section 6 -- Term and Termination. 322 | 323 | a. This Public License applies for the term of the Copyright and 324 | Similar Rights licensed here. However, if You fail to comply with 325 | this Public License, then Your rights under this Public License 326 | terminate automatically. 327 | 328 | b. Where Your right to use the Licensed Material has terminated under 329 | Section 6(a), it reinstates: 330 | 331 | 1. automatically as of the date the violation is cured, provided 332 | it is cured within 30 days of Your discovery of the 333 | violation; or 334 | 335 | 2. upon express reinstatement by the Licensor. 336 | 337 | For the avoidance of doubt, this Section 6(b) does not affect any 338 | right the Licensor may have to seek remedies for Your violations 339 | of this Public License. 340 | 341 | c. For the avoidance of doubt, the Licensor may also offer the 342 | Licensed Material under separate terms or conditions or stop 343 | distributing the Licensed Material at any time; however, doing so 344 | will not terminate this Public License. 345 | 346 | d. Sections 1, 5, 6, 7, and 8 survive termination of this Public 347 | License. 348 | 349 | 350 | Section 7 -- Other Terms and Conditions. 351 | 352 | a. The Licensor shall not be bound by any additional or different 353 | terms or conditions communicated by You unless expressly agreed. 354 | 355 | b. Any arrangements, understandings, or agreements regarding the 356 | Licensed Material not stated herein are separate from and 357 | independent of the terms and conditions of this Public License. 358 | 359 | 360 | Section 8 -- Interpretation. 361 | 362 | a. For the avoidance of doubt, this Public License does not, and 363 | shall not be interpreted to, reduce, limit, restrict, or impose 364 | conditions on any use of the Licensed Material that could lawfully 365 | be made without permission under this Public License. 366 | 367 | b. To the extent possible, if any provision of this Public License is 368 | deemed unenforceable, it shall be automatically reformed to the 369 | minimum extent necessary to make it enforceable. If the provision 370 | cannot be reformed, it shall be severed from this Public License 371 | without affecting the enforceability of the remaining terms and 372 | conditions. 373 | 374 | c. No term or condition of this Public License will be waived and no 375 | failure to comply consented to unless expressly agreed to by the 376 | Licensor. 377 | 378 | d. 
Nothing in this Public License constitutes or may be interpreted 379 | as a limitation upon, or waiver of, any privileges and immunities 380 | that apply to the Licensor or You, including from the legal 381 | processes of any jurisdiction or authority. 382 | 383 | ======================================================================= 384 | 385 | Creative Commons is not a party to its public 386 | licenses. Notwithstanding, Creative Commons may elect to apply one of 387 | its public licenses to material it publishes and in those instances 388 | will be considered the “Licensor.” The text of the Creative Commons 389 | public licenses is dedicated to the public domain under the CC0 Public 390 | Domain Dedication. Except for the limited purpose of indicating that 391 | material is shared under a Creative Commons public license or as 392 | otherwise permitted by the Creative Commons policies published at 393 | creativecommons.org/policies, Creative Commons does not authorize the 394 | use of the trademark "Creative Commons" or any other trademark or logo 395 | of Creative Commons without its prior written consent including, 396 | without limitation, in connection with any unauthorized modifications 397 | to any of its public licenses or any other arrangements, 398 | understandings, or agreements concerning use of licensed material. For 399 | the avoidance of doubt, this paragraph does not form part of the 400 | public licenses. 401 | 402 | Creative Commons may be contacted at creativecommons.org. -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | organization: Turbot 3 | category: ["software development"] 4 | icon_url: "/images/plugins/turbot/terraform.svg" 5 | brand_color: "#844FBA" 6 | display_name: "Terraform" 7 | short_name: "terraform" 8 | description: "Steampipe plugin to query data from Terraform files." 9 | og_description: "Query Terraform files with SQL! Open source CLI. No DB required." 10 | og_image: "/images/plugins/turbot/terraform-social-graphic.png" 11 | engines: ["steampipe", "sqlite", "postgres", "export"] 12 | --- 13 | 14 | # Terraform + Steampipe 15 | 16 | A Terraform configuration file is used to declare resources, variables, modules, and more. 17 | 18 | [Steampipe](https://steampipe.io) is an open source CLI to instantly query data using SQL. 19 | 20 | The plugin supports scanning Terraform configuration files from various sources (e.g., [Local files](#configuring-local-file-paths), [Git](#configuring-remote-git-repository-urls), [S3](#configuring-s3-urls) etc.), [parsing Terraform states](#scanning-terraform-state) and [parsing Terraform plans](#scanning-terraform-plan) as well. 
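For example, once a connection is configured (see the Get Started section below), a single aggregate query can summarize everything the plugin has scanned. This is only an illustrative sketch; it assumes nothing beyond the `type` and `path` columns of the `terraform_resource` table, which are documented later in this guide:

```sql
select
  type,
  path,
  count(*) as resource_count
from
  terraform_resource
group by
  type,
  path
order by
  resource_count desc;
```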
21 | 22 | ## Documentation 23 | 24 | - **[Table definitions & examples →](/plugins/turbot/terraform/tables)** 25 | 26 | ## Get Started 27 | 28 | ### Install 29 | 30 | Download and install the latest Terraform plugin: 31 | 32 | ```bash 33 | steampipe plugin install terraform 34 | ``` 35 | 36 | ### Configuration 37 | 38 | Installing the latest terraform plugin will create a config file (`~/.steampipe/config/terraform.spc`) with a single connection named `terraform`: 39 | 40 | ```hcl 41 | connection "terraform" { 42 | plugin = "terraform" 43 | 44 | configuration_file_paths = ["*.tf"] 45 | plan_file_paths = ["tfplan.json", "*.tfplan.json"] 46 | state_file_paths = ["*.tfstate"] 47 | } 48 | ``` 49 | 50 | For a full list of configuration arguments, please see the [default configuration file](https://github.com/turbot/steampipe-plugin-terraform/blob/main/config/terraform.spc). 51 | 52 | ### Run a Query 53 | 54 | Run steampipe: 55 | 56 | ```shell 57 | steampipe query 58 | ``` 59 | 60 | Query all resources in your Terraform files: 61 | 62 | ```sql 63 | select 64 | name, 65 | type, 66 | jsonb_pretty(arguments) as args 67 | from 68 | terraform_resource; 69 | ``` 70 | 71 | ```sh 72 | > select name, type, jsonb_pretty(arguments) as args from terraform_resource; 73 | +------------+----------------+--------------------------------------------+ 74 | | name | type | args | 75 | +------------+----------------+--------------------------------------------+ 76 | | app_server | aws_instance | { | 77 | | | | "ami": "ami-830c94e3", | 78 | | | | "tags": { | 79 | | | | "Name": "ExampleAppServerInstance" | 80 | | | | }, | 81 | | | | "instance_type": "t2.micro" | 82 | | | | } | 83 | | app_volume | aws_ebs_volume | { | 84 | | | | "size": 40, | 85 | | | | "tags": { | 86 | | | | "Name": "HelloWorld" | 87 | | | | }, | 88 | | | | "availability_zone": "us-west-2a" | 89 | | | | } | 90 | | app_bucket | aws_s3_bucket | { | 91 | | | | "acl": "private", | 92 | | | | "tags": { | 93 | | | | "Name": "Test bucket", | 94 | | | | "Environment": "Dev" | 95 | | | | }, | 96 | | | | "bucket": "my-app-bucket" | 97 | | | | } | 98 | +------------+----------------+--------------------------------------------+ 99 | ``` 100 | 101 | ## Configuring Paths 102 | 103 | The plugin requires a list of locations to search for the Terraform configuration files. Paths can be configured with [Local files](#configuring-local-file-paths), [Git URLs](#configuring-remote-git-repository-urls), [S3 URLs](#configuring-s3-urls) etc. 104 | 105 | **Note:** Local file paths are resolved relative to the current working directory (CWD). 106 | 107 | ```hcl 108 | connection "terraform" { 109 | plugin = "terraform" 110 | 111 | configuration_file_paths = [ 112 | "terraform_test.tf", 113 | "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//variables.tf" 114 | ] 115 | } 116 | ``` 117 | 118 | Paths may [include wildcards](https://pkg.go.dev/path/filepath#Match) and support `**` for recursive matching. 
For example: 119 | 120 | ```hcl 121 | connection "terraform" { 122 | plugin = "terraform" 123 | 124 | configuration_file_paths = [ 125 | "*.tf", 126 | "~/*.tf", 127 | "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//*.tf", 128 | "github.com/hashicorp/terraform-guides//infrastructure-as-code//**/*.tf", 129 | "bitbucket.org/benturrell/terraform-arcgis-portal//modules/shared//*.tf", 130 | "gitlab.com/gitlab-org/configure/examples/gitlab-terraform-aws//*.tf", 131 | "s3::https://bucket.s3.us-east-1.amazonaws.com/test_folder//*.tf" 132 | ] 133 | } 134 | ``` 135 | 136 | **Note**: If any path matches on `*` without `.tf`, all files (including non-Terraform configuration files) in the directory will be matched, which may cause errors if incompatible file types exist. 137 | 138 | ### Configuring Local File Paths 139 | 140 | You can define a list of local directory paths to search for terraform files. Paths are resolved relative to the current working directory. For example: 141 | 142 | - `*.tf` matches all Terraform configuration files in the CWD. 143 | - `**/*.tf` matches all Terraform configuration files in the CWD and all sub-directories. 144 | - `../*.tf` matches all Terraform configuration files in the CWD's parent directory. 145 | - `steampipe*.tf` matches all Terraform configuration files starting with "steampipe" in the CWD. 146 | - `/path/to/dir/*.tf` matches all Terraform configuration files in a specific directory. For example: 147 | - `~/*.tf` matches all Terraform configuration files in the home directory. 148 | - `~/**/*.tf` matches all Terraform configuration files recursively in the home directory. 149 | - `/path/to/dir/main.tf` matches a specific file. 150 | 151 | ```hcl 152 | connection "terraform" { 153 | plugin = "terraform" 154 | 155 | configuration_file_paths = [ "*.tf", "~/*.tf", "/path/to/dir/main.tf" ] 156 | } 157 | ``` 158 | 159 | ### Configuring Remote Git Repository URLs 160 | 161 | You can also configure `paths` with any Git remote repository URLs, e.g., GitHub, BitBucket, GitLab. The plugin will then attempt to retrieve any Terraform configuration files from the remote repositories. 162 | 163 | For example: 164 | 165 | - `github.com/turbot/steampipe-plugin-aws//*.tf` matches all top-level Terraform configuration files in the specified repository. 166 | - `github.com/turbot/steampipe-plugin-aws//**/*.tf` matches all Terraform configuration files in the specified repository and all subdirectories. 167 | - `github.com/turbot/steampipe-plugin-aws//**/*.tf?ref=fix_7677` matches all Terraform configuration files in the specific tag of a repository. 168 | - `github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//*.tf` matches all Terraform configuration files in the specified folder path. 169 | 170 | If the example formats above do not work for private repositories, this could be due to git credentials being stored by another tool, e.g., VS Code. An alternative format you can try is: 171 | 172 | - `git::ssh://git@github.com/test_org/test_repo//*.tf` 173 | 174 | You can specify a subdirectory after a double-slash (`//`) if you want to download only a specific subdirectory from a downloaded directory. 
175 | 176 | ```hcl 177 | connection "terraform" { 178 | plugin = "terraform" 179 | 180 | configuration_file_paths = [ "github.com/turbot/steampipe-plugin-aws//aws-test/tests/aws_acm_certificate//*.tf" ] 181 | } 182 | ``` 183 | 184 | Similarly, you can define a list of GitLab and BitBucket URLs to search for Terraform configuration files: 185 | 186 | ```hcl 187 | connection "terraform" { 188 | plugin = "terraform" 189 | 190 | configuration_file_paths = [ 191 | "github.com/turbot/steampipe-plugin-aws//**/*.tf", 192 | "github.com/hashicorp/terraform-guides//infrastructure-as-code//**/*.tf", 193 | "bitbucket.org/benturrell/terraform-arcgis-portal//modules/shared//*.tf", 194 | "bitbucket.org/benturrell/terraform-arcgis-portal//modules//**/*.tf", 195 | "gitlab.com/gitlab-org/configure/examples/gitlab-terraform-aws//*.tf", 196 | "gitlab.com/gitlab-org/configure/examples/gitlab-terraform-aws//**/*.tf" 197 | ] 198 | } 199 | ``` 200 | 201 | ### Configuring S3 URLs 202 | 203 | You can also query all Terraform configuration files stored inside an S3 bucket (public or private) using the bucket URL. 204 | 205 | #### Accessing a Private Bucket 206 | 207 | In order to access your files in a private S3 bucket, you will need to configure your credentials. You can use your configured AWS profile from local `~/.aws/config`, or pass the credentials using the standard AWS environment variables, e.g., `AWS_PROFILE`, `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `AWS_REGION`. 208 | 209 | We recommend using AWS profiles for authentication. 210 | 211 | **Note:** Make sure that `region` is configured in the config. If not set in the config, `region` will be fetched from the standard environment variable `AWS_REGION`. 212 | 213 | You can also authenticate your request by setting the AWS profile and region in `paths`. For example: 214 | 215 | ```hcl 216 | connection "terraform" { 217 | plugin = "terraform" 218 | 219 | configuration_file_paths = [ 220 | "s3::https://bucket-2.s3.us-east-1.amazonaws.com//*.tf?aws_profile=", 221 | "s3::https://bucket-2.s3.us-east-1.amazonaws.com/test_folder//*.tf?aws_profile=" 222 | ] 223 | } 224 | ``` 225 | 226 | **Note:** 227 | 228 | In order to access the bucket, the IAM user or role will require the following IAM permissions: 229 | 230 | - `s3:ListBucket` 231 | - `s3:GetObject` 232 | - `s3:GetObjectVersion` 233 | 234 | If the bucket is in another AWS account, the bucket policy will need to grant access to your user or role. For example: 235 | 236 | ```json 237 | { 238 | "Version": "2012-10-17", 239 | "Statement": [ 240 | { 241 | "Sid": "ReadBucketObject", 242 | "Effect": "Allow", 243 | "Principal": { 244 | "AWS": "arn:aws:iam::123456789012:user/YOUR_USER" 245 | }, 246 | "Action": ["s3:ListBucket", "s3:GetObject", "s3:GetObjectVersion"], 247 | "Resource": ["arn:aws:s3:::test-bucket1", "arn:aws:s3:::test-bucket1/*"] 248 | } 249 | ] 250 | } 251 | ``` 252 | 253 | #### Accessing a Public Bucket 254 | 255 | Public access granted to buckets and objects through ACLs and bucket policies allows any user access to data in the bucket. We do not recommend making S3 buckets public, but if there are specific objects you'd like to make public, please see [How can I grant public read access to some objects in my Amazon S3 bucket?](https://aws.amazon.com/premiumsupport/knowledge-center/read-access-objects-s3-bucket/). 256 | 257 | You can query any public S3 bucket directly using the URL without passing credentials. 
For example:
258 | 
259 | ```hcl
260 | connection "terraform" {
261 |   plugin = "terraform"
262 | 
263 |   configuration_file_paths = [
264 |     "s3::https://bucket-1.s3.us-east-1.amazonaws.com/test_folder//*.tf",
265 |     "s3::https://bucket-2.s3.us-east-1.amazonaws.com/test_folder//**/*.tf"
266 |   ]
267 | }
268 | ```
269 | 
270 | ## Scanning Terraform Plan
271 | 
272 | The plugin supports scanning Terraform plans in JSON format and allows users to query them using Steampipe.
273 | 
274 | **Note:** The plugin only scans resources from the Terraform plan. Tables will return details of the resources as they will be after the plan has been applied.
275 | 
276 | To get the Terraform plan in JSON format, follow the steps below:
277 | 
278 | - Run `terraform plan` with the `-out` flag to save the generated plan to the given filename. Terraform accepts any filename for the plan file, but a typical convention is to name it `tfplan`.
279 | 
280 | ```shell
281 | terraform plan -out=tfplan
282 | ```
283 | 
284 | - Run the `terraform show` command with the `-json` flag to get the plan in JSON format, and store the output in a file.
285 | 
286 | ```shell
287 | terraform show -json tfplan > tfplan.json
288 | ```
289 | 
290 | - Finally, add the path of `tfplan.json` to the `plan_file_paths` argument in the config to read the plan using Steampipe.
291 | 
292 | ```hcl
293 | connection "terraform" {
294 |   plugin = "terraform"
295 | 
296 |   plan_file_paths = [
297 |     "/path/to/tfplan.json",
298 |     "github.com/turbot/steampipe-plugin-aws//aws-test/tests/plan_files//tfplan.json",
299 |     "s3::https://bucket-1.s3.us-east-1.amazonaws.com/test_plan//*.json"
300 |   ]
301 | }
302 | ```
303 | 
304 | ## Scanning Terraform State
305 | 
306 | The plugin supports scanning Terraform state files and allows users to query them using Steampipe.
307 | 
308 | **Note:** The plugin only scans the outputs and resources from the Terraform state.
309 | 
310 | To get the Terraform state file, follow the steps below:
311 | 
312 | - Run `terraform apply` to generate the state file `terraform.tfstate`.
313 | 
314 | ```shell
315 | terraform apply
316 | ```
317 | 
318 | - Add the path of the `terraform.tfstate` file to the `state_file_paths` argument in the config to read the state using Steampipe.
319 | 
320 | ```hcl
321 | connection "terraform" {
322 |   plugin = "terraform"
323 | 
324 |   state_file_paths = [
325 |     "terraform.tfstate",
326 |     "github.com/turbot/steampipe-plugin-aws//aws-test/tests/state_files//terraform.tfstate",
327 |     "s3::https://bucket-1.s3.us-east-1.amazonaws.com/state_files//*.tfstate"
328 |   ]
329 | }
330 | ```
331 | 
332 | ## Get Involved
333 | 
334 | - Open source: https://github.com/turbot/steampipe-plugin-terraform
335 | - Community: [Join #steampipe on Slack →](https://turbot.com/community/join)
336 | 
--------------------------------------------------------------------------------
/docs/tables/terraform_data_source.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Steampipe Table: terraform_data_source - Query Terraform Data Sources using SQL"
3 | description: "Allows users to query Terraform Data Sources, specifically providing insights into the configuration and state of data sources in Terraform."
4 | ---
5 | 
6 | # Table: terraform_data_source - Query Terraform Data Sources using SQL
7 | 
8 | Terraform Data Sources are a type of resource in Terraform that allow users to fetch data from a specific source or service.
This data can then be used within other resources or outputs within your Terraform configuration. It is a powerful tool that allows for the dynamic configuration of resources based on data retrieved from external sources.
9 | 
10 | ## Table Usage Guide
11 | 
12 | The `terraform_data_source` table provides insights into data sources within Terraform. As a DevOps engineer, explore data source-specific details through this table, including the configuration and state of each data source. Utilize it to understand how data is being fetched and used within your Terraform configuration, and to ensure that data sources are being used effectively and securely.
13 | 
14 | ## Examples
15 | 
16 | ### Basic info
17 | Explore the data sources in your Terraform configuration to identify their names, types, and paths. This is useful for understanding the structure of your infrastructure and which external data your resources rely on, for example when reviewing or modifying your configuration.
18 | 
19 | ```sql+postgres
20 | select
21 |   name,
22 |   type,
23 |   arguments,
24 |   path
25 | from
26 |   terraform_data_source;
27 | ```
28 | 
29 | ```sql+sqlite
30 | select
31 |   name,
32 |   type,
33 |   arguments,
34 |   path
35 | from
36 |   terraform_data_source;
37 | ```
38 | 
39 | ### List AWS EC2 AMIs
40 | Identify which Amazon Machine Images (AMIs) are referenced by data sources in your Terraform configuration. This helps you understand how different AMIs are used across your infrastructure and supports informed decisions about resource allocation and management.
41 | 
42 | ```sql+postgres
43 | select
44 |   name,
45 |   type,
46 |   arguments,
47 |   path
48 | from
49 |   terraform_data_source
50 | where
51 |   type = 'aws_ami';
52 | ```
53 | 
54 | ```sql+sqlite
55 | select
56 |   name,
57 |   type,
58 |   arguments,
59 |   path
60 | from
61 |   terraform_data_source
62 | where
63 |   type = 'aws_ami';
64 | ```
65 | 
66 | ### Get filters for each AWS EC2 AMI
67 | List the filters applied to each AWS EC2 AMI data source. This is useful for understanding your AMI configurations and ensuring they align with your security and operational requirements.
68 | 69 | 70 | ```sql+postgres 71 | with filters as ( 72 | select 73 | name, 74 | type, 75 | jsonb_array_elements(arguments -> 'filter') as filter, 76 | path 77 | from 78 | terraform_data_source 79 | where 80 | type = 'aws_ami' 81 | ) 82 | select 83 | name, 84 | type, 85 | filter -> 'name' as name, 86 | filter -> 'values' as values, 87 | path 88 | from 89 | filters; 90 | ``` 91 | 92 | ```sql+sqlite 93 | with filters as ( 94 | select 95 | name, 96 | type, 97 | json_each(arguments, '$.filter') as filter, 98 | path 99 | from 100 | terraform_data_source 101 | where 102 | type = 'aws_ami' 103 | ) 104 | select 105 | name, 106 | type, 107 | json_extract(filter.value, '$.name') as name, 108 | json_extract(filter.value, '$.values') as values, 109 | path 110 | from 111 | filters; 112 | ``` -------------------------------------------------------------------------------- /docs/tables/terraform_local.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Steampipe Table: terraform_local - Query Terraform Local Values using SQL" 3 | description: "Allows users to query Terraform Local Values, particularly the final local value name and its corresponding expression." 4 | --- 5 | 6 | # Table: terraform_local - Query Terraform Local Values using SQL 7 | 8 | Terraform Local Values are a convenient naming mechanism that allows users to assign a name to an expression so it can be used multiple times within a module without repeating it. Local Values can be helpful to avoid repeating the same values or expressions multiple times in a Terraform configuration. If overused, they can also make configuration hard to read and understand if the reader has to continually lookup the values. 9 | 10 | ## Table Usage Guide 11 | 12 | The `terraform_local` table provides insights into local values within Terraform. As a DevOps engineer, explore local value-specific details through this table, including the final local value name and its corresponding expression. Utilize it to uncover information about local values, such as those that are used multiple times, to avoid repetition and enhance the readability of the Terraform configuration. 13 | 14 | ## Examples 15 | 16 | ### Basic info 17 | Analyze the settings to understand the basic information stored in your Terraform local configurations. This can assist in identifying potential configuration issues or inconsistencies.This query allows you to gain insights into the local values defined in your Terraform code. It can be useful to understand your configuration and to identify specific settings or paths that may need to be modified or reviewed. 18 | 19 | 20 | ```sql+postgres 21 | select 22 | name, 23 | value, 24 | path 25 | from 26 | terraform_local; 27 | ``` 28 | 29 | ```sql+sqlite 30 | select 31 | name, 32 | value, 33 | path 34 | from 35 | terraform_local; 36 | ``` 37 | 38 | ### List 'Owner' locals (case insensitive) 39 | Analyze the settings to understand who the owners are across various paths in a case-insensitive manner. This can be beneficial in managing access rights and maintaining security protocols.Identify instances where the 'owner' locals are used in the Terraform configuration to understand the ownership details in the system. This can be particularly useful in managing and organizing resources effectively. 
40 | 41 | 42 | ```sql+postgres 43 | select 44 | name, 45 | value, 46 | path 47 | from 48 | terraform_local 49 | where 50 | name ilike 'owner'; 51 | ``` 52 | 53 | ```sql+sqlite 54 | select 55 | name, 56 | value, 57 | path 58 | from 59 | terraform_local 60 | where 61 | name like 'owner'; 62 | ``` -------------------------------------------------------------------------------- /docs/tables/terraform_module.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Steampipe Table: terraform_module - Query Terraform Modules using SQL" 3 | description: "Allows users to query Terraform Modules, specifically the module source, version, and other metadata, providing insights into the configuration and usage of Terraform modules." 4 | --- 5 | 6 | # Table: terraform_module - Query Terraform Modules using SQL 7 | 8 | Terraform Modules are a set of Terraform resources that are grouped together and managed as a single entity. They provide a way to encapsulate common service configurations and reuse them across multiple environments or projects. Modules help in managing complex infrastructure setups by breaking them down into smaller, manageable components. 9 | 10 | ## Table Usage Guide 11 | 12 | The `terraform_module` table provides insights into Terraform Modules within Terraform. As a DevOps engineer, explore module-specific details through this table, including source, version, and other metadata. Utilize it to uncover information about modules, such as their configuration, usage across different environments or projects, and the management of complex infrastructure setups. 13 | 14 | **Important Notes** 15 | 16 | - The `source` argument in a module block tells Terraform where to find 17 | the source code for the desired child module. Due to name clashes, the 18 | column name for the `source` argument is `module_source`. 19 | - Registry modules support versioning via the `version` argument. 20 | 21 | ## Examples 22 | 23 | ### Basic info 24 | Explore the different modules in your Terraform configuration to understand their source and version. This can help ensure you're using the most up-to-date and secure modules in your infrastructure.Discover the segments that are using different versions of Terraform modules. This can help in managing updates and ensuring consistency across your infrastructure. 25 | 26 | 27 | ```sql+postgres 28 | select 29 | name, 30 | module_source, 31 | version 32 | from 33 | terraform_module; 34 | ``` 35 | 36 | ```sql+sqlite 37 | select 38 | name, 39 | module_source, 40 | version 41 | from 42 | terraform_module; 43 | ``` 44 | 45 | ### List all modules that reference a source on 'gitlab.com' but don't use a version number as reference 46 | This example highlights the identification of Terraform modules that reference sources on 'gitlab.com' but do not utilize a version number for referencing. This is useful for ensuring proper version control and avoiding potential inconsistencies or conflicts in your infrastructure setup.Explore modules that link to 'gitlab.com' but lack a specified version number. This is useful for identifying potential areas of instability in your infrastructure, as modules without version numbers can introduce unpredictability. 
47 | 48 | 49 | ```sql+postgres 50 | select 51 | name, 52 | split_part(module_source,'=',-1) as ref 53 | from 54 | terraform_module 55 | where 56 | module_source like '%gitlab.com%' 57 | and not split_part(module_source,'=',-1) ~ '^[0-9]'; 58 | ``` 59 | 60 | ```sql+sqlite 61 | Error: SQLite does not support split_part function and regular expression matching like '~'. 62 | ``` -------------------------------------------------------------------------------- /docs/tables/terraform_output.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Steampipe Table: terraform_output - Query Terraform Outputs using SQL" 3 | description: "Allows users to query Terraform Outputs, thus providing a means to extract information about the outputs from Terraform state files." 4 | --- 5 | 6 | # Table: terraform_output - Query Terraform Outputs using SQL 7 | 8 | Terraform Outputs serve as a means to extract data from a Terraform state. These outputs can be simple strings or complex data structures such as lists or maps. They provide a way to share information between modules, access computed values, and manage resource configurations. 9 | 10 | Output values are like the return values of a Terraform module, and have several uses: 11 | 12 | - A child module can use outputs to expose a subset of its resource attributes to a parent module. 13 | - A root module can use outputs to print certain values in the CLI output after running terraform apply. 14 | - When using remote state, root module outputs can be accessed by other configurations via a terraform_remote_state data source. 15 | 16 | ## Table Usage Guide 17 | 18 | The `terraform_output` table provides insights into the outputs from Terraform state files. As a DevOps engineer, you can explore output-specific details through this table, including the values, types, and associated state files. Utilize it to manage and monitor your Terraform infrastructure, ensuring configurations are as expected and aiding in troubleshooting. 19 | 20 | ## Examples 21 | 22 | ### Basic info 23 | Discover the segments that contain specific values within your Terraform outputs. This can be particularly useful in understanding the distribution and organization of your data.Explore the key details of your Terraform configuration outputs. This can help you understand the values and paths associated with different elements of your configuration, and can be useful in troubleshooting or optimizing your setup. 24 | 25 | 26 | ```sql+postgres 27 | select 28 | name, 29 | description, 30 | value, 31 | path 32 | from 33 | terraform_output; 34 | ``` 35 | 36 | ```sql+sqlite 37 | select 38 | name, 39 | description, 40 | value, 41 | path 42 | from 43 | terraform_output; 44 | ``` 45 | 46 | ### List sensitive outputs 47 | Discover the segments that contain sensitive information within your Terraform outputs. This can help in identifying potential security risks and take necessary precautions to protect your data.Explore which outputs in your Terraform configuration are marked as sensitive. This is useful for maintaining data security and confidentiality. 
48 | 49 | 50 | ```sql+postgres 51 | select 52 | name, 53 | description, 54 | path 55 | from 56 | terraform_output 57 | where 58 | sensitive; 59 | ``` 60 | 61 | ```sql+sqlite 62 | select 63 | name, 64 | description, 65 | path 66 | from 67 | terraform_output 68 | where 69 | sensitive = 1; 70 | ``` 71 | 72 | ### List outputs referring to AWS S3 bucket ARN attributes 73 | Explore which Terraform outputs reference AWS S3 bucket ARN attributes. This can be useful for identifying dependencies or potential configuration issues.Analyze the settings to understand the connections between your Terraform outputs and AWS S3 bucket ARN attributes. This is useful to identify potential dependencies or configurations that may impact your S3 bucket usage. 74 | 75 | 76 | ```sql+postgres 77 | select 78 | name, 79 | description, 80 | value, 81 | path 82 | from 83 | terraform_output 84 | where 85 | value::text like '%aws_s3_bucket.%.arn%'; 86 | ``` 87 | 88 | ```sql+sqlite 89 | select 90 | name, 91 | description, 92 | value, 93 | path 94 | from 95 | terraform_output 96 | where 97 | value like '%aws_s3_bucket.%.arn%'; 98 | ``` -------------------------------------------------------------------------------- /docs/tables/terraform_provider.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Steampipe Table: terraform_provider - Query Terraform Providers using SQL" 3 | description: "Allows users to query Terraform Providers, specifically the details about the provider plugins in the Terraform state." 4 | --- 5 | 6 | # Table: terraform_provider - Query Terraform Providers using SQL 7 | 8 | Terraform Providers are plugins that Terraform uses to manage resources. A provider is responsible for understanding API interactions and exposing resources. Providers generally are an IaaS (e.g., Alibaba Cloud, AWS, GCP, Microsoft Azure, OpenStack), PaaS (e.g., Heroku), or SaaS services (e.g., Terraform Cloud, DNSimple, Cloudflare). 9 | 10 | ## Table Usage Guide 11 | 12 | The `terraform_provider` table provides insights into provider plugins in the Terraform state. As a DevOps engineer, explore provider-specific details through this table, including the provider type, version, and associated metadata. Utilize it to uncover information about providers, such as their versions, and the verification of provider configurations. 13 | 14 | ## Examples 15 | 16 | ### Basic info 17 | Explore the specifics of your Terraform provider, including its name, alias, and associated arguments. This can help you better understand the configuration and structure of your Terraform environment.Explore the basic information about your Terraform providers to understand their names, aliases, and arguments, and to locate their paths. This can help streamline your configuration management and troubleshooting processes. 18 | 19 | ```sql+postgres 20 | select 21 | name, 22 | alias, 23 | arguments, 24 | path 25 | from 26 | terraform_provider; 27 | ``` 28 | 29 | ```sql+sqlite 30 | select 31 | name, 32 | alias, 33 | arguments, 34 | path 35 | from 36 | terraform_provider; 37 | ``` 38 | 39 | ### List providers using deprecated 'version' argument 40 | This example helps you identify the instances where deprecated 'version' arguments are still being used in your Terraform providers. This can aid in ensuring your configuration is up-to-date and compliant with current best practices.Discover the segments that are utilizing outdated 'version' arguments in their configuration. 
This aids in identifying areas for potential updates and improvements. 41 | 42 | ```sql+postgres 43 | select 44 | name, 45 | alias, 46 | version, 47 | path 48 | from 49 | terraform_provider 50 | where 51 | version is not null; 52 | ``` 53 | 54 | ```sql+sqlite 55 | select 56 | name, 57 | alias, 58 | version, 59 | path 60 | from 61 | terraform_provider 62 | where 63 | version is not null; 64 | ``` 65 | 66 | ### List AWS providers with their regions 67 | Explore the configuration of your AWS providers to understand the specific regions in which they operate. This can be beneficial in managing and optimizing your resource allocation across different geographical locations.Explore which AWS providers are configured across different regions. This can assist in managing resources and ensuring efficient distribution across various geographical locations. 68 | 69 | 70 | ```sql+postgres 71 | select 72 | name, 73 | alias, 74 | arguments ->> 'region' as region, 75 | path 76 | from 77 | terraform_provider 78 | where 79 | name = 'aws'; 80 | ``` 81 | 82 | ```sql+sqlite 83 | select 84 | name, 85 | alias, 86 | json_extract(arguments, '$.region') as region, 87 | path 88 | from 89 | terraform_provider 90 | where 91 | name = 'aws'; 92 | ``` -------------------------------------------------------------------------------- /docs/tables/terraform_resource.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Steampipe Table: terraform_resource - Query Terraform Resources using SQL" 3 | description: "Allows users to query Terraform Resources, specifically the configuration, state, and provider details, providing insights into resource management and potential configuration issues." 4 | --- 5 | 6 | # Table: terraform_resource - Query Terraform Resources using SQL 7 | 8 | Terraform is an open-source infrastructure as code software tool that enables users to define and provision a data center infrastructure using a high-level configuration language. It supports a multitude of providers such as AWS, GCP, Azure, and more. The Terraform Resources are the main component in a Terraform configuration, and they describe one or more infrastructure objects, such as virtual networks, compute instances, or higher-level components such as DNS records. 9 | 10 | ## Table Usage Guide 11 | 12 | The `terraform_resource` table provides insights into Terraform Resources within the Terraform environment. As a DevOps engineer, explore resource-specific details through this table, including configuration, state, and provider details. Utilize it to uncover information about resources, such as their current state, the provider they are associated with, and the details of their configuration. 13 | 14 | ## Examples 15 | 16 | ### Basic info 17 | Explore the fundamental details of your Terraform resources to gain a better understanding of their configuration and location. This can be beneficial in managing resources and assessing their setup.Explore which resources are currently in use within your Terraform configuration. This allows you to gain insights into the types, addresses, and paths of these resources, aiding you in your infrastructure management tasks. 
18 | 19 | 20 | ```sql+postgres 21 | select 22 | name, 23 | type, 24 | address, 25 | attributes_std, 26 | path 27 | from 28 | terraform_resource; 29 | ``` 30 | 31 | ```sql+sqlite 32 | select 33 | name, 34 | type, 35 | address, 36 | attributes_std, 37 | path 38 | from 39 | terraform_resource; 40 | ``` 41 | 42 | ### List AWS IAM roles 43 | Explore the configuration of your AWS infrastructure by identifying the roles assigned within it. This can help in understanding the access and permissions structure, aiding in security audits and compliance checks.Explore the various roles within your AWS IAM setup to understand their configurations and attributes. This could help in managing access control and ensuring security protocols are being followed. 44 | 45 | 46 | ```sql+postgres 47 | select 48 | name, 49 | type, 50 | address, 51 | attributes_std, 52 | path 53 | from 54 | terraform_resource 55 | where 56 | type = 'aws_iam_role'; 57 | ``` 58 | 59 | ```sql+sqlite 60 | select 61 | name, 62 | type, 63 | address, 64 | attributes_std, 65 | path 66 | from 67 | terraform_resource 68 | where 69 | type = 'aws_iam_role'; 70 | ``` 71 | 72 | ### List AWS IAM `assume_role_policy` Statements 73 | Explore which AWS Identity and Access Management (IAM) roles have specific permissions. This is particularly useful for auditing security and compliance purposes, as it allows you to identify potential vulnerabilities in your IAM roles' permissions.Analyze the settings to understand the policies associated with AWS IAM roles. This can be useful to identify instances where specific roles have been granted certain permissions, ensuring secure and appropriate access control within your AWS environment. 74 | 75 | 76 | ```sql+postgres 77 | select 78 | path, 79 | name, 80 | address, 81 | (attributes_std ->> 'assume_role_policy')::jsonb -> 'Statement' as statement 82 | from 83 | terraform_resource 84 | where 85 | type = 'aws_iam_role' 86 | ``` 87 | 88 | ```sql+sqlite 89 | select 90 | path, 91 | name, 92 | address, 93 | json_extract(attributes_std, '$.assume_role_policy.Statement') as statement 94 | from 95 | terraform_resource 96 | where 97 | type = 'aws_iam_role' 98 | ``` 99 | 100 | ### Get AMI for each AWS EC2 instance 101 | Explore which AWS EC2 instances are associated with each Amazon Machine Image (AMI). This can help identify instances that may be using outdated or unsecured AMIs, supporting better security and compliance management.Explore which Amazon Machine Images (AMIs) are used for each Amazon Web Services (AWS) Elastic Compute Cloud (EC2) instance. This is useful for understanding the software configurations of your EC2 instances. 102 | 103 | 104 | ```sql+postgres 105 | select 106 | address, 107 | name, 108 | attributes_std ->> 'ami' as ami, 109 | path 110 | from 111 | terraform_resource 112 | where 113 | type = 'aws_instance'; 114 | ``` 115 | 116 | ```sql+sqlite 117 | select 118 | address, 119 | name, 120 | json_extract(attributes_std, '$.ami') as ami, 121 | path 122 | from 123 | terraform_resource 124 | where 125 | type = 'aws_instance'; 126 | ``` 127 | 128 | ### List AWS CloudTrail trails that are not encrypted 129 | Analyze the settings to understand which AWS CloudTrail trails are not encrypted, helping to identify potential security risks in your AWS environment.Determine the areas in which AWS CloudTrail trails are not encrypted to ensure data security and compliance. This is crucial for identifying potential security vulnerabilities in your AWS environment. 
130 | 131 | 132 | ```sql+postgres 133 | select 134 | address, 135 | name, 136 | path 137 | from 138 | terraform_resource 139 | where 140 | type = 'aws_cloudtrail' 141 | and attributes_std -> 'kms_key_id' is null; 142 | ``` 143 | 144 | ```sql+sqlite 145 | select 146 | address, 147 | name, 148 | path 149 | from 150 | terraform_resource 151 | where 152 | type = 'aws_cloudtrail' 153 | and json_extract(attributes_std, '$.kms_key_id') is null; 154 | ``` 155 | 156 | ### List Azure storage accounts that allow public blob access 157 | Explore which Azure storage accounts permit public access to their blobs. This is useful in identifying potential security vulnerabilities where sensitive data might be exposed.Explore which Azure storage accounts permit public blob access. This can be useful in identifying potential security risks and ensuring that sensitive data is not inadvertently exposed to the public. 158 | 159 | 160 | ```sql+postgres 161 | select 162 | address, 163 | name, 164 | case 165 | when attributes_std -> 'allow_blob_public_access' is null then false 166 | else (attributes_std -> 'allow_blob_public_access')::boolean 167 | end as allow_blob_public_access, 168 | path 169 | from 170 | terraform_resource 171 | where 172 | type = 'azurerm_storage_account' 173 | -- Optional arg that defaults to false 174 | and (attributes_std -> 'allow_blob_public_access')::boolean; 175 | ``` 176 | 177 | ```sql+sqlite 178 | select 179 | address, 180 | name, 181 | case 182 | when json_extract(attributes_std, '$.allow_blob_public_access') is null then 0 183 | else json_extract(attributes_std, '$.allow_blob_public_access') 184 | end as allow_blob_public_access, 185 | path 186 | from 187 | terraform_resource 188 | where 189 | type = 'azurerm_storage_account' 190 | and json_extract(attributes_std, '$.allow_blob_public_access'); 191 | ``` 192 | 193 | ### List Azure MySQL servers that don't enforce SSL 194 | Explore which Azure MySQL servers are potentially vulnerable by identifying those that do not enforce SSL. This can help enhance security by pinpointing areas to strengthen encryption measures.Determine the areas in which Azure MySQL servers are not enforcing SSL. This is useful to identify potential security vulnerabilities and ensure all servers are adhering to best practices for secure connections. 195 | 196 | 197 | ```sql+postgres 198 | select 199 | address, 200 | name, 201 | attributes_std -> 'ssl_enforcement_enabled' as ssl_enforcement_enabled, 202 | path 203 | from 204 | terraform_resource 205 | where 206 | type = 'azurerm_mysql_server' 207 | and not (attributes_std -> 'ssl_enforcement_enabled')::boolean; 208 | ``` 209 | 210 | ```sql+sqlite 211 | select 212 | address, 213 | name, 214 | json_extract(attributes_std, '$.ssl_enforcement_enabled') as ssl_enforcement_enabled, 215 | path 216 | from 217 | terraform_resource 218 | where 219 | type = 'azurerm_mysql_server' 220 | and not json_extract(attributes_std, '$.ssl_enforcement_enabled'); 221 | ``` 222 | 223 | ### List Azure MySQL servers with public network access enabled 224 | Determine the Azure MySQL servers that have public network access enabled. This can be useful for identifying potential security risks and ensuring that your servers are configured according to your organization's security policies.Determine the Azure MySQL servers that have public network access enabled. This helps in identifying potential security risks by highlighting servers that are exposed to the public internet. 
225 | 226 | 227 | ```sql+postgres 228 | select 229 | address, 230 | name, 231 | case 232 | when attributes_std -> 'public_network_access_enabled' is null then true 233 | else (attributes_std -> 'public_network_access_enabled')::boolean 234 | end as public_network_access_enabled, 235 | path 236 | from 237 | terraform_resource 238 | where 239 | type in ('azurerm_mssql_server', 'azurerm_mysql_server') 240 | -- Optional arg that defaults to true 241 | and (attributes_std -> 'public_network_access_enabled' is null or (attributes_std -> 'public_network_access_enabled')::boolean); 242 | ``` 243 | 244 | ```sql+sqlite 245 | select 246 | address, 247 | name, 248 | case 249 | when json_extract(attributes_std, '$.public_network_access_enabled') is null then 1 250 | else json_extract(attributes_std, '$.public_network_access_enabled') 251 | end as public_network_access_enabled, 252 | path 253 | from 254 | terraform_resource 255 | where 256 | type in ('azurerm_mssql_server', 'azurerm_mysql_server') 257 | and (json_extract(attributes_std, '$.public_network_access_enabled') is null or json_extract(attributes_std, '$.public_network_access_enabled')); 258 | ``` 259 | 260 | ### List resources from a plan file 261 | This query allows you to analyze the resources outlined in a specific Terraform plan file. It helps in gaining insights into the different elements like name, type, and address, which can be beneficial for understanding the structure and configuration of your infrastructure.Explore which resources are included in a specific plan file. This can help identify instances where certain resources may need to be added, removed, or modified, providing insights into the overall configuration of your project. 262 | 263 | ```sql+postgres 264 | select 265 | name, 266 | type, 267 | address, 268 | attributes_std, 269 | path 270 | from 271 | terraform_resource 272 | where 273 | path = '/path/to/tfplan.json'; 274 | ``` 275 | 276 | ```sql+sqlite 277 | select 278 | name, 279 | type, 280 | address, 281 | attributes_std, 282 | path 283 | from 284 | terraform_resource 285 | where 286 | path = '/path/to/tfplan.json'; 287 | ``` 288 | 289 | ### List resources from a state file 290 | Explore which resources are contained within a specific state file. This is useful for understanding the structure and content of your Terraform infrastructure without needing to navigate through multiple files or directories.Determine the resources within a specific state file in Terraform. This is useful for understanding the components of your infrastructure and their attributes, especially when managing large-scale deployments. 291 | 292 | 293 | ```sql+postgres 294 | select 295 | name, 296 | type, 297 | address, 298 | attributes_std, 299 | path 300 | from 301 | terraform_resource 302 | where 303 | path = '/path/to/terraform.tfstate'; 304 | ``` 305 | 306 | ```sql+sqlite 307 | select 308 | name, 309 | type, 310 | address, 311 | attributes_std, 312 | path 313 | from 314 | terraform_resource 315 | where 316 | path = '/path/to/terraform.tfstate'; 317 | ``` -------------------------------------------------------------------------------- /docs/tables/terraform_variable.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Steampipe Table: terraform_variable - Query Terraform Variables using SQL" 3 | description: "Allows users to query Terraform Variables, providing insights into variable definitions and their properties within Terraform configurations." 
4 | --- 5 | 6 | # Table: terraform_variable - Query Terraform Variables using SQL 7 | 8 | Terraform Variables define the parameters that can be customized during the execution of a Terraform configuration. They allow users to provide input values for Terraform configurations, making them reusable and more flexible. Variables can have default values, descriptions, types, and validations to ensure proper usage. 9 | 10 | ## Table Usage Guide 11 | 12 | The `terraform_variable` table provides insights into the variables defined in your Terraform configurations. As a DevOps engineer, you can explore variable-specific details through this table, including their names, types, default values, descriptions, and validation rules. Utilize it to manage and monitor your Terraform infrastructure, ensuring configurations are as expected and aiding in troubleshooting. 13 | 14 | ## Examples 15 | 16 | ### Basic Info 17 | Explore the key details of your Terraform configuration variables. This can help you understand the values and paths associated with different elements of your configuration, and can be useful in troubleshooting or optimizing your setup. 18 | 19 | ```sql+postgres 20 | select 21 | name, 22 | description, 23 | type, 24 | default_value, 25 | path 26 | from 27 | terraform_variable; 28 | ``` 29 | 30 | ```sql+sqlite 31 | select 32 | name, 33 | description, 34 | type, 35 | default_value, 36 | path 37 | from 38 | terraform_variable; 39 | ``` 40 | 41 | ### List Variables with Validation Rules 42 | Identify the variables that have validation rules applied. This is useful for ensuring that the constraints on variable values are properly understood and managed. 43 | 44 | ```sql+postgres 45 | select 46 | name, 47 | validation, 48 | type 49 | from 50 | terraform_variable 51 | where 52 | validation is not null; 53 | ``` 54 | 55 | ```sql+sqlite 56 | select 57 | name, 58 | validation, 59 | type 60 | from 61 | terraform_variable 62 | where 63 | validation is not null; 64 | ``` 65 | 66 | ### Sensitive Variables 67 | Discover which variables in your Terraform configuration are marked as sensitive. This is useful for maintaining data security and confidentiality. 
68 | 69 | ```sql+postgres 70 | select 71 | name, 72 | description, 73 | sensitive 74 | from 75 | terraform_variable 76 | where 77 | sensitive; 78 | ``` 79 | 80 | ```sql+sqlite 81 | select 82 | name, 83 | description, 84 | sensitive 85 | from 86 | terraform_variable 87 | where 88 | sensitive = 1; 89 | ``` 90 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/turbot/steampipe-plugin-terraform 2 | 3 | go 1.23.1 4 | 5 | toolchain go1.23.2 6 | 7 | require ( 8 | github.com/Checkmarx/kics v1.7.13 9 | github.com/hashicorp/hcl/v2 v2.20.1 10 | github.com/turbot/go-kit v1.1.0 11 | github.com/turbot/steampipe-plugin-sdk/v5 v5.11.5 12 | github.com/zclconf/go-cty v1.14.4 13 | ) 14 | 15 | require ( 16 | cloud.google.com/go v0.112.1 // indirect 17 | cloud.google.com/go/compute/metadata v0.3.0 // indirect 18 | cloud.google.com/go/iam v1.1.6 // indirect 19 | cloud.google.com/go/storage v1.38.0 // indirect 20 | github.com/BurntSushi/toml v1.3.2 // indirect 21 | github.com/VividCortex/ewma v1.2.0 // indirect 22 | github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect 23 | github.com/agext/levenshtein v1.2.3 // indirect 24 | github.com/alexmullins/zip v0.0.0-20180717182244-4affb64b04d0 // indirect 25 | github.com/allegro/bigcache/v3 v3.1.0 // indirect 26 | github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect 27 | github.com/aws/aws-sdk-go v1.44.295 // indirect 28 | github.com/beorn7/perks v1.0.1 // indirect 29 | github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect 30 | github.com/boombuler/barcode v1.0.1 // indirect 31 | github.com/btubbs/datetime v0.1.1 // indirect 32 | github.com/cenkalti/backoff/v4 v4.3.0 // indirect 33 | github.com/cespare/xxhash/v2 v2.3.0 // indirect 34 | github.com/cheggaaa/pb/v3 v3.1.2 // indirect 35 | github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 // indirect 36 | github.com/davecgh/go-spew v1.1.1 // indirect 37 | github.com/dgraph-io/ristretto v0.2.0 // indirect 38 | github.com/dustin/go-humanize v1.0.1 // indirect 39 | github.com/eko/gocache/lib/v4 v4.1.6 // indirect 40 | github.com/eko/gocache/store/bigcache/v4 v4.2.1 // indirect 41 | github.com/eko/gocache/store/ristretto/v4 v4.2.1 // indirect 42 | github.com/emicklei/go-restful/v3 v3.11.0 // indirect 43 | github.com/evanphx/json-patch/v5 v5.6.0 // indirect 44 | github.com/fatih/color v1.17.0 // indirect 45 | github.com/felixge/httpsnoop v1.0.4 // indirect 46 | github.com/fsnotify/fsnotify v1.7.0 // indirect 47 | github.com/gertd/go-pluralize v0.2.1 // indirect 48 | github.com/getsentry/sentry-go v0.20.0 // indirect 49 | github.com/ghodss/yaml v1.0.0 // indirect 50 | github.com/go-logr/logr v1.4.1 // indirect 51 | github.com/go-logr/stdr v1.2.2 // indirect 52 | github.com/go-openapi/jsonpointer v0.19.6 // indirect 53 | github.com/go-openapi/jsonreference v0.20.2 // indirect 54 | github.com/go-openapi/swag v0.22.3 // indirect 55 | github.com/gocarina/gocsv v0.0.0-20220310154401-d4df709ca055 // indirect 56 | github.com/gogo/protobuf v1.3.2 // indirect 57 | github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect 58 | github.com/golang/mock v1.6.0 // indirect 59 | github.com/golang/protobuf v1.5.4 // indirect 60 | github.com/google/gnostic-models v0.6.8 // indirect 61 | github.com/google/go-cmp v0.6.0 // indirect 62 | github.com/google/gofuzz v1.2.0 // indirect 63 | github.com/google/pprof 
v0.0.0-20210720184732-4bb14d4b1be1 // indirect 64 | github.com/google/s2a-go v0.1.7 // indirect 65 | github.com/google/uuid v1.6.0 // indirect 66 | github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect 67 | github.com/googleapis/gax-go/v2 v2.12.3 // indirect 68 | github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 // indirect 69 | github.com/hashicorp/go-cleanhttp v0.5.2 // indirect 70 | github.com/hashicorp/go-getter v1.7.5 // indirect 71 | github.com/hashicorp/go-hclog v1.6.3 // indirect 72 | github.com/hashicorp/go-plugin v1.6.1 // indirect 73 | github.com/hashicorp/go-safetemp v1.0.0 // indirect 74 | github.com/hashicorp/go-version v1.7.0 // indirect 75 | github.com/hashicorp/hcl v1.0.0 // indirect 76 | github.com/hashicorp/terraform-json v0.16.0 // indirect 77 | github.com/hashicorp/yamux v0.1.1 // indirect 78 | github.com/iancoleman/strcase v0.3.0 // indirect 79 | github.com/imdario/mergo v0.3.13 // indirect 80 | github.com/inconshreveable/mousetrap v1.1.0 // indirect 81 | github.com/jmespath/go-jmespath v0.4.0 // indirect 82 | github.com/johnfercher/maroto v0.40.0 // indirect 83 | github.com/josharian/intern v1.0.0 // indirect 84 | github.com/json-iterator/go v1.1.12 // indirect 85 | github.com/jung-kurt/gofpdf v1.16.2 // indirect 86 | github.com/klauspost/compress v1.17.2 // indirect 87 | github.com/magiconair/properties v1.8.7 // indirect 88 | github.com/mailru/easyjson v0.7.7 // indirect 89 | github.com/mattn/go-colorable v0.1.13 // indirect 90 | github.com/mattn/go-isatty v0.0.20 // indirect 91 | github.com/mattn/go-runewidth v0.0.15 // indirect 92 | github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect 93 | github.com/mitchellh/go-homedir v1.1.0 // indirect 94 | github.com/mitchellh/go-testing-interface v1.14.1 // indirect 95 | github.com/mitchellh/go-wordwrap v1.0.1 // indirect 96 | github.com/mitchellh/mapstructure v1.5.0 // indirect 97 | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect 98 | github.com/modern-go/reflect2 v1.0.2 // indirect 99 | github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect 100 | github.com/oklog/run v1.0.0 // indirect 101 | github.com/olekukonko/tablewriter v0.0.5 // indirect 102 | github.com/pelletier/go-toml/v2 v2.0.6 // indirect 103 | github.com/pkg/errors v0.9.1 // indirect 104 | github.com/prometheus/client_golang v1.16.0 // indirect 105 | github.com/prometheus/client_model v0.4.0 // indirect 106 | github.com/prometheus/common v0.44.0 // indirect 107 | github.com/prometheus/procfs v0.10.1 // indirect 108 | github.com/rivo/uniseg v0.2.0 // indirect 109 | github.com/rs/zerolog v1.29.0 // indirect 110 | github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245 // indirect 111 | github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 // indirect 112 | github.com/sethvargo/go-retry v0.2.4 // indirect 113 | github.com/sosedoff/ansible-vault-go v0.1.1 // indirect 114 | github.com/spf13/afero v1.9.3 // indirect 115 | github.com/spf13/cast v1.5.0 // indirect 116 | github.com/spf13/cobra v1.8.0 // indirect 117 | github.com/spf13/jwalterweatherman v1.1.0 // indirect 118 | github.com/spf13/pflag v1.0.5 // indirect 119 | github.com/spf13/viper v1.15.0 // indirect 120 | github.com/stevenle/topsort v0.2.0 // indirect 121 | github.com/subosito/gotenv v1.4.2 // indirect 122 | github.com/tdewolff/minify/v2 v2.12.5 // indirect 123 | github.com/tdewolff/parse/v2 v2.6.5 // indirect 124 | github.com/tkrajina/go-reflector v0.5.6 // indirect 125 | github.com/ulikunitz/xz 
v0.5.11 // indirect 126 | github.com/yargevad/filepathx v1.0.0 // indirect 127 | go.opencensus.io v0.24.0 // indirect 128 | go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect 129 | go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect 130 | go.opentelemetry.io/otel v1.26.0 // indirect 131 | go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.26.0 // indirect 132 | go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.24.0 // indirect 133 | go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.24.0 // indirect 134 | go.opentelemetry.io/otel/metric v1.26.0 // indirect 135 | go.opentelemetry.io/otel/sdk v1.26.0 // indirect 136 | go.opentelemetry.io/otel/sdk/metric v1.26.0 // indirect 137 | go.opentelemetry.io/otel/trace v1.26.0 // indirect 138 | go.opentelemetry.io/proto/otlp v1.2.0 // indirect 139 | golang.org/x/crypto v0.35.0 // indirect 140 | golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 // indirect 141 | golang.org/x/mod v0.19.0 // indirect 142 | golang.org/x/net v0.36.0 // indirect 143 | golang.org/x/oauth2 v0.21.0 // indirect 144 | golang.org/x/sync v0.11.0 // indirect 145 | golang.org/x/sys v0.30.0 // indirect 146 | golang.org/x/term v0.29.0 // indirect 147 | golang.org/x/text v0.22.0 // indirect 148 | golang.org/x/time v0.5.0 // indirect 149 | golang.org/x/tools v0.23.0 // indirect 150 | google.golang.org/api v0.171.0 // indirect 151 | google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect 152 | google.golang.org/genproto/googleapis/api v0.0.0-20240604185151-ef581f913117 // indirect 153 | google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117 // indirect 154 | google.golang.org/grpc v1.66.0 // indirect 155 | google.golang.org/protobuf v1.34.2 // indirect 156 | gopkg.in/inf.v0 v0.9.1 // indirect 157 | gopkg.in/ini.v1 v1.67.0 // indirect 158 | gopkg.in/yaml.v2 v2.4.0 // indirect 159 | gopkg.in/yaml.v3 v3.0.1 // indirect 160 | k8s.io/api v0.29.0 // indirect 161 | k8s.io/apimachinery v0.29.0 // indirect 162 | k8s.io/client-go v0.29.0 // indirect 163 | k8s.io/klog/v2 v2.110.1 // indirect 164 | k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 // indirect 165 | k8s.io/utils v0.0.0-20230726121419-3b25d923346b // indirect 166 | sigs.k8s.io/controller-runtime v0.14.6 // indirect 167 | sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect 168 | sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect 169 | sigs.k8s.io/yaml v1.4.0 // indirect 170 | ) 171 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 5 | "github.com/turbot/steampipe-plugin-terraform/terraform" 6 | ) 7 | 8 | func main() { 9 | plugin.Serve(&plugin.ServeOpts{ 10 | PluginFunc: terraform.Plugin}) 11 | } 12 | -------------------------------------------------------------------------------- /terraform/connection_config.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 5 | ) 6 | 7 | type terraformConfig struct { 8 | ConfigurationFilePaths []string `hcl:"configuration_file_paths,optional" steampipe:"watch"` 9 | Paths []string `hcl:"paths,optional" steampipe:"watch"` 10 | PlanFilePaths []string `hcl:"plan_file_paths,optional" 
steampipe:"watch"` 11 | StateFilePaths []string `hcl:"state_file_paths,optional" steampipe:"watch"` 12 | } 13 | 14 | func ConfigInstance() interface{} { 15 | return &terraformConfig{} 16 | } 17 | 18 | // GetConfig :: retrieve and cast connection config from query data 19 | func GetConfig(connection *plugin.Connection) terraformConfig { 20 | if connection == nil || connection.Config == nil { 21 | return terraformConfig{} 22 | } 23 | config, _ := connection.Config.(terraformConfig) 24 | return config 25 | } 26 | -------------------------------------------------------------------------------- /terraform/parse_tfplan.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "encoding/json" 6 | "fmt" 7 | ) 8 | 9 | type TerraformPlanResource struct { 10 | Name string `cty:"name"` 11 | Type string `cty:"type"` 12 | Mode string `cty:"mode"` 13 | Values map[string]interface{} `cty:"values"` 14 | Address string `cty:"address"` 15 | } 16 | 17 | type TerraformPlanPlannedValuesRootModule struct { 18 | Resources []TerraformPlanResource `json:"resources"` 19 | } 20 | 21 | type TerraformPlanPlannedValues struct { 22 | RootModule TerraformPlanPlannedValuesRootModule `json:"root_module"` 23 | } 24 | 25 | type TerraformPlanContentStruct struct { 26 | PlannedValues TerraformPlanPlannedValues `json:"planned_values"` 27 | } 28 | 29 | func getTerraformPlanContentFromBytes(rawContent []byte) (*TerraformPlanContentStruct, error) { 30 | var planContent *TerraformPlanContentStruct 31 | err := json.Unmarshal(rawContent, &planContent) 32 | if err != nil { 33 | return nil, fmt.Errorf("failed to unmarshal the plan file content: %v", err) 34 | } 35 | return planContent, nil 36 | } 37 | 38 | func buildTerraformPlanResource(ctx context.Context, path string, resource TerraformPlanResource) (*terraformResource, error) { 39 | tfResource := new(terraformResource) 40 | 41 | tfResource.Path = path 42 | tfResource.Type = resource.Type 43 | tfResource.Name = resource.Name 44 | tfResource.Address = resource.Address 45 | tfResource.Mode = resource.Mode 46 | tfResource.Arguments = resource.Values 47 | tfResource.AttributesStd = tfResource.Arguments 48 | 49 | startLine, endLine, source, err := findBlockLinesFromJSON(ctx, path, "resources", resource.Address, resource.Type) 50 | if err != nil { 51 | return nil, err 52 | } 53 | 54 | tfResource.StartLine = startLine 55 | tfResource.EndLine = endLine 56 | tfResource.Source = source 57 | 58 | return tfResource, nil 59 | } 60 | -------------------------------------------------------------------------------- /terraform/plugin.go: -------------------------------------------------------------------------------- 1 | /* 2 | Package terraform implements a steampipe plugin for terraform. 3 | 4 | This plugin provides data that Steampipe uses to present foreign 5 | tables that represent Terraform resources. 
6 | */ 7 | package terraform 8 | 9 | import ( 10 | "context" 11 | 12 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 13 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" 14 | ) 15 | 16 | const pluginName = "steampipe-plugin-terraform" 17 | 18 | // Plugin creates this (terraform) plugin 19 | func Plugin(ctx context.Context) *plugin.Plugin { 20 | p := &plugin.Plugin{ 21 | Name: pluginName, 22 | DefaultTransform: transform.FromCamel().NullIfZero(), 23 | ConnectionConfigSchema: &plugin.ConnectionConfigSchema{ 24 | NewInstance: ConfigInstance, 25 | }, 26 | TableMap: map[string]*plugin.Table{ 27 | "terraform_data_source": tableTerraformDataSource(ctx), 28 | "terraform_local": tableTerraformLocal(ctx), 29 | "terraform_module": tableTerraformModule(ctx), 30 | "terraform_output": tableTerraformOutput(ctx), 31 | "terraform_provider": tableTerraformProvider(ctx), 32 | "terraform_resource": tableTerraformResource(ctx), 33 | "terraform_variable": tableTerraformVariable(ctx), 34 | }, 35 | } 36 | 37 | return p 38 | } 39 | -------------------------------------------------------------------------------- /terraform/table_terraform_data_source.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "reflect" 8 | 9 | "github.com/Checkmarx/kics/pkg/model" 10 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 11 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 12 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" 13 | "github.com/zclconf/go-cty/cty/gocty" 14 | ctyjson "github.com/zclconf/go-cty/cty/json" 15 | ) 16 | 17 | func tableTerraformDataSource(ctx context.Context) *plugin.Table { 18 | return &plugin.Table{ 19 | Name: "terraform_data_source", 20 | Description: "Terraform data source information.", 21 | List: &plugin.ListConfig{ 22 | ParentHydrate: tfConfigList, 23 | Hydrate: listDataSources, 24 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 25 | }, 26 | Columns: []*plugin.Column{ 27 | { 28 | Name: "name", 29 | Description: "Data source name.", 30 | Type: proto.ColumnType_STRING, 31 | }, 32 | { 33 | Name: "type", 34 | Description: "Data source type.", 35 | Type: proto.ColumnType_STRING, 36 | }, 37 | { 38 | Name: "arguments", 39 | Description: "Data source arguments.", 40 | Type: proto.ColumnType_JSON, 41 | Transform: transform.FromField("Arguments").Transform(NullIfEmptyMap), 42 | }, 43 | { 44 | Name: "count", 45 | Description: "The integer value for the count meta-argument if it's set as a number in a literal expression.", 46 | Type: proto.ColumnType_INT, 47 | }, 48 | { 49 | Name: "count_src", 50 | Description: "The count meta-argument accepts a whole number, and creates that many instances of the resource or module.", 51 | Type: proto.ColumnType_JSON, 52 | }, 53 | { 54 | Name: "for_each", 55 | Description: "The for_each meta-argument accepts a map or a set of strings, and creates an instance for each item in that map or set.", 56 | Type: proto.ColumnType_JSON, 57 | }, 58 | { 59 | Name: "depends_on", 60 | Description: "Use the depends_on meta-argument to handle hidden data source or module dependencies that Terraform can't automatically infer.", 61 | Type: proto.ColumnType_JSON, 62 | }, 63 | { 64 | Name: "provider", 65 | Description: "The provider meta-argument specifies which provider configuration to use for a data source, overriding Terraform's default behavior of selecting one based on the data source type name.", 66 | Type: proto.ColumnType_STRING, 
67 | }, 68 | { 69 | Name: "start_line", 70 | Description: "Starting line number.", 71 | Type: proto.ColumnType_INT, 72 | }, 73 | { 74 | Name: "end_line", 75 | Description: "Ending line number.", 76 | Type: proto.ColumnType_INT, 77 | }, 78 | { 79 | Name: "source", 80 | Description: "The block source code.", 81 | Type: proto.ColumnType_STRING, 82 | }, 83 | { 84 | Name: "path", 85 | Description: "Path to the file.", 86 | Type: proto.ColumnType_STRING, 87 | }, 88 | }, 89 | } 90 | } 91 | 92 | type terraformDataSource struct { 93 | Name string 94 | Type string 95 | Path string 96 | StartLine int 97 | EndLine int 98 | Source string 99 | Arguments map[string]interface{} 100 | DependsOn []string 101 | // Count can be a number or refer to a local or variable 102 | Count int 103 | CountSrc string 104 | ForEach string 105 | // A data source's provider arg will always reference a provider block 106 | Provider string 107 | } 108 | 109 | func listDataSources(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 110 | // The path comes from a parent hydrate, defaulting to the config paths or 111 | // available by the optional key column 112 | data := h.Item.(filePath) 113 | path := data.Path 114 | 115 | content, err := os.ReadFile(path) 116 | if err != nil { 117 | plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "read_file_error", err, "path", path) 118 | return nil, err 119 | } 120 | 121 | // Return if the path is a TF plan or state path 122 | if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { 123 | return nil, nil 124 | } 125 | 126 | combinedParser, err := Parser() 127 | if err != nil { 128 | plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "create_parser_error", err) 129 | return nil, err 130 | } 131 | 132 | tfDataSource := new(terraformDataSource) 133 | 134 | for _, parser := range combinedParser { 135 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 136 | if err != nil { 137 | plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "parse_error", err, "path", path) 138 | return nil, fmt.Errorf("failed to parse file %s: %v", path, err) 139 | } 140 | 141 | for _, doc := range parsedDocs.Docs { 142 | if doc["data"] != nil { 143 | // Data sources are grouped by data source type 144 | for dataSourceType, dataSources := range doc["data"].(model.Document) { 145 | tfDataSource.Path = path 146 | tfDataSource.Type = dataSourceType 147 | // For each data source, scan its arguments 148 | for dataSourceName, dataSourceData := range dataSources.(model.Document) { 149 | tfDataSource, err = buildDataSource(ctx, path, content, dataSourceType, dataSourceName, dataSourceData.(model.Document)) 150 | if err != nil { 151 | plugin.Logger(ctx).Error("terraform_data_source.listDataSources", "build_data_source_error", err) 152 | return nil, err 153 | } 154 | d.StreamListItem(ctx, tfDataSource) 155 | } 156 | } 157 | } 158 | } 159 | } 160 | 161 | return nil, nil 162 | } 163 | 164 | func buildDataSource(ctx context.Context, path string, content []byte, dataSourceType string, name string, d model.Document) (*terraformDataSource, error) { 165 | var tfDataSource = new(terraformDataSource) 166 | 167 | tfDataSource.Path = path 168 | tfDataSource.Type = dataSourceType 169 | tfDataSource.Name = name 170 | tfDataSource.Arguments = make(map[string]interface{}) 171 | 172 | // Remove all "_kics" arguments 173 | sanitizeDocument(d) 174 | 175 | startPosition, endPosition, source, err := getBlock(ctx, path, content,
"data", []string{dataSourceType, name}) 176 | if err != nil { 177 | plugin.Logger(ctx).Error("error getting details of block", err) 178 | return nil, err 179 | } 180 | 181 | tfDataSource.StartLine = startPosition.Line 182 | tfDataSource.Source = source 183 | tfDataSource.EndLine = endPosition.Line 184 | 185 | for k, v := range d { 186 | switch k { 187 | case "count": 188 | valStr, err := convertExpressionValue(v) 189 | if err != nil { 190 | plugin.Logger(ctx).Error("terraform_data_source.buildDataSource", "convert_count_error", err) 191 | return tfDataSource, err 192 | } 193 | tfDataSource.CountSrc = valStr 194 | 195 | // Only attempt to get the int value if the type is SimpleJSONValue 196 | if reflect.TypeOf(v).String() == "json.SimpleJSONValue" { 197 | var countVal int 198 | err := gocty.FromCtyValue(v.(ctyjson.SimpleJSONValue).Value, &countVal) 199 | // Log the error but don't return the err since we have count_src anyway 200 | if err != nil { 201 | plugin.Logger(ctx).Warn("terraform_resource.buildResource", "convert_count_error", err) 202 | } 203 | tfDataSource.Count = countVal 204 | } 205 | 206 | case "provider": 207 | if reflect.TypeOf(v).String() != "string" { 208 | return tfDataSource, fmt.Errorf("The 'provider' argument for data source '%s' must be of type string", name) 209 | } 210 | tfDataSource.Provider = v.(string) 211 | 212 | case "for_each": 213 | valStr, err := convertExpressionValue(v) 214 | if err != nil { 215 | plugin.Logger(ctx).Error("terraform_data_source.buildDataSource", "convert_for_each_error", err) 216 | return tfDataSource, err 217 | } 218 | tfDataSource.ForEach = valStr 219 | 220 | case "depends_on": 221 | if reflect.TypeOf(v).String() != "[]interface {}" { 222 | return tfDataSource, fmt.Errorf("The 'depends_on' argument for data source '%s' must be of type list", name) 223 | } 224 | interfaces := v.([]interface{}) 225 | s := make([]string, len(interfaces)) 226 | for i, v := range interfaces { 227 | s[i] = fmt.Sprint(v) 228 | } 229 | tfDataSource.DependsOn = s 230 | 231 | // It's safe to add any remaining arguments since we've already removed all "_kics" arguments 232 | default: 233 | tfDataSource.Arguments[k] = v 234 | } 235 | } 236 | return tfDataSource, nil 237 | } 238 | -------------------------------------------------------------------------------- /terraform/table_terraform_local.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | 8 | "github.com/Checkmarx/kics/pkg/model" 9 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 10 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 11 | ) 12 | 13 | func tableTerraformLocal(ctx context.Context) *plugin.Table { 14 | return &plugin.Table{ 15 | Name: "terraform_local", 16 | Description: "Terraform local information.", 17 | List: &plugin.ListConfig{ 18 | ParentHydrate: tfConfigList, 19 | Hydrate: listLocals, 20 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 21 | }, 22 | Columns: []*plugin.Column{ 23 | { 24 | Name: "name", 25 | Description: "Local name.", 26 | Type: proto.ColumnType_STRING, 27 | }, 28 | { 29 | Name: "value", 30 | Description: "Local value.", 31 | Type: proto.ColumnType_JSON, 32 | }, 33 | { 34 | Name: "start_line", 35 | Description: "Starting line number.", 36 | Type: proto.ColumnType_INT, 37 | }, 38 | { 39 | Name: "end_line", 40 | Description: "Ending line number.", 41 | Type: proto.ColumnType_INT, 42 | }, 43 | { 44 | Name: "source", 45 | Description: "The block source 
code.", 46 | Type: proto.ColumnType_STRING, 47 | }, 48 | { 49 | Name: "path", 50 | Description: "Path to the file.", 51 | Type: proto.ColumnType_STRING, 52 | }, 53 | }, 54 | } 55 | } 56 | 57 | type terraformLocal struct { 58 | Name string 59 | Value string 60 | Path string 61 | StartLine int 62 | EndLine int 63 | Source string 64 | } 65 | 66 | func listLocals(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 67 | // The path comes from a parent hydate, defaulting to the config paths or 68 | // available by the optional key column 69 | data := h.Item.(filePath) 70 | path := data.Path 71 | 72 | content, err := os.ReadFile(path) 73 | if err != nil { 74 | plugin.Logger(ctx).Error("terraform_local.listLocals", "read_file_error", err, "path", path) 75 | return nil, err 76 | } 77 | 78 | // Return if the path is a TF plan or state path 79 | if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { 80 | return nil, nil 81 | } 82 | 83 | combinedParser, err := Parser() 84 | if err != nil { 85 | plugin.Logger(ctx).Error("terraform_local.listLocals", "create_parser_error", err) 86 | return nil, err 87 | } 88 | 89 | for _, parser := range combinedParser { 90 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 91 | if err != nil { 92 | plugin.Logger(ctx).Error("terraform_local.listLocals", "parse_error", err, "path", path) 93 | return nil, fmt.Errorf("failed to parse file %s: %v", path, err) 94 | } 95 | 96 | for _, doc := range parsedDocs.Docs { 97 | if doc["locals"] != nil { 98 | // Locals are grouped by local blocks 99 | switch localType := doc["locals"].(type) { 100 | 101 | // If more than 1 local block is defined, an array of interfaces is returned 102 | case []interface{}: 103 | for _, locals := range doc["locals"].([]interface{}) { 104 | // Get lines map to use when building each local row 105 | linesMap := locals.(model.Document)["_kics_lines"].(map[string]model.LineObject) 106 | // Remove all "_kics" arguments now that we have the lines map 107 | sanitizeDocument(locals.(model.Document)) 108 | for localName, localValue := range locals.(model.Document) { 109 | tfLocal, err := buildLocal(ctx, path, content, localName, localValue, linesMap) 110 | if err != nil { 111 | plugin.Logger(ctx).Error("terraform_local.listLocals", "build_local_error", err) 112 | return nil, err 113 | } 114 | d.StreamListItem(ctx, tfLocal) 115 | } 116 | } 117 | 118 | // If only 1 local block is defined, a model.Document is returned 119 | case model.Document: 120 | // Get lines map to use when building each local row 121 | linesMap := doc["locals"].(model.Document)["_kics_lines"].(map[string]model.LineObject) 122 | // Remove all "_kics" arguments now that we have the lines map 123 | sanitizeDocument(doc["locals"].(model.Document)) 124 | for localName, localValue := range doc["locals"].(model.Document) { 125 | tfLocal, err := buildLocal(ctx, path, content, localName, localValue, linesMap) 126 | if err != nil { 127 | plugin.Logger(ctx).Error("terraform_local.listLocals", "build_local_error", err) 128 | return nil, err 129 | } 130 | d.StreamListItem(ctx, tfLocal) 131 | } 132 | 133 | default: 134 | plugin.Logger(ctx).Error("terraform_local.listLocals", "unknown_type", localType) 135 | return nil, fmt.Errorf("failed to list locals in %s due to unknown type", path) 136 | } 137 | 138 | } 139 | } 140 | } 141 | return nil, nil 142 | } 143 | 144 | func buildLocal(ctx context.Context, path string, content []byte, name string, value interface{}, lineMap 
map[string]model.LineObject) (*terraformLocal, error) { 145 | tfLocal := new(terraformLocal) 146 | tfLocal.Path = path 147 | tfLocal.Name = name 148 | 149 | valStr, err := convertExpressionValue(value) 150 | if err != nil { 151 | plugin.Logger(ctx).Error("terraform_local.buildLocal", "convert_value_error", err) 152 | return nil, err 153 | } 154 | tfLocal.Value = valStr 155 | 156 | start, end, source, err := getBlock(ctx, path, content, "locals", []string{}) 157 | if err != nil { 158 | plugin.Logger(ctx).Error("terraform_local.buildLocal", "getBlock", err) 159 | return nil, err 160 | } 161 | tfLocal.StartLine = start.Line 162 | tfLocal.EndLine = end.Line 163 | tfLocal.Source = source 164 | 165 | return tfLocal, nil 166 | } 167 | -------------------------------------------------------------------------------- /terraform/table_terraform_module.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "reflect" 8 | 9 | "github.com/zclconf/go-cty/cty/gocty" 10 | ctyjson "github.com/zclconf/go-cty/cty/json" 11 | 12 | "github.com/Checkmarx/kics/pkg/model" 13 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 14 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 15 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" 16 | ) 17 | 18 | func tableTerraformModule(_ context.Context) *plugin.Table { 19 | return &plugin.Table{ 20 | Name: "terraform_module", 21 | Description: "Terraform module information.", 22 | List: &plugin.ListConfig{ 23 | ParentHydrate: tfConfigList, 24 | Hydrate: listModules, 25 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 26 | }, 27 | Columns: []*plugin.Column{ 28 | { 29 | Name: "name", 30 | Description: "Module name.", 31 | Type: proto.ColumnType_STRING, 32 | }, 33 | { 34 | Name: "module_source", 35 | Description: "Module source", 36 | Type: proto.ColumnType_STRING, 37 | }, 38 | { 39 | Name: "version", 40 | Description: "Module version", 41 | Type: proto.ColumnType_STRING, 42 | }, 43 | { 44 | Name: "arguments", 45 | Description: "Input arguments passed to this module.", 46 | Type: proto.ColumnType_JSON, 47 | Transform: transform.FromField("Arguments").Transform(NullIfEmptyMap), 48 | }, 49 | { 50 | Name: "count", 51 | Description: "The integer value for the count meta-argument if it's set as a number in a literal expression.", 52 | Type: proto.ColumnType_INT, 53 | }, 54 | { 55 | Name: "count_src", 56 | Description: "The count meta-argument accepts a whole number, and creates that many instances of the resource or module.", 57 | Type: proto.ColumnType_JSON, 58 | }, 59 | { 60 | Name: "for_each", 61 | Description: "The for_each meta-argument accepts a map or a set of strings, and creates an instance for each item in that map or set.", 62 | Type: proto.ColumnType_JSON, 63 | }, 64 | { 65 | Name: "depends_on", 66 | Description: "Use the depends_on meta-argument to handle hidden data source or module dependencies that Terraform can't automatically infer.", 67 | Type: proto.ColumnType_JSON, 68 | }, 69 | { 70 | Name: "provider", 71 | Description: "The provider meta-argument specifies which provider configuration to use for a data source, overriding Terraform's default behavior of selecting one based on the data source type name.", 72 | Type: proto.ColumnType_STRING, 73 | }, 74 | { 75 | Name: "start_line", 76 | Description: "Starting line number.", 77 | Type: proto.ColumnType_INT, 78 | }, 79 | { 80 | Name: "end_line", 81 | Description: "Ending line number.", 82 
| Type: proto.ColumnType_INT, 83 | }, 84 | { 85 | Name: "source", 86 | Description: "The block source code.", 87 | Type: proto.ColumnType_STRING, 88 | }, 89 | { 90 | Name: "path", 91 | Description: "Path to the file.", 92 | Type: proto.ColumnType_STRING, 93 | }, 94 | }, 95 | } 96 | } 97 | 98 | type terraformModule struct { 99 | Name string 100 | Path string 101 | StartLine int 102 | EndLine int 103 | Source string 104 | Arguments map[string]interface{} 105 | DependsOn []string 106 | // Count can be a number or refer to a local or variable 107 | Count int 108 | CountSrc string 109 | ForEach string 110 | // A module's provider arg will always reference a provider block 111 | Provider string 112 | ModuleSource string 113 | Version string 114 | } 115 | 116 | func listModules(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 117 | // The path comes from a parent hydrate, defaulting to the config paths or 118 | // available by the optional key column 119 | data := h.Item.(filePath) 120 | path := data.Path 121 | 122 | content, err := os.ReadFile(path) 123 | if err != nil { 124 | plugin.Logger(ctx).Error("terraform_module.listModules", "read_file_error", err, "path", path) 125 | return nil, err 126 | } 127 | 128 | // Return if the path is a TF plan or state path 129 | if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { 130 | return nil, nil 131 | } 132 | 133 | combinedParser, err := Parser() 134 | if err != nil { 135 | plugin.Logger(ctx).Error("terraform_module.listModules", "create_parser_error", err) 136 | return nil, err 137 | } 138 | 139 | var tfModule *terraformModule 140 | 141 | for _, parser := range combinedParser { 142 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 143 | if err != nil { 144 | plugin.Logger(ctx).Error("terraform_module.listModules", "parse_error", err, "path", path) 145 | return nil, fmt.Errorf("failed to parse file %s: %v", path, err) 146 | } 147 | 148 | for _, doc := range parsedDocs.Docs { 149 | if doc["module"] != nil { 150 | for moduleName, moduleData := range doc["module"].(model.Document) { 151 | tfModule, err = buildModule(ctx, path, content, moduleName, moduleData.(model.Document)) 152 | if err != nil { 153 | plugin.Logger(ctx).Error("terraform_module.listModules", "build_module_error", err) 154 | return nil, err 155 | } 156 | d.StreamListItem(ctx, tfModule) 157 | } 158 | } 159 | } 160 | } 161 | 162 | return nil, nil 163 | } 164 | 165 | func buildModule(ctx context.Context, path string, content []byte, name string, d model.Document) (*terraformModule, error) { 166 | tfModule := new(terraformModule) 167 | 168 | tfModule.Path = path 169 | tfModule.Name = name 170 | tfModule.Arguments = make(map[string]interface{}) 171 | 172 | // Remove all "_kics" arguments 173 | sanitizeDocument(d) 174 | 175 | startPosition, endPosition, source, err := getBlock(ctx, path, content, "module", []string{name}) 176 | if err != nil { 177 | plugin.Logger(ctx).Error("error getting details of block", err) 178 | return nil, err 179 | } 180 | 181 | tfModule.StartLine = startPosition.Line 182 | tfModule.Source = source 183 | tfModule.EndLine = endPosition.Line 184 | 185 | for k, v := range d { 186 | switch k { 187 | case "source": 188 | if reflect.TypeOf(v).String() != "string" { 189 | return tfModule, fmt.Errorf("The 'source' argument for module '%s' must be of type string", name) 190 | } 191 | tfModule.ModuleSource = v.(string) 192 | 193 | case "version": 194 | if reflect.TypeOf(v).String() != "string" {
195 | return tfModule, fmt.Errorf("The 'version' argument for module '%s' must be of type string", name) 196 | } 197 | tfModule.Version = v.(string) 198 | 199 | case "count": 200 | valStr, err := convertExpressionValue(v) 201 | if err != nil { 202 | plugin.Logger(ctx).Error("terraform_module.buildModule", "convert_count_error", err) 203 | return tfModule, err 204 | } 205 | tfModule.CountSrc = valStr 206 | 207 | // Only attempt to get the int value if the type is SimpleJSONValue 208 | if reflect.TypeOf(v).String() == "json.SimpleJSONValue" { 209 | var countVal int 210 | err := gocty.FromCtyValue(v.(ctyjson.SimpleJSONValue).Value, &countVal) 211 | // Log the error but don't return the err since we have count_src anyway 212 | if err != nil { 213 | plugin.Logger(ctx).Warn("terraform_module.buildModule", "convert_count_error", err) 214 | } 215 | tfModule.Count = countVal 216 | } 217 | 218 | case "provider": 219 | if reflect.TypeOf(v).String() != "string" { 220 | return tfModule, fmt.Errorf("The 'provider' argument for module '%s' must be of type string", name) 221 | } 222 | tfModule.Provider = v.(string) 223 | 224 | case "for_each": 225 | valStr, err := convertExpressionValue(v) 226 | if err != nil { 227 | plugin.Logger(ctx).Error("terraform_module.buildModule", "convert_for_each_error", err) 228 | return tfModule, err 229 | } 230 | tfModule.ForEach = valStr 231 | 232 | case "depends_on": 233 | if reflect.TypeOf(v).String() != "[]interface {}" { 234 | return tfModule, fmt.Errorf("The 'depends_on' argument for module '%s' must be of type list", name) 235 | } 236 | interfaces := v.([]interface{}) 237 | s := make([]string, len(interfaces)) 238 | for i, v := range interfaces { 239 | s[i] = fmt.Sprint(v) 240 | } 241 | tfModule.DependsOn = s 242 | 243 | case "lifecycle": 244 | // ignoring as lifecycle block is reserved for future versions, see 245 | // https://developer.hashicorp.com/terraform/language/modules/syntax#meta-arguments 246 | 247 | default: 248 | // safe to add any remaining arguments since already removed all "_kics" arguments 249 | tfModule.Arguments[k] = v 250 | 251 | } 252 | } 253 | return tfModule, nil 254 | } 255 | -------------------------------------------------------------------------------- /terraform/table_terraform_output.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "reflect" 8 | "strings" 9 | 10 | "github.com/Checkmarx/kics/pkg/model" 11 | p "github.com/Checkmarx/kics/pkg/parser/json" 12 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 13 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 14 | "github.com/zclconf/go-cty/cty/gocty" 15 | ctyjson "github.com/zclconf/go-cty/cty/json" 16 | ) 17 | 18 | func tableTerraformOutput(ctx context.Context) *plugin.Table { 19 | return &plugin.Table{ 20 | Name: "terraform_output", 21 | Description: "Terraform output information.", 22 | List: &plugin.ListConfig{ 23 | ParentHydrate: tfConfigList, 24 | Hydrate: listOutputs, 25 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 26 | }, 27 | Columns: []*plugin.Column{ 28 | { 29 | Name: "name", 30 | Description: "Output name.", 31 | Type: proto.ColumnType_STRING, 32 | }, 33 | { 34 | Name: "value", 35 | Description: "The value argument takes an expression whose result is to be returned to the user.", 36 | Type: proto.ColumnType_JSON, 37 | }, 38 | { 39 | Name: "description", 40 | Description: "Because the output values of a module are part of its user
interface, you can briefly describe the purpose of each value using the optional description argument.", 41 | Type: proto.ColumnType_STRING, 42 | }, 43 | { 44 | Name: "sensitive", 45 | Description: "An output can be marked as containing sensitive material using the optional sensitive argument.", 46 | Type: proto.ColumnType_BOOL, 47 | }, 48 | { 49 | Name: "depends_on", 50 | Description: "Use the depends_on meta-argument to handle hidden output or module dependencies that Terraform can't automatically infer.", 51 | Type: proto.ColumnType_JSON, 52 | }, 53 | { 54 | Name: "start_line", 55 | Description: "Starting line number.", 56 | Type: proto.ColumnType_INT, 57 | }, 58 | { 59 | Name: "end_line", 60 | Description: "Ending line number.", 61 | Type: proto.ColumnType_INT, 62 | }, 63 | { 64 | Name: "source", 65 | Description: "The block source code.", 66 | Type: proto.ColumnType_STRING, 67 | }, 68 | { 69 | Name: "path", 70 | Description: "Path to the file.", 71 | Type: proto.ColumnType_STRING, 72 | }, 73 | }, 74 | } 75 | } 76 | 77 | type terraformOutput struct { 78 | Name string 79 | Path string 80 | StartLine int 81 | EndLine int 82 | Source string 83 | DependsOn []string 84 | Description string 85 | Sensitive bool 86 | Value string 87 | //Value cty.Value `column:"value,jsonb"` 88 | //Value interface{} 89 | } 90 | 91 | func listOutputs(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 92 | // The path comes from a parent hydrate, defaulting to the config paths or 93 | // available by the optional key column 94 | pathInfo := h.Item.(filePath) 95 | path := pathInfo.Path 96 | 97 | content, err := os.ReadFile(path) 98 | if err != nil { 99 | plugin.Logger(ctx).Error("terraform_output.listOutputs", "read_file_error", err, "path", path) 100 | return nil, err 101 | } 102 | 103 | // Return if the path is a TF plan path 104 | if pathInfo.IsTFPlanFilePath || isTerraformPlan(content) { 105 | return nil, nil 106 | } 107 | 108 | var docs []model.Document 109 | 110 | // Check if the file contains TF state 111 | if pathInfo.IsTFStateFilePath { 112 | // Initialize the JSON parser 113 | jsonParser := p.Parser{} 114 | 115 | // Parse the file content using the JSON parser 116 | var str string 117 | documents, _, err := jsonParser.Parse(str, content) 118 | if err != nil { 119 | plugin.Logger(ctx).Error("terraform_output.listOutputs", "state_parse_error", err, "path", path) 120 | return nil, fmt.Errorf("failed to parse state file %s: %v", path, err) 121 | } 122 | 123 | docs = append(docs, documents...) 124 | } else { 125 | // Build the terraform parser 126 | combinedParser, err := Parser() 127 | if err != nil { 128 | plugin.Logger(ctx).Error("terraform_output.listOutputs", "create_parser_error", err) 129 | return nil, err 130 | } 131 | 132 | for _, parser := range combinedParser { 133 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 134 | if err != nil { 135 | plugin.Logger(ctx).Error("terraform_output.listOutputs", "parse_error", err, "path", path) 136 | return nil, fmt.Errorf("failed to parse file %s: %v", path, err) 137 | } 138 | docs = append(docs, parsedDocs.Docs...) 
139 | } 140 | } 141 | 142 | for _, doc := range docs { 143 | if doc["output"] != nil { 144 | // For each output, scan its arguments 145 | for outputName, outputData := range doc["output"].(model.Document) { 146 | tfOutput, err := buildOutput(ctx, pathInfo.IsTFStateFilePath, path, content, outputName, outputData.(model.Document)) 147 | if err != nil { 148 | plugin.Logger(ctx).Error("terraform_output.listOutputs", "build_output_error", err) 149 | return nil, err 150 | } 151 | d.StreamListItem(ctx, tfOutput) 152 | } 153 | } else if doc["outputs"] != nil { 154 | // For each output, scan its arguments 155 | for outputName, outputData := range convertModelDocumentToMapInterface(doc["outputs"]) { 156 | if !strings.HasPrefix(outputName, "_kics") { 157 | tfOutput, err := buildOutput(ctx, pathInfo.IsTFStateFilePath, path, content, outputName, convertModelDocumentToMapInterface(outputData)) 158 | if err != nil { 159 | plugin.Logger(ctx).Error("terraform_output.listOutputs", "build_output_error", err) 160 | return nil, err 161 | } 162 | d.StreamListItem(ctx, tfOutput) 163 | } 164 | } 165 | } 166 | } 167 | 168 | return nil, nil 169 | } 170 | 171 | func buildOutput(ctx context.Context, isTFStateFilePath bool, path string, content []byte, name string, d model.Document) (terraformOutput, error) { 172 | var tfOutput terraformOutput 173 | 174 | tfOutput.Path = path 175 | tfOutput.Name = name 176 | 177 | // Remove all "_kics" arguments 178 | sanitizeDocument(d) 179 | 180 | if isTFStateFilePath { 181 | startLine, endLine, source, err := findBlockLinesFromJSON(ctx, path, "outputs", name) 182 | if err != nil { 183 | return tfOutput, err 184 | } 185 | 186 | tfOutput.StartLine = startLine 187 | tfOutput.EndLine = endLine 188 | tfOutput.Source = source 189 | } else { 190 | start, end, source, err := getBlock(ctx, path, content, "output", []string{name}) 191 | if err != nil { 192 | plugin.Logger(ctx).Error("terraform_output.buildOutput", "getBlock", err) 193 | return tfOutput, err 194 | } 195 | tfOutput.StartLine = start.Line 196 | tfOutput.EndLine = end.Line 197 | tfOutput.Source = source 198 | } 199 | for k, v := range d { 200 | switch k { 201 | case "description": 202 | if reflect.TypeOf(v).String() != "string" { 203 | return tfOutput, fmt.Errorf("The 'description' argument for output '%s' must be of type string", name) 204 | } 205 | tfOutput.Description = v.(string) 206 | 207 | case "value": 208 | valStr, err := convertExpressionValue(v) 209 | if err != nil { 210 | plugin.Logger(ctx).Error("terraform_output.buildOutput", "convert_value_error", err) 211 | return tfOutput, err 212 | } 213 | tfOutput.Value = valStr 214 | 215 | case "sensitive": 216 | // Numbers and bools are both parsed as SimpleJSONValue, so we type check 217 | // through the gocty conversion error handling 218 | var sensitiveVal bool 219 | err := gocty.FromCtyValue(v.(ctyjson.SimpleJSONValue).Value, &sensitiveVal) 220 | if err != nil { 221 | return tfOutput, fmt.Errorf("Failed to resolve 'sensitive' argument for output '%s': %w", name, err) 222 | } 223 | tfOutput.Sensitive = sensitiveVal 224 | 225 | case "depends_on": 226 | if reflect.TypeOf(v).String() != "[]interface {}" { 227 | return tfOutput, fmt.Errorf("The 'depends_on' argument for output '%s' must be of type list", name) 228 | } 229 | interfaces := v.([]interface{}) 230 | s := make([]string, len(interfaces)) 231 | for i, v := range interfaces { 232 | s[i] = fmt.Sprint(v) 233 | } 234 | tfOutput.DependsOn = s 235 | } 236 | } 237 | return tfOutput, nil 238 | } 239 | 
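The least obvious step in buildOutput above is the gocty conversion for the "sensitive" argument: numbers and booleans are both parsed into ctyjson.SimpleJSONValue, so the bool conversion doubles as the type check. The following is a minimal standalone sketch of that conversion, assuming a hypothetical main package and a hard-coded JSON input chosen only for illustration; it is not part of the plugin source.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/zclconf/go-cty/cty/gocty"
	ctyjson "github.com/zclconf/go-cty/cty/json"
)

func main() {
	// Hypothetical input standing in for a parsed sensitive = true argument.
	var v ctyjson.SimpleJSONValue
	if err := json.Unmarshal([]byte(`true`), &v); err != nil {
		panic(err)
	}

	// Same conversion buildOutput performs: if the underlying cty value is
	// not a boolean, FromCtyValue returns an error instead of a value.
	var sensitive bool
	if err := gocty.FromCtyValue(v.Value, &sensitive); err != nil {
		fmt.Println("not a bool:", err)
		return
	}
	fmt.Println("sensitive =", sensitive) // prints: sensitive = true
}

This also mirrors a design difference visible elsewhere in the plugin: a failed count conversion is only logged, because count_src already preserves the raw expression, while a non-boolean sensitive value is returned to the caller as an error.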
-------------------------------------------------------------------------------- /terraform/table_terraform_provider.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "reflect" 8 | 9 | "github.com/Checkmarx/kics/pkg/model" 10 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 11 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 12 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" 13 | ) 14 | 15 | func tableTerraformProvider(ctx context.Context) *plugin.Table { 16 | return &plugin.Table{ 17 | Name: "terraform_provider", 18 | Description: "Terraform provider information.", 19 | List: &plugin.ListConfig{ 20 | ParentHydrate: tfConfigList, 21 | Hydrate: listProviders, 22 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 23 | }, 24 | Columns: []*plugin.Column{ 25 | { 26 | Name: "name", 27 | Description: "Provider name.", 28 | Type: proto.ColumnType_STRING, 29 | }, 30 | { 31 | Name: "arguments", 32 | Description: "Provider arguments.", 33 | Type: proto.ColumnType_JSON, 34 | Transform: transform.FromField("Arguments").Transform(NullIfEmptyMap), 35 | }, 36 | { 37 | Name: "alias", 38 | Description: "The alias meta-argument to provide an extra name segment.", 39 | Type: proto.ColumnType_STRING, 40 | }, 41 | // Version is deprecated as of Terraform 0.13, but some older files may still use it 42 | { 43 | Name: "version", 44 | Description: "The version meta-argument specifies a version constraint for a provider, and works the same way as the version argument in a required_providers block.", 45 | Type: proto.ColumnType_STRING, 46 | }, 47 | { 48 | Name: "start_line", 49 | Description: "Starting line number.", 50 | Type: proto.ColumnType_INT, 51 | }, 52 | { 53 | Name: "end_line", 54 | Description: "Ending line number.", 55 | Type: proto.ColumnType_INT, 56 | }, 57 | { 58 | Name: "source", 59 | Description: "The block source code.", 60 | Type: proto.ColumnType_STRING, 61 | }, 62 | { 63 | Name: "path", 64 | Description: "Path to the file.", 65 | Type: proto.ColumnType_STRING, 66 | }, 67 | }, 68 | } 69 | } 70 | 71 | type terraformProvider struct { 72 | Name string 73 | Path string 74 | StartLine int 75 | EndLine int 76 | Source string 77 | Arguments map[string]interface{} 78 | Alias string 79 | Version string 80 | } 81 | 82 | func listProviders(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 83 | // The path comes from a parent hydrate, defaulting to the config paths or 84 | // available by the optional key column 85 | data := h.Item.(filePath) 86 | path := data.Path 87 | 88 | content, err := os.ReadFile(path) 89 | if err != nil { 90 | plugin.Logger(ctx).Error("terraform_provider.listProviders", "read_file_error", err, "path", path) 91 | return nil, err 92 | } 93 | 94 | // Return if the path is a TF plan or state path 95 | if data.IsTFPlanFilePath || isTerraformPlan(content) || data.IsTFStateFilePath { 96 | return nil, nil 97 | } 98 | 99 | combinedParser, err := Parser() 100 | if err != nil { 101 | plugin.Logger(ctx).Error("terraform_provider.listProviders", "create_parser_error", err) 102 | return nil, err 103 | } 104 | 105 | var tfProvider terraformProvider 106 | 107 | for _, parser := range combinedParser { 108 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 109 | if err != nil { 110 | plugin.Logger(ctx).Error("terraform_provider.listProviders", "parse_error", err, "path", path) 111 | return nil, fmt.Errorf("failed
to parse file %s: %v", path, err) 112 | } 113 | 114 | for _, doc := range parsedDocs.Docs { 115 | if doc["provider"] != nil { 116 | // Providers are grouped by provider name 117 | for providerName, providers := range doc["provider"].(model.Document) { 118 | // If more than 1 provider with the same name, an array of interfaces is returned 119 | switch providerType := providers.(type) { 120 | 121 | case []interface{}: 122 | for _, providerData := range providers.([]interface{}) { 123 | // For each provider, scan its arguments 124 | tfProvider, err = buildProvider(ctx, path, content, providerName, providerData.(model.Document)) 125 | if err != nil { 126 | plugin.Logger(ctx).Error("terraform_provider.listProviders", "build_provider_error", err) 127 | return nil, err 128 | } 129 | d.StreamListItem(ctx, tfProvider) 130 | } 131 | 132 | // If only 1 provider has the name, a model.Document is returned 133 | case model.Document: 134 | // For each provider, scan its arguments 135 | tfProvider, err = buildProvider(ctx, path, content, providerName, providers.(model.Document)) 136 | if err != nil { 137 | plugin.Logger(ctx).Error("terraform_provider.listProviders", "build_provider_error", err) 138 | return nil, err 139 | } 140 | d.StreamListItem(ctx, tfProvider) 141 | 142 | default: 143 | plugin.Logger(ctx).Error("terraform_provider.listProviders", "unknown_type", providerType) 144 | return nil, fmt.Errorf("Failed to list providers due to unknown type for provider %s", providerName) 145 | } 146 | 147 | } 148 | } 149 | } 150 | } 151 | 152 | return nil, nil 153 | } 154 | 155 | func buildProvider(ctx context.Context, path string, content []byte, name string, d model.Document) (terraformProvider, error) { 156 | var tfProvider terraformProvider 157 | tfProvider.Path = path 158 | tfProvider.Name = name 159 | tfProvider.Arguments = make(map[string]interface{}) 160 | 161 | // Remove all "_kics" arguments 162 | sanitizeDocument(d) 163 | 164 | start, end, source, err := getBlock(ctx, path, content, "provider", []string{name}) 165 | if err != nil { 166 | plugin.Logger(ctx).Error("terraform_provider.buildProvider", "getBlock", err) 167 | return tfProvider, err 168 | } 169 | tfProvider.StartLine = start.Line 170 | tfProvider.EndLine = end.Line 171 | tfProvider.Source = source 172 | 173 | for k, v := range d { 174 | switch k { 175 | case "alias": 176 | if reflect.TypeOf(v).String() != "string" { 177 | return tfProvider, fmt.Errorf("The 'alias' argument for provider '%s' must be of type string", name) 178 | } 179 | tfProvider.Alias = v.(string) 180 | 181 | case "version": 182 | if reflect.TypeOf(v).String() != "string" { 183 | return tfProvider, fmt.Errorf("The 'version' argument for provider '%s' must be of type string", name) 184 | } 185 | tfProvider.Version = v.(string) 186 | 187 | // It's safe to add any remaining arguments since we've already removed all "_kics" arguments 188 | default: 189 | tfProvider.Arguments[k] = v 190 | } 191 | } 192 | 193 | return tfProvider, nil 194 | } 195 | -------------------------------------------------------------------------------- /terraform/table_terraform_resource.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "reflect" 8 | "strings" 9 | 10 | "github.com/Checkmarx/kics/pkg/model" 11 | p "github.com/Checkmarx/kics/pkg/parser/json" 12 | 13 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 14 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 15 | 
"github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" 16 | "github.com/zclconf/go-cty/cty/gocty" 17 | ctyjson "github.com/zclconf/go-cty/cty/json" 18 | ) 19 | 20 | func tableTerraformResource(ctx context.Context) *plugin.Table { 21 | return &plugin.Table{ 22 | Name: "terraform_resource", 23 | Description: "Terraform resource information.", 24 | List: &plugin.ListConfig{ 25 | ParentHydrate: tfConfigList, 26 | Hydrate: listResources, 27 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 28 | }, 29 | Columns: []*plugin.Column{ 30 | { 31 | Name: "name", 32 | Description: "Resource name.", 33 | Type: proto.ColumnType_STRING, 34 | }, 35 | { 36 | Name: "type", 37 | Description: "Resource type.", 38 | Type: proto.ColumnType_STRING, 39 | }, 40 | { 41 | Name: "mode", 42 | Description: "The type of resource Terraform creates, either a resource (managed) or data source (data).", 43 | Type: proto.ColumnType_STRING, 44 | }, 45 | { 46 | Name: "address", 47 | Description: "The absolute resource address.", 48 | Type: proto.ColumnType_STRING, 49 | }, 50 | { 51 | Name: "arguments", 52 | Description: "Resource arguments.", 53 | Type: proto.ColumnType_JSON, 54 | Transform: transform.FromField("Arguments").Transform(NullIfEmptyMap), 55 | }, 56 | { 57 | Name: "attributes", 58 | Description: "Resource attributes. The value will populate only for the resources that come from a state file.", 59 | Type: proto.ColumnType_JSON, 60 | }, 61 | { 62 | Name: "attributes_std", 63 | Description: "Resource attributes. Contains the value from either the arguments or the attributes property.", 64 | Type: proto.ColumnType_JSON, 65 | }, 66 | 67 | // Meta-arguments 68 | { 69 | Name: "count", 70 | Description: "The integer value for the count meta-argument if it's set as a number in a literal expression.", 71 | Type: proto.ColumnType_INT, 72 | }, 73 | { 74 | Name: "count_src", 75 | Description: "The count meta-argument accepts a whole number, and creates that many instances of the resource or module.", 76 | Type: proto.ColumnType_JSON, 77 | }, 78 | { 79 | Name: "for_each", 80 | Description: "The for_each meta-argument accepts a map or a set of strings, and creates an instance for each item in that map or set.", 81 | Type: proto.ColumnType_JSON, 82 | }, 83 | { 84 | Name: "depends_on", 85 | Description: "Use the depends_on meta-argument to handle hidden resource or module dependencies that Terraform can't automatically infer.", 86 | Type: proto.ColumnType_JSON, 87 | }, 88 | { 89 | Name: "lifecycle", 90 | Description: "The lifecycle meta-argument is a nested block that can appear within a resource block.", 91 | Type: proto.ColumnType_JSON, 92 | Transform: transform.FromField("Lifecycle").Transform(NullIfEmptyMap), 93 | }, 94 | { 95 | Name: "provider", 96 | Description: "The provider meta-argument specifies which provider configuration to use for a resource, overriding Terraform's default behavior of selecting one based on the resource type name.", 97 | Type: proto.ColumnType_STRING, 98 | }, 99 | { 100 | Name: "start_line", 101 | Description: "Starting line number.", 102 | Type: proto.ColumnType_INT, 103 | }, 104 | { 105 | Name: "end_line", 106 | Description: "Ending line number.", 107 | Type: proto.ColumnType_INT, 108 | }, 109 | { 110 | Name: "source", 111 | Description: "The block source code.", 112 | Type: proto.ColumnType_STRING, 113 | }, 114 | { 115 | Name: "path", 116 | Description: "Path to the file.", 117 | Type: proto.ColumnType_STRING, 118 | }, 119 | }, 120 | } 121 | } 122 | 123 | type terraformResource struct { 
124 | Name string 125 | Type string 126 | Path string 127 | Mode string 128 | StartLine int 129 | Source string 130 | EndLine int 131 | Arguments map[string]interface{} 132 | DependsOn []string 133 | // Count can be a number or refer to a local or variable 134 | Count int 135 | CountSrc string 136 | ForEach string 137 | // A resource's provider arg will always reference a provider block 138 | Provider string 139 | Lifecycle map[string]interface{} 140 | Attributes interface{} 141 | AttributesStd interface{} 142 | Address string 143 | } 144 | 145 | func listResources(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 146 | // The path comes from a parent hydrate, defaulting to the config paths or 147 | // available by the optional key column 148 | pathInfo := h.Item.(filePath) 149 | path := pathInfo.Path 150 | 151 | // Read the content from the file 152 | content, err := os.ReadFile(path) 153 | if err != nil { 154 | plugin.Logger(ctx).Error("terraform_resource.listResources", "read_file_error", err, "path", path) 155 | return nil, err 156 | } 157 | 158 | // if the file contains TF plan then set IsTFPlanFilePath to true 159 | if isTerraformPlan(content) { 160 | pathInfo.IsTFPlanFilePath = true 161 | } 162 | 163 | var docs []model.Document 164 | 165 | if pathInfo.IsTFPlanFilePath { 166 | planContent, err := getTerraformPlanContentFromBytes(content) 167 | if err != nil { 168 | plugin.Logger(ctx).Error("terraform_resource.listResources", "get_plan_content_error", err, "path", path) 169 | return nil, err 170 | } 171 | lookupPath := planContent.PlannedValues.RootModule 172 | 173 | for _, resource := range lookupPath.Resources { 174 | tfResource, err := buildTerraformPlanResource(ctx, path, resource) 175 | if err != nil { 176 | return nil, err 177 | } 178 | 179 | d.StreamListItem(ctx, tfResource) 180 | } 181 | } else if pathInfo.IsTFStateFilePath { // Check if the file contains TF plan or state 182 | // Initialize the JSON parser 183 | jsonParser := p.Parser{} 184 | 185 | // Parse the file content using the JSON parser 186 | var str string 187 | documents, _, err := jsonParser.Parse(str, content) 188 | if err != nil { 189 | plugin.Logger(ctx).Error("terraform_resource.listResources", "parse_error", err, "path", path) 190 | return nil, fmt.Errorf("failed to parse plan or state file %s: %v", path, err) 191 | } 192 | docs = append(docs, documents...) 193 | } else { 194 | // Build the terraform parser 195 | combinedParser, err := Parser() 196 | if err != nil { 197 | plugin.Logger(ctx).Error("terraform_resource.listResources", "create_parser_error", err) 198 | return nil, err 199 | } 200 | 201 | for _, parser := range combinedParser { 202 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 203 | if err != nil { 204 | plugin.Logger(ctx).Error("terraform_resource.listResources", "parse_error", err, "path", path) 205 | return nil, fmt.Errorf("failed to parse file %s: %v", path, err) 206 | } 207 | docs = append(docs, parsedDocs.Docs...) 
208 | } 209 | } 210 | 211 | // Stream the data 212 | for _, doc := range docs { 213 | if doc["resource"] != nil { 214 | // Resources are grouped by resource type 215 | for resourceType, resources := range convertModelDocumentToMapInterface(doc["resource"]) { 216 | // For each resource, scan its arguments 217 | for resourceName, resourceData := range convertModelDocumentToMapInterface(resources) { 218 | tfResource, err := buildResource(ctx, pathInfo.IsTFPlanFilePath, content, path, resourceType, resourceName, convertModelDocumentToMapInterface(resourceData)) 219 | if err != nil { 220 | plugin.Logger(ctx).Error("terraform_resource.listResources", "build_resource_error", err) 221 | return nil, err 222 | } 223 | // Copy the arguments data into attributes_std 224 | tfResource.AttributesStd = tfResource.Arguments 225 | 226 | if tfResource.Address == "" { 227 | tfResource.Address = fmt.Sprintf("%s.%s", tfResource.Type, tfResource.Name) 228 | } 229 | 230 | d.StreamListItem(ctx, tfResource) 231 | } 232 | } 233 | } else if doc["resources"] != nil { // state file returns resources 234 | for _, resource := range doc["resources"].([]interface{}) { 235 | resourceData := convertModelDocumentToMapInterface(resource) 236 | 237 | // The property instances contains the configurations of the resource created by terraform 238 | // it contains the full configuration, i.e the attributes passed in the config and attributes generated after the resource creation. 239 | // The instances attribute can contain more than 1 resource configurations if 'count', 'for_each' or any 'dynamic blocks' has been used. 240 | // In that case table should list all the configuration as separate row, as the main intention of the table is to show the terraform configuration per resource. 241 | for _, rs := range resourceData["instances"].([]interface{}) { 242 | tfResource, err := buildResource(ctx, pathInfo.IsTFStateFilePath, content, path, resourceData["type"].(string), resourceData["name"].(string), resourceData) 243 | if err != nil { 244 | plugin.Logger(ctx).Error("terraform_resource.listResources", "build_resource_error", err) 245 | return nil, err 246 | } 247 | 248 | // Extract the value of the 'attributes' property 249 | convertedValue := convertModelDocumentToMapInterface(rs) 250 | cleanedValue := removeKicsLabels(convertedValue).(map[string]interface{}) 251 | for property := range cleanedValue { 252 | if property == "attributes" { 253 | tfResource.Attributes = cleanedValue[property] 254 | } 255 | 256 | // Append the index for unique identification of resources that have been created using "count" or "for_each" 257 | if property == "index_key" { 258 | if index, ok := cleanedValue[property].(float64); ok { 259 | tfResource.Address = fmt.Sprintf("%s.%s[%v]", tfResource.Type, tfResource.Name, index) 260 | } 261 | } 262 | } 263 | 264 | // Copy the attributes value to attributes_std 265 | tfResource.AttributesStd = tfResource.Attributes 266 | 267 | // If the address is empty (resource from terraform config, i.e. 
.tf files and the terraform plan files) 268 | // Form the address string appending the resource type and resource name 269 | if tfResource.Address == "" { 270 | tfResource.Address = fmt.Sprintf("%s.%s", tfResource.Type, tfResource.Name) 271 | } 272 | 273 | d.StreamListItem(ctx, tfResource) 274 | } 275 | } 276 | } 277 | } 278 | 279 | return nil, nil 280 | } 281 | 282 | func buildResource(ctx context.Context, isTFFilePath bool, content []byte, path string, resourceType string, name string, d model.Document) (*terraformResource, error) { 283 | tfResource := new(terraformResource) 284 | 285 | tfResource.Path = path 286 | tfResource.Type = resourceType 287 | tfResource.Name = name 288 | tfResource.Arguments = make(map[string]interface{}) 289 | tfResource.Lifecycle = make(map[string]interface{}) 290 | 291 | // Remove all "_kics" arguments 292 | sanitizeDocument(d) 293 | 294 | if isTFFilePath { 295 | startLine, endLine, source, err := findBlockLinesFromJSON(ctx, path, "resources", resourceType, name) 296 | if err != nil { 297 | return nil, err 298 | } 299 | 300 | tfResource.StartLine = startLine 301 | tfResource.EndLine = endLine 302 | tfResource.Source = source 303 | } else { 304 | startPosition, endPosition, source, err := getBlock(ctx, path, content, "resource", []string{resourceType, name}) 305 | if err != nil { 306 | plugin.Logger(ctx).Error("error getting details of block", err) 307 | return nil, err 308 | } 309 | 310 | tfResource.StartLine = startPosition.Line 311 | tfResource.Source = source 312 | tfResource.EndLine = endPosition.Line 313 | } 314 | 315 | for k, v := range d { 316 | switch k { 317 | case "count": 318 | // The count_src column can handle numbers or strings (expressions) 319 | valStr, err := convertExpressionValue(v) 320 | if err != nil { 321 | plugin.Logger(ctx).Error("terraform_resource.buildResource", "convert_count_error", err) 322 | return tfResource, err 323 | } 324 | tfResource.CountSrc = valStr 325 | 326 | // Only attempt to get the int value if the type is SimpleJSONValue 327 | if reflect.TypeOf(v).String() == "json.SimpleJSONValue" { 328 | var countVal int 329 | err := gocty.FromCtyValue(v.(ctyjson.SimpleJSONValue).Value, &countVal) 330 | // Log the error but don't return the err since we have count_src anyway 331 | if err != nil { 332 | plugin.Logger(ctx).Warn("terraform_resource.buildResource", "convert_count_error", err) 333 | } 334 | tfResource.Count = countVal 335 | } 336 | 337 | case "provider": 338 | if reflect.TypeOf(v).String() != "string" { 339 | return tfResource, fmt.Errorf("The 'provider' argument for resource '%s' must be of type string", name) 340 | } 341 | tfResource.Provider = v.(string) 342 | 343 | case "name": 344 | if reflect.TypeOf(v).String() != "string" { 345 | return tfResource, fmt.Errorf("The 'name' argument for resource '%s' must be of type string", name) 346 | } 347 | if tfResource.Name == "" { 348 | tfResource.Name = v.(string) 349 | } 350 | 351 | case "type": 352 | if reflect.TypeOf(v).String() != "string" { 353 | return tfResource, fmt.Errorf("The 'type' argument for resource '%s' must be of type string", name) 354 | } 355 | tfResource.Arguments["type"] = v 356 | if tfResource.Name == "" { 357 | tfResource.Type = v.(string) 358 | } 359 | 360 | case "mode": 361 | if reflect.TypeOf(v).String() != "string" { 362 | return tfResource, fmt.Errorf("The 'mode' argument for resource '%s' must be of type string", name) 363 | } 364 | tfResource.Mode = v.(string) 365 | 366 | case "for_each": 367 | valStr, err := convertExpressionValue(v) 368 | if 
err != nil { 369 | plugin.Logger(ctx).Error("terraform_resource.buildResource", "convert_for_each_error", err) 370 | return tfResource, err 371 | } 372 | tfResource.ForEach = valStr 373 | 374 | case "lifecycle": 375 | if reflect.TypeOf(v).String() != "model.Document" { 376 | return tfResource, fmt.Errorf("The 'lifecycle' argument for resource '%s' must be of type map", name) 377 | } 378 | for k, v := range v.(model.Document) { 379 | if !strings.HasPrefix(k, "_kics") { 380 | tfResource.Lifecycle[k] = v 381 | } 382 | } 383 | 384 | case "depends_on": 385 | if reflect.TypeOf(v).String() != "[]interface {}" { 386 | return tfResource, fmt.Errorf("The 'depends_on' argument for resource '%s' must be of type list", name) 387 | } 388 | interfaces := v.([]interface{}) 389 | s := make([]string, len(interfaces)) 390 | for i, v := range interfaces { 391 | s[i] = fmt.Sprint(v) 392 | } 393 | tfResource.DependsOn = s 394 | 395 | case "instances": 396 | 397 | // It's safe to add any remaining arguments since we've already removed all "_kics" arguments 398 | default: 399 | tfResource.Arguments[k] = v 400 | } 401 | } 402 | 403 | return tfResource, nil 404 | } 405 | 406 | // convertModelDocumentToMapInterface takes the documents in model.Document format and converts it into map[string]interface{} 407 | func convertModelDocumentToMapInterface(data interface{}) map[string]interface{} { 408 | result := map[string]interface{}{} 409 | 410 | switch item := data.(type) { 411 | case model.Document: 412 | result = item 413 | case map[string]interface{}: 414 | result = item 415 | } 416 | return result 417 | } 418 | 419 | func removeKicsLabels(data interface{}) interface{} { 420 | if dataMap, isMap := data.(map[string]interface{}); isMap { 421 | for key, value := range dataMap { 422 | if strings.HasPrefix(key, "_kics") { 423 | delete(dataMap, key) 424 | } else { 425 | dataMap[key] = removeKicsLabels(value) 426 | } 427 | } 428 | return dataMap 429 | } else if dataList, isList := data.([]interface{}); isList { 430 | for i, item := range dataList { 431 | dataList[i] = removeKicsLabels(item) 432 | } 433 | return dataList 434 | } 435 | return data 436 | } 437 | -------------------------------------------------------------------------------- /terraform/table_terraform_variable.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "os" 7 | "reflect" 8 | "regexp" 9 | "strings" 10 | 11 | "github.com/Checkmarx/kics/pkg/model" 12 | p "github.com/Checkmarx/kics/pkg/parser/json" 13 | "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" 14 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 15 | "github.com/zclconf/go-cty/cty/gocty" 16 | ctyjson "github.com/zclconf/go-cty/cty/json" 17 | ) 18 | 19 | func tableTerraformVariable(ctx context.Context) *plugin.Table { 20 | return &plugin.Table{ 21 | Name: "terraform_variable", 22 | Description: "Terraform variable information.", 23 | List: &plugin.ListConfig{ 24 | ParentHydrate: tfConfigList, 25 | Hydrate: listVariables, 26 | KeyColumns: plugin.OptionalColumns([]string{"path"}), 27 | }, 28 | Columns: []*plugin.Column{ 29 | { 30 | Name: "name", 31 | Description: "The variable name.", 32 | Type: proto.ColumnType_STRING, 33 | }, 34 | { 35 | Name: "type", 36 | Description: "The variable type.", 37 | Type: proto.ColumnType_STRING, 38 | }, 39 | { 40 | Name: "default_value", 41 | Description: "The default value for the variable.", 42 | Type: proto.ColumnType_JSON, 43 | }, 44 | { 45 | Name: 
"description", 46 | Description: "Because the variable values of a module are part of its user interface, you can briefly describe the purpose of each value using the optional description argument.", 47 | Type: proto.ColumnType_STRING, 48 | }, 49 | { 50 | Name: "sensitive", 51 | Description: "An variable can be marked as containing sensitive material using the optional sensitive argument.", 52 | Type: proto.ColumnType_BOOL, 53 | }, 54 | { 55 | Name: "start_line", 56 | Description: "Starting line number.", 57 | Type: proto.ColumnType_INT, 58 | }, 59 | { 60 | Name: "validation", 61 | Description: "The validation applied on the variable.", 62 | Type: proto.ColumnType_STRING, 63 | }, 64 | { 65 | Name: "end_line", 66 | Description: "Ending line number.", 67 | Type: proto.ColumnType_INT, 68 | }, 69 | { 70 | Name: "source", 71 | Description: "The block source code.", 72 | Type: proto.ColumnType_STRING, 73 | }, 74 | { 75 | Name: "path", 76 | Description: "Path to the file.", 77 | Type: proto.ColumnType_STRING, 78 | }, 79 | }, 80 | } 81 | } 82 | 83 | type terraformVariable struct { 84 | Name string 85 | Type string 86 | Path string 87 | StartLine int 88 | EndLine int 89 | Source string 90 | Description string 91 | Sensitive bool 92 | DefaultValue string 93 | Validation string 94 | } 95 | 96 | func listVariables(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) { 97 | // The path comes from a parent hydrate, defaulting to the config paths or 98 | // available by the optional key column 99 | pathInfo := h.Item.(filePath) 100 | path := pathInfo.Path 101 | 102 | content, err := os.ReadFile(path) 103 | if err != nil { 104 | plugin.Logger(ctx).Error("terraform_variable.listVariables", "read_file_error", err, "path", path) 105 | return nil, err 106 | } 107 | 108 | // Return if the path is a TF plan path 109 | if pathInfo.IsTFPlanFilePath || isTerraformPlan(content) { 110 | return nil, nil 111 | } 112 | 113 | var docs []model.Document 114 | 115 | // Check if the file contains TF state 116 | if pathInfo.IsTFStateFilePath { 117 | // Initialize the JSON parser 118 | jsonParser := p.Parser{} 119 | 120 | // Parse the file content using the JSON parser 121 | var str string 122 | documents, _, err := jsonParser.Parse(str, content) 123 | if err != nil { 124 | plugin.Logger(ctx).Error("terraform_variable.listVariables", "state_parse_error", err, "path", path) 125 | return nil, fmt.Errorf("failed to parse state file %s: %v", path, err) 126 | } 127 | 128 | docs = append(docs, documents...) 129 | } else { 130 | // Build the terraform parser 131 | combinedParser, err := Parser() 132 | if err != nil { 133 | plugin.Logger(ctx).Error("terraform_variable.listVariables", "create_parser_error", err) 134 | return nil, err 135 | } 136 | 137 | for _, parser := range combinedParser { 138 | parsedDocs, err := ParseContent(ctx, d, path, content, parser) 139 | if err != nil { 140 | plugin.Logger(ctx).Error("terraform_variable.listVariables", "parse_error", err, "path", path) 141 | return nil, fmt.Errorf("failed to parse file %s: %v", path, err) 142 | } 143 | docs = append(docs, parsedDocs.Docs...) 
144 | } 145 | } 146 | 147 | for _, doc := range docs { 148 | if doc["variable"] != nil { 149 | // For each variable, scan its arguments 150 | for variableName, variableData := range doc["variable"].(model.Document) { 151 | tfVariable, err := buildVariable(ctx, pathInfo.IsTFStateFilePath, path, content, variableName, variableData.(model.Document)) 152 | if err != nil { 153 | plugin.Logger(ctx).Error("terraform_variable.listVariables", "build_variable_error", err) 154 | return nil, err 155 | } 156 | d.StreamListItem(ctx, tfVariable) 157 | } 158 | } else if doc["variables"] != nil { 159 | // For each variable, scan its arguments 160 | for varName, variableData := range convertModelDocumentToMapInterface(doc["variables"]) { 161 | // if !strings.HasPrefix(varName, "_kics") { 162 | tfVar, err := buildVariable(ctx, pathInfo.IsTFStateFilePath, path, content, varName, convertModelDocumentToMapInterface(variableData)) 163 | if err != nil { 164 | plugin.Logger(ctx).Error("terraform_variable.listVariables", "build_variable_error", err) 165 | return nil, err 166 | } 167 | d.StreamListItem(ctx, tfVar) 168 | // } 169 | } 170 | } 171 | } 172 | 173 | return nil, nil 174 | } 175 | 176 | func buildVariable(ctx context.Context, isTFStateFilePath bool, path string, content []byte, name string, d model.Document) (terraformVariable, error) { 177 | var tfVar terraformVariable 178 | 179 | tfVar.Path = path 180 | tfVar.Name = name 181 | 182 | // Remove all "_kics" arguments 183 | sanitizeDocument(d) 184 | 185 | if isTFStateFilePath { 186 | startLine, endLine, source, err := findBlockLinesFromJSON(ctx, path, "variables", name) 187 | if err != nil { 188 | return tfVar, err 189 | } 190 | 191 | tfVar.StartLine = startLine 192 | tfVar.EndLine = endLine 193 | tfVar.Source = source 194 | } else { 195 | start, end, source, err := getBlock(ctx, path, content, "variable", []string{name}) 196 | if err != nil { 197 | plugin.Logger(ctx).Error("terraform_variable.buildVariable", "getBlock", err) 198 | return tfVar, err 199 | } 200 | tfVar.StartLine = start.Line 201 | tfVar.EndLine = end.Line 202 | tfVar.Source = source 203 | val, err := extractValidationBlock(source) 204 | if err != nil { 205 | plugin.Logger(ctx).Debug("No validation block found...") 206 | } else { 207 | tfVar.Validation = val 208 | } 209 | } 210 | for k, v := range d { 211 | switch k { 212 | case "description": 213 | if reflect.TypeOf(v).String() != "string" { 214 | return tfVar, fmt.Errorf("the 'description' argument for variable '%s' must be of type string", name) 215 | } 216 | tfVar.Description = v.(string) 217 | 218 | case "default": 219 | valStr, err := convertExpressionValue(v) 220 | if err != nil { 221 | plugin.Logger(ctx).Error("terraform_variable.buildVariable", "convert_value_error", err) 222 | return tfVar, err 223 | } 224 | tfVar.DefaultValue = valStr 225 | 226 | case "sensitive": 227 | // Numbers and bools are both parsed as SimpleJSONValue, so we type check 228 | // through the gocty conversion error handling 229 | var sensitiveVal bool 230 | err := gocty.FromCtyValue(v.(ctyjson.SimpleJSONValue).Value, &sensitiveVal) 231 | if err != nil { 232 | return tfVar, fmt.Errorf("failed to resolve 'sensitive' argument for variable '%s': %w", name, err) 233 | } 234 | tfVar.Sensitive = sensitiveVal 235 | case "type": 236 | tfVar.Type = formatVariableTypeString(v.(string)) 237 | 238 | } 239 | } 240 | return tfVar, nil 241 | } 242 | 243 | // Cleanup the value for the variable type 244 | // formatVariableTypeString uses regex to remove "${" and "}" from the input string.
245 | func formatVariableTypeString(input string) string { 246 | re := regexp.MustCompile(`^\$\{(.+)\}$`) 247 | matches := re.FindStringSubmatch(input) 248 | if len(matches) > 1 { 249 | return matches[1] 250 | } 251 | return input 252 | } 253 | 254 | func extractValidationBlock(tfVar string) (string, error) { 255 | // Define a regex pattern to match the validation blocks 256 | validationBlockPattern := `validation\s*\{[^}]+\}` 257 | 258 | // Compile the regex pattern 259 | re, err := regexp.Compile(validationBlockPattern) 260 | if err != nil { 261 | return "", err 262 | } 263 | 264 | // Find all validation blocks in the given string 265 | validationBlocks := re.FindAllString(tfVar, -1) 266 | if len(validationBlocks) == 0 { 267 | return "", fmt.Errorf("no validation blocks found") 268 | } 269 | 270 | return strings.Join(validationBlocks, "\n\n"), nil 271 | } 272 | -------------------------------------------------------------------------------- /terraform/utils.go: -------------------------------------------------------------------------------- 1 | package terraform 2 | 3 | import ( 4 | "bufio" 5 | "context" 6 | _ "embed" // Embed kics CLI img and scan-flags 7 | json "encoding/json" 8 | "errors" 9 | "fmt" 10 | "os" 11 | "reflect" 12 | "regexp" 13 | "strings" 14 | "sync" 15 | 16 | "github.com/Checkmarx/kics/pkg/model" 17 | "github.com/Checkmarx/kics/pkg/parser" 18 | terraformParser "github.com/Checkmarx/kics/pkg/parser/terraform" 19 | "github.com/hashicorp/hcl/v2" 20 | "github.com/hashicorp/hcl/v2/hclparse" 21 | "github.com/hashicorp/hcl/v2/hclsyntax" 22 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin" 23 | "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" 24 | ctyjson "github.com/zclconf/go-cty/cty/json" 25 | 26 | filehelpers "github.com/turbot/go-kit/files" 27 | ) 28 | 29 | type filePath struct { 30 | Path string 31 | IsTFPlanFilePath bool 32 | IsTFStateFilePath bool 33 | } 34 | 35 | // Use when parsing any TF file to prevent concurrent map read and write errors 36 | var parseMutex = sync.Mutex{} 37 | 38 | func tfConfigList(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) { 39 | 40 | // #1 - Path via qual 41 | 42 | // If the path was requested through qualifier then match it exactly. Globs 43 | // are not supported in this context since the output value for the column 44 | // will never match the requested value.
45 | quals := d.EqualsQuals 46 | if quals["path"] != nil { 47 | 48 | path := d.EqualsQualString("path") 49 | 50 | // Check if a state file is provided in the qual 51 | if strings.HasSuffix(path, ".tfstate") { 52 | d.StreamListItem(ctx, filePath{Path: path, IsTFStateFilePath: true}) 53 | return nil, nil 54 | } 55 | 56 | d.StreamListItem(ctx, filePath{Path: path}) 57 | return nil, nil 58 | } 59 | 60 | // #2 - paths in config 61 | 62 | // Return early if no paths are specified 63 | terraformConfig := GetConfig(d.Connection) 64 | if terraformConfig.Paths == nil && terraformConfig.ConfigurationFilePaths == nil && terraformConfig.PlanFilePaths == nil && terraformConfig.StateFilePaths == nil { 65 | return nil, nil 66 | } 67 | 68 | // Gather file path matches for the glob 69 | var paths, matches []string 70 | 71 | // TODO:: Remove backward compatibility for the argument 'Paths' 72 | if terraformConfig.Paths != nil { 73 | paths = terraformConfig.Paths 74 | } else { 75 | paths = terraformConfig.ConfigurationFilePaths 76 | } 77 | configurationFilePaths := paths 78 | 79 | for _, i := range configurationFilePaths { 80 | 81 | // List the files in the given source directory 82 | files, err := d.GetSourceFiles(i) 83 | if err != nil { 84 | plugin.Logger(ctx).Error("tfConfigList.configurationFilePaths", "get_source_files_error", err) 85 | 86 | // If the specified path is unavailable, then an empty row should populate 87 | if strings.Contains(err.Error(), "failed to get directory specified by the source") { 88 | continue 89 | } 90 | return nil, err 91 | } 92 | matches = append(matches, files...) 93 | } 94 | 95 | // Sanitize the matches to ignore the directories 96 | for _, i := range matches { 97 | 98 | // Ignore directories 99 | if filehelpers.DirectoryExists(i) { 100 | continue 101 | } 102 | d.StreamListItem(ctx, filePath{Path: i}) 103 | } 104 | 105 | // Gather TF plan file path matches for the glob 106 | var matchedPlanFilePaths []string 107 | planFilePaths := terraformConfig.PlanFilePaths 108 | for _, i := range planFilePaths { 109 | 110 | // List the files in the given source directory 111 | files, err := d.GetSourceFiles(i) 112 | if err != nil { 113 | plugin.Logger(ctx).Error("tfConfigList.planFilePaths", "get_source_files_error", err) 114 | 115 | // If the specified path is unavailable, then an empty row should populate 116 | if strings.Contains(err.Error(), "failed to get directory specified by the source") { 117 | continue 118 | } 119 | return nil, err 120 | } 121 | matchedPlanFilePaths = append(matchedPlanFilePaths, files...)
122 | } 123 | 124 | // Sanitize the matches to ignore the directories 125 | for _, i := range matchedPlanFilePaths { 126 | 127 | // Ignore directories 128 | if filehelpers.DirectoryExists(i) { 129 | continue 130 | } 131 | d.StreamListItem(ctx, filePath{ 132 | Path: i, 133 | IsTFPlanFilePath: true, 134 | }) 135 | } 136 | 137 | // Gather TF state file path matches for the glob 138 | var matchedStateFilePaths []string 139 | stateFilePaths := terraformConfig.StateFilePaths 140 | for _, i := range stateFilePaths { 141 | 142 | // List the files in the given source directory 143 | files, err := d.GetSourceFiles(i) 144 | if err != nil { 145 | plugin.Logger(ctx).Error("tfConfigList.stateFilePaths", "get_source_files_error", err) 146 | 147 | // If the specified path is unavailable, then an empty row should populate 148 | if strings.Contains(err.Error(), "failed to get directory specified by the source") { 149 | continue 150 | } 151 | return nil, err 152 | } 153 | matchedStateFilePaths = append(matchedStateFilePaths, files...) 154 | } 155 | 156 | // Sanitize the matches to ignore the directories 157 | for _, i := range matchedStateFilePaths { 158 | 159 | // Ignore directories 160 | if filehelpers.DirectoryExists(i) { 161 | continue 162 | } 163 | d.StreamListItem(ctx, filePath{ 164 | Path: i, 165 | IsTFStateFilePath: true, 166 | }) 167 | } 168 | return nil, nil 169 | } 170 | 171 | func Parser() ([]*parser.Parser, error) { 172 | 173 | combinedParser, err := parser.NewBuilder(). 174 | Add(terraformParser.NewDefault()). 175 | Build([]string{"Terraform"}, []string{""}) 176 | if err != nil { 177 | return nil, err 178 | } 179 | 180 | return combinedParser, nil 181 | } 182 | 183 | // Remove all "_kics" arguments to avoid noisy data 184 | func sanitizeDocument(d model.Document) { 185 | // Deep sanitize 186 | for k, v := range d { 187 | if strings.HasPrefix(k, "_kics") { 188 | delete(d, k) 189 | } 190 | 191 | // check if the arguments interface is nil 192 | if v != nil { 193 | if reflect.TypeOf(v).String() == "model.Document" { 194 | sanitizeDocument(v.(model.Document)) 195 | } 196 | 197 | // Some map arguments are returned as "[]interface {}" types from the parser 198 | if reflect.TypeOf(v).String() == "[]interface {}" { 199 | for _, v := range v.([]interface{}) { 200 | if reflect.TypeOf(v).String() == "model.Document" { 201 | sanitizeDocument(v.(model.Document)) 202 | } 203 | } 204 | } 205 | } 206 | } 207 | } 208 | 209 | // For any arguments that can be a TF expression, convert to string for easier handling 210 | func convertExpressionValue(v interface{}) (valStr string, err error) { 211 | switch v := v.(type) { 212 | // Numbers and bools 213 | case ctyjson.SimpleJSONValue: 214 | val, err := v.MarshalJSON() 215 | if err != nil { 216 | return "", fmt.Errorf("Failed to convert SimpleJSONValue value %v: %w", v, err) 217 | } 218 | valStr = string(val) 219 | 220 | case string: 221 | val, err := json.Marshal(v) 222 | if err != nil { 223 | return "", fmt.Errorf("Failed to convert string value %v: %w", v, err) 224 | } 225 | valStr = string(val) 226 | 227 | // Maps 228 | case model.Document: 229 | val, err := v.MarshalJSON() 230 | if err != nil { 231 | return "", fmt.Errorf("Failed to convert model.Document value %v: %w", v, err) 232 | } 233 | valStr = string(val) 234 | 235 | // Arrays 236 | case []interface{}: 237 | var valStrs []string 238 | for _, iValue := range v { 239 | tempVal, err := convertExpressionValue(iValue) 240 | if err != nil { 241 | return "", fmt.Errorf("Failed to convert []interface{} value %v: 
%w", v, err) 242 | } 243 | valStrs = append(valStrs, tempVal) 244 | } 245 | valStr = fmt.Sprintf("[%s]", strings.Join(valStrs, ",")) 246 | 247 | default: 248 | return "", fmt.Errorf("Failed to convert value %v due to unknown type: %T", v, v) 249 | } 250 | return valStr, nil 251 | } 252 | 253 | func ParseContent(ctx context.Context, d *plugin.QueryData, path string, content []byte, p *parser.Parser) (parser.ParsedDocument, error) { 254 | // Only allow parsing of one file at a time to prevent concurrent map read 255 | // and write errors 256 | parseMutex.Lock() 257 | defer parseMutex.Unlock() 258 | 259 | parsedDocs, err := p.Parse(path, content, false, false) 260 | if err != nil { 261 | plugin.Logger(ctx).Error("utils.ParseContent", "parse_error", err, "path", path) 262 | return parser.ParsedDocument{}, err 263 | } 264 | 265 | return parsedDocs, nil 266 | } 267 | 268 | func getBlock(ctx context.Context, path string, content []byte, blockType string, matchLabels []string) (startPos hcl.Pos, endPos hcl.Pos, source string, _ error) { 269 | parser := hclparse.NewParser() 270 | file, _ := parser.ParseHCL(content, path) 271 | fileContent, _, diags := file.Body.PartialContent(terraformSchema) 272 | if diags.HasErrors() { 273 | return hcl.InitialPos, hcl.InitialPos, "", errors.New(diags.Error()) 274 | } 275 | for _, block := range fileContent.Blocks.OfType(blockType) { 276 | if isBlockMatch(block, blockType, matchLabels) { 277 | syntaxBody, ok := block.Body.(*hclsyntax.Body) 278 | if !ok { 279 | // this should never happen 280 | plugin.Logger(ctx).Info("could not cast to hclsyntax") 281 | break 282 | } 283 | 284 | startPos = syntaxBody.SrcRange.Start 285 | endPos = syntaxBody.SrcRange.End 286 | source = strings.Join( 287 | strings.Split( 288 | string(content), 289 | "\n", 290 | )[(syntaxBody.SrcRange.Start.Line-1):syntaxBody.SrcRange.End.Line], 291 | "\n", 292 | ) 293 | 294 | break 295 | } 296 | } 297 | return 298 | } 299 | 300 | func isBlockMatch(block *hcl.Block, blockType string, matchLabels []string) bool { 301 | if !strings.EqualFold(block.Type, blockType) { 302 | return false 303 | } 304 | 305 | if len(block.Labels) != len(matchLabels) { 306 | return false 307 | } 308 | for mIdx, matchLabel := range matchLabels { 309 | if !strings.EqualFold(block.Labels[mIdx], matchLabel) { 310 | return false 311 | } 312 | } 313 | return true 314 | } 315 | 316 | var terraformSchema = &hcl.BodySchema{ 317 | Blocks: []hcl.BlockHeaderSchema{ 318 | { 319 | Type: "terraform", 320 | }, 321 | { 322 | // This one is not really valid, but we include it here so we 323 | // can create a specialized error message hinting the user to 324 | // nest it inside a "terraform" block. 
325 | Type: "required_providers", 326 | }, 327 | { 328 | Type: "provider", 329 | LabelNames: []string{"name"}, 330 | }, 331 | { 332 | Type: "variable", 333 | LabelNames: []string{"name"}, 334 | }, 335 | { 336 | Type: "locals", 337 | }, 338 | { 339 | Type: "output", 340 | LabelNames: []string{"name"}, 341 | }, 342 | { 343 | Type: "module", 344 | LabelNames: []string{"name"}, 345 | }, 346 | { 347 | Type: "resource", 348 | LabelNames: []string{"type", "name"}, 349 | }, 350 | { 351 | Type: "data", 352 | LabelNames: []string{"type", "name"}, 353 | }, 354 | { 355 | Type: "moved", 356 | }, 357 | }, 358 | } 359 | 360 | func isTerraformPlan(content []byte) bool { 361 | var data map[string]interface{} 362 | err := json.Unmarshal(content, &data) 363 | if err != nil { 364 | return false 365 | } 366 | 367 | // Check for fields that are common in Terraform plans 368 | _, hasResourceChanges := data["resource_changes"] 369 | _, hasFormatVersion := data["format_version"] 370 | 371 | return hasResourceChanges && hasFormatVersion 372 | } 373 | 374 | // findBlockLinesFromJSON locates the start and end lines of a specific block or nested element within a block. 375 | // The file should contain structured data (e.g., JSON) and this function expects to search for blocks with specific names. 376 | func findBlockLinesFromJSON(ctx context.Context, path string, blockName string, pathName ...string) (int, int, string, error) { 377 | var currentLine, startLine, endLine int 378 | var bracketCounter, startCounter int 379 | 380 | // These boolean flags indicate which part of the structured data we're currently processing. 381 | inBlock, inOutput, inTargetBlock := false, false, false 382 | 383 | file, err := os.Open(path) 384 | if err != nil { 385 | plugin.Logger(ctx).Error("findBlockLinesFromJSON", "file_error", err) 386 | return startLine, endLine, "", err 387 | } 388 | defer file.Close() 389 | // Move the file pointer to the start of the file. 390 | _, _ = file.Seek(0, 0) 391 | scanner := bufio.NewScanner(file) 392 | 393 | for scanner.Scan() { 394 | currentLine++ 395 | line := scanner.Text() 396 | trimmedLine := strings.TrimSpace(line) 397 | 398 | // Detect the start of the desired block, path, response, etc. 399 | // Depending on the blockName and provided pathName, different conditions are checked. 400 | 401 | // Generic block detection 402 | if !inBlock && (trimmedLine == fmt.Sprintf(`"%s": {`, blockName) || trimmedLine == fmt.Sprintf(`"%s": [`, blockName)) { 403 | inBlock = true 404 | startLine = currentLine 405 | continue 406 | } else if inBlock && blockName == "outputs" && trimmedLine == fmt.Sprintf(`"%s": {`, pathName[0]) { 407 | // Different output block detection within the "outputs" block 408 | inOutput = true 409 | bracketCounter = 1 410 | startLine = currentLine 411 | continue 412 | } else if inBlock && blockName == "resources" { 413 | if inBlock && strings.Contains(trimmedLine, "{") { 414 | bracketCounter++ 415 | startCounter = currentLine 416 | } 417 | if inBlock && strings.Contains(trimmedLine, "}") { 418 | bracketCounter-- 419 | } 420 | 421 | // Get the start line info for the plan file data. 422 | // Terraform plan files need special handling: when count or for_each is used, 423 | // the plan can contain more than one resource configuration object with the same name and type. 424 | // To avoid that conflict, match on the address and type instead, which are unique and only present in plan files.
425 | if inBlock && strings.Contains(trimmedLine, fmt.Sprintf(`"address": "%s"`, pathName[0])) { 426 | peekCounter := 1 427 | nameFound := false 428 | 429 | for { 430 | peekLine, _ := readLineN(file, currentLine+peekCounter) 431 | if strings.Contains(peekLine, fmt.Sprintf(`"type": "%s"`, pathName[1])) { 432 | nameFound = true 433 | break 434 | } 435 | if strings.Contains(peekLine, "}") { 436 | break 437 | } 438 | peekCounter++ 439 | } 440 | 441 | if nameFound { 442 | inTargetBlock = true 443 | startLine = startCounter // Assume the opening brace is at the start of this resource 444 | } 445 | } 446 | 447 | // Get the start line info from terraform state file. 448 | // Match the type and name of the resource to get the start position 449 | if inBlock && strings.Contains(trimmedLine, fmt.Sprintf(`"type": "%s"`, pathName[0])) { 450 | peekCounter := 1 451 | nameFound := false 452 | 453 | for { 454 | peekLine, _ := readLineN(file, currentLine+peekCounter) 455 | if strings.Contains(peekLine, fmt.Sprintf(`"name": "%s"`, pathName[1])) { 456 | nameFound = true 457 | break 458 | } 459 | if strings.Contains(peekLine, "}") { 460 | break 461 | } 462 | peekCounter++ 463 | } 464 | 465 | if nameFound { 466 | inTargetBlock = true 467 | startLine = startCounter // Assume the opening brace is at the start of this resource 468 | } 469 | } 470 | } 471 | // If we are within a block, we need to track the opening and closing brackets 472 | // to determine where the block ends. 473 | if inBlock && inOutput && !inTargetBlock { 474 | bracketCounter += strings.Count(line, "{") 475 | bracketCounter -= strings.Count(line, "}") 476 | 477 | if bracketCounter == 0 { 478 | endLine = currentLine 479 | break 480 | } 481 | } 482 | 483 | if inBlock && inTargetBlock && bracketCounter == 0 { 484 | endLine = currentLine 485 | break 486 | } 487 | } 488 | source := getSourceFromFile(file, startLine, endLine) 489 | 490 | if startLine != 0 && endLine == 0 { 491 | // If we found the start but not the end, reset the start to indicate the block doesn't exist in entirety. 492 | startLine = 0 493 | } 494 | 495 | // By default (when created), the file content is not properly formatted with indentation and all the content remains in line 1 496 | if file != nil && startLine == 0 && endLine == 0 { 497 | // Set the start line as 1, and 498 | // end line as the current line (i.e. 
total lines) 499 | startLine = 1 500 | endLine = currentLine 501 | 502 | content, err := os.ReadFile(path) 503 | if err != nil { 504 | plugin.Logger(ctx).Error("findBlockLinesFromJSON", "read_file_error", err) 505 | return startLine, endLine, source, err 506 | } 507 | contentStr := string(content) 508 | 509 | // Regex pattern to extract the resources list from the file 510 | pattern := `"planned_values":{.*"root_module":{"resources":(.*)}},"resource_changes"` 511 | 512 | // Compile the regular expression 513 | re := regexp.MustCompile(pattern) 514 | 515 | // Find the match in the JSON string 516 | matches := re.FindStringSubmatch(contentStr) 517 | 518 | // Check if the resources block is present in the plan file content store the resources list 519 | var resources []interface{} 520 | if len(matches) >= 2 { 521 | plannedValues := matches[1] 522 | err := json.Unmarshal([]byte(plannedValues), &resources) 523 | if err != nil { 524 | plugin.Logger(ctx).Error("findBlockLinesFromJSON", "unmarshal_error", err) 525 | return startLine, endLine, source, err 526 | } 527 | } 528 | 529 | // Go through the resources and check for the desired one 530 | for _, r := range resources { 531 | if strings.Contains(fmt.Sprint(r), pathName[0]) && strings.Contains(fmt.Sprint(r), pathName[1]) { 532 | if data, ok := r.(map[string]interface{}); ok { 533 | // Marshal the map to JSON 534 | jsonBytes, err := json.Marshal(data) 535 | if err != nil { 536 | plugin.Logger(ctx).Error("findBlockLinesFromJSON", "unmarshal_error", err) 537 | return startLine, endLine, source, err 538 | } 539 | 540 | // Convert the JSON bytes to a string 541 | jsonString := string(jsonBytes) 542 | // And, set the value as source 543 | source = jsonString 544 | } 545 | } 546 | } 547 | } 548 | 549 | return startLine, endLine, source, nil 550 | } 551 | 552 | func getSourceFromFile(file *os.File, startLine int, endLine int) string { 553 | var source string 554 | _, _ = file.Seek(0, 0) // Go to the start 555 | scanner := bufio.NewScanner(file) 556 | currentSourceLine := 0 557 | for scanner.Scan() { 558 | currentSourceLine++ 559 | if currentSourceLine >= startLine && currentSourceLine <= endLine { 560 | source += scanner.Text() + "\n" 561 | } 562 | if currentSourceLine > endLine { 563 | break 564 | } 565 | } 566 | return source 567 | } 568 | 569 | func readLineN(file *os.File, lineNum int) (string, error) { 570 | _, _ = file.Seek(0, 0) // Go to the start 571 | scanner := bufio.NewScanner(file) 572 | currentLine := 0 573 | for scanner.Scan() { 574 | currentLine++ 575 | if currentLine == lineNum { 576 | return scanner.Text(), nil 577 | } 578 | } 579 | return "", nil 580 | } 581 | 582 | // Transform function to return nil if an empty map 583 | func NullIfEmptyMap(_ context.Context, d *transform.TransformData) (interface{}, error) { 584 | if data, isMap := d.Value.(map[string]interface{}); isMap { 585 | if len(data) == 0 { 586 | return nil, nil 587 | } 588 | } 589 | return d.Value, nil 590 | } 591 | --------------------------------------------------------------------------------
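
As a quick aside for readers of terraform/utils.go: isTerraformPlan distinguishes Terraform plan JSON from other JSON purely by the presence of the "format_version" and "resource_changes" top-level keys, which is why listVariables returns early for plan file paths. Below is a minimal, self-contained sketch of that heuristic for experimenting outside the plugin; the standalone file and the isLikelyTerraformPlan name are illustrative only and are not part of this repository.

package main

import (
	"encoding/json"
	"fmt"
)

// isLikelyTerraformPlan mirrors the heuristic used by isTerraformPlan in
// terraform/utils.go: the content is treated as a Terraform plan only when it
// is valid JSON and carries both the "format_version" and "resource_changes"
// top-level keys.
func isLikelyTerraformPlan(content []byte) bool {
	var data map[string]interface{}
	if err := json.Unmarshal(content, &data); err != nil {
		return false
	}
	_, hasResourceChanges := data["resource_changes"]
	_, hasFormatVersion := data["format_version"]
	return hasResourceChanges && hasFormatVersion
}

func main() {
	plan := []byte(`{"format_version":"1.1","terraform_version":"1.5.0","resource_changes":[]}`)
	state := []byte(`{"version":4,"terraform_version":"1.5.0","resources":[]}`)
	fmt.Println(isLikelyTerraformPlan(plan))  // true
	fmt.Println(isLikelyTerraformPlan(state)) // false
}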
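
In the same spirit, the variable table relies on two small string transforms: formatVariableTypeString strips a surrounding "${...}" wrapper from the parsed type expression, and extractValidationBlock pulls validation { ... } blocks out of the variable's source using a simple non-nested regex. The sketch below shows the same two transforms in isolation; it is a hypothetical standalone file, and the unwrapType and extractValidation names are invented for illustration.

package main

import (
	"fmt"
	"regexp"
)

// unwrapType strips a surrounding "${...}" wrapper from a parsed type
// expression, e.g. "${map(string)}" -> "map(string)". Input without the
// wrapper is returned unchanged.
func unwrapType(input string) string {
	re := regexp.MustCompile(`^\$\{(.+)\}$`)
	if m := re.FindStringSubmatch(input); len(m) > 1 {
		return m[1]
	}
	return input
}

// extractValidation returns the validation { ... } blocks found in a
// variable's source text, using the same simple pattern as the plugin
// (it does not handle nested braces inside the block).
func extractValidation(src string) []string {
	return regexp.MustCompile(`validation\s*\{[^}]+\}`).FindAllString(src, -1)
}

func main() {
	fmt.Println(unwrapType("${map(string)}")) // map(string)

	src := `variable "env" {
  type = string
  validation {
    condition     = contains(["dev", "prod"], var.env)
    error_message = "The env value must be dev or prod."
  }
}`
	for _, block := range extractValidation(src) {
		fmt.Println(block)
	}
}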