├── .gitattributes ├── .github └── workflows │ └── hn.yml ├── LICENSE ├── README.md └── all ├── aws-cloud-perimeter ├── alb.sql ├── all_urls.sql ├── api_gatewayv2.sql ├── cloudfront_distributions.sql ├── lambda_url.sql ├── public_vpc_ips.sql ├── route53.sql └── s3_buckets.sql ├── aws-codebuild-tf ├── CodePipeline-Template.yaml ├── buildspec-tf-apply.yaml ├── buildspec-tf-plan.yaml └── buildspec.steampipe.yaml ├── aws-codebuild ├── steampipe-buildspec.yaml └── steampipe-cloud-buildspec.yaml ├── aws-compliance-quickstart ├── README.md └── quickstart.sh ├── aws-organizations-scripts ├── README.md ├── generate_all_credential_reports.sh ├── generate_config_for_cross_account_roles.sh ├── generate_config_for_multipayer.py └── generate_config_for_sso.sh ├── aws-top-10 ├── README.md ├── mod.sp └── top-10.sp ├── aws-trusts ├── cross_account_trusts.sql ├── foreign_accounts.sql ├── foreign_accounts_mapped_to_cloudmapper.sql ├── foreign_ami_owners.sql └── parse_cloudmapper.sql ├── config-yaml └── README.md ├── control-output-templates ├── README.md └── md-vs-briefmd.jpg ├── controls-with-descriptions ├── .mod.cache.json ├── README.md └── mod.sp ├── crosstab └── mod.sp ├── dashboard-remix ├── AwsAzureCompute.sp ├── AwsComputeWithControls.sp ├── README.md ├── install_mods.sh ├── mod.sp └── named_resources.sp ├── dashboard-sql-reuse ├── README.md ├── basic.sp ├── functional.sp ├── hcl.sp ├── mod.sp └── mod.sp~ ├── github-actions-oidc ├── README.md ├── aws │ ├── README.md │ ├── default.tfvars │ ├── main.tf │ ├── output.tf │ ├── providers.tf │ ├── steampipe-sample-aws-workflow.yml │ └── variables.tf ├── azure │ ├── README.md │ ├── default.tfvars │ ├── main.tf │ ├── output.tf │ ├── providers.tf │ ├── steampipe-sample-azure-workflow.yml │ └── variables.tf └── gcp │ ├── README.md │ ├── default.tfvars │ ├── main.tf │ ├── output.tf │ ├── providers.tf │ ├── steampipe-sample-gcp-workflow.yml │ └── variables.tf ├── github-activity ├── README.md ├── github-activity.gif ├── in-tableau.gif └── mod.sp ├── github-add-or-remove-topic ├── README.md ├── go.mod ├── go.sum └── main.go ├── github-avatars ├── README.md ├── avatars.csv ├── avatars.py ├── avatars.sql ├── github-avatars.png └── mod.sp ├── github-external-contributor-analysis ├── README.md ├── build-the-script.py ├── typescript.sql └── vscode.sql ├── github-issue-duration ├── README.md └── mod.sp ├── github-traffic ├── README.md ├── mod.sp ├── plpy-traffic-dashboard.sql └── repo-traffic-small-multiples.png ├── gmail ├── README.md └── gmail-query.mp4 ├── hackernews ├── README.md ├── animate.py ├── animation.sp ├── csv.spc ├── hackernews.spc ├── hn_header.txt ├── hn_items_all.csv ├── home.sp ├── mod.sp ├── new_sc.csv ├── posts.sp ├── push.py ├── query.sp ├── repos.sp ├── search.sp ├── sources.sp ├── submissions.sp ├── times.txt ├── update.sh └── urls.sp ├── hcl-dashboard-patterns ├── README.md ├── hcl-dashboard-patterns.png ├── interpolated_sql_file.sql ├── mod.sp ├── parameterized_sql_file.sql └── plain_sql_file.sql ├── histogram └── README.md ├── hypothesis ├── README.md ├── home.sp ├── media_conversations.sp ├── mod.sp └── query.sp ├── introspection └── README.md ├── jira ├── README.md ├── jira.json ├── mod.sp └── tasks_and_subtasks.sp ├── join-csv-and-api └── README.md ├── linkcheck ├── links.sp ├── mod.sp └── query.sp ├── metasearch ├── README.md ├── metasearch.png ├── mod.sp └── zendesk.sql ├── pipes-terraform-provider ├── README.md ├── outputs.tf ├── pipes.tf ├── provider.tf └── variables.tf ├── reddit ├── README.md ├── mod.sp ├── my_reddit_posts.sp └── 
search_reddits.sp ├── relationship-graph ├── README.md ├── category.sp ├── edge.sp ├── edge_inline_in_graph.sp ├── edge_reusable_with_base_passing_args.sp ├── hcl_with_block.sp ├── input.sp ├── mod.sp ├── node.sp ├── node_inherits_base.sp ├── node_inherits_base_passing_args.sp ├── node_inline.sp ├── plugin_versions_inherit_nodes_and_edges_passing_args.sp ├── plugin_versions_inline_nodes_and_edges.sp ├── plugin_versions_union_nodes_and_edges.sp ├── query.sp └── schemas_and_tables.sp ├── salesforce ├── README.md ├── mod.sp └── salesforce.sp ├── splunk-lookup-tables ├── README.md ├── accounts.sql ├── eni.sql ├── generate_tables.sh └── instances.sql ├── spreadsheet-integrity ├── README.md ├── event_planning.sp ├── people.csv └── sessions.csv └── steampipe-and-metabase └── historical-and-recent-daily-aws-service-cost.md /.gitattributes: -------------------------------------------------------------------------------- 1 | **/*.sp linguist-language=HCL 2 | -------------------------------------------------------------------------------- /.github/workflows/hn.yml: -------------------------------------------------------------------------------- 1 | name: "fetch hackernews items" 2 | 3 | defaults: 4 | run: 5 | working-directory: ./all/hackernews 6 | 7 | on: 8 | push: 9 | branches: []  # - main 10 | 11 | jobs: 12 | cron: 13 | runs-on: ubuntu-latest 14 | steps: 15 | 16 | - uses: actions/checkout@v3 17 | 18 | # actions are not compatible with working-directory 19 | # 20 | # - uses: francois2metz/setup-steampipe@v1 21 | # with: 22 | # steampipe-version: 'latest' 23 | # steampipe-plugins: | 24 | # { 25 | # "hackernews": {} 26 | # } 27 | # 28 | # so install steampipe this way 29 | 30 | - name: echo ~ 31 | run: echo ~ 32 | 33 | - name: ls ~ 34 | run: ls ~ 35 | 36 | - name: pwd 37 | run: pwd 38 | 39 | - name: ls 40 | run: ls 41 | 42 | - name: clock1 43 | run: echo `date` 44 | 45 | - name: clock2 46 | run: echo `date` >> ./times.txt 47 | - name: install steampipe 48 | run: sudo /bin/sh -c "$(curl -fsSL https://raw.githubusercontent.com/turbot/steampipe/main/install.sh)" 49 | 50 | - name: install hackernews 51 | run: steampipe plugin install hackernews 52 | 53 | - name: install csv 54 | run: steampipe plugin install csv 55 | 56 | - name: install github 57 | run: steampipe plugin install github 58 | 59 | - name: install net 60 | run: steampipe plugin install net 61 | 62 | - name: configure csv 63 | run: cp ./csv.spc ~/.steampipe/config/csv.spc 64 | 65 | - name: check config 66 | run: more ~/.steampipe/config/csv.spc 67 | 68 | - name: capture hourly snapshot 69 | run: STEAMPIPE_LOG=trace steampipe query "select * from hackernews_new where time > now() - interval '1 hour'" --output csv >> ./csv/hn_`date +%s`.csv 70 | 71 | - name: combine files 72 | run: | 73 | cat hn_header.txt > hn.csv 74 | 75 | for file in ./csv/hn_*.csv; do 76 | tail -n +2 $file >> hn.csv 77 | done 78 | 79 | - name: create hn_items_all 80 | run: | 81 | steampipe query "create table hn_items_tmp as select * from csv.hn" 82 | steampipe query "select count(*) from csv.hn" 83 | steampipe query "create table hn_items_all as select distinct on (id) * from hn_items_tmp" 84 | steampipe query "delete from hn_items_all where substring(time from 1 for 10) < to_char(now() - interval '31 day' , 'YYYY-MM-DD')" 85 | steampipe query "update hn_items_all set score = 0::text where score = ''" 86 | steampipe query "update hn_items_all set descendants = 0::text where descendants = ''" 87 | steampipe query "select count(*) from hn_items_all" 88 | 89 | - 
name: create hn_scores_and_comments 90 | run: steampipe query query.create_scores_and_comments 91 | 92 | - name: check hn_scores_and_comments 93 | run: steampipe query "select sum(descendants::bigint) as descendants from hn_scores_and_comments" 94 | 95 | - name: create new_sc 96 | run: steampipe query query.new_scores_and_comments 97 | 98 | - name: check new_sc 99 | run: steampipe query "select sum(descendants::bigint) as descendants from new_sc" 100 | 101 | - name: save new_sc 102 | run: steampipe query "select * from new_sc" --output csv > ./new_sc.csv 103 | 104 | - name: update hn_items_all from new_sc 105 | run: steampipe query query.update_scores_and_comments 106 | 107 | - name: save hn_items_all 108 | run: | 109 | steampipe query "alter table hn_items_all drop column _ctx" 110 | steampipe query "update hn_items_all set score = 0::text where score = ''" 111 | steampipe query "update hn_items_all set descendants = 0::text where descendants = ''" 112 | steampipe query "select * from hn_items_all" --output csv > ./hn_items_all.csv 113 | 114 | - name: setup git 115 | run: | 116 | git config user.name "GitHub Actions Bot" 117 | git config user.email "<>" 118 | 119 | - name: add 120 | run: | 121 | mkdir -p ./logs 122 | cp ~/.steampipe/logs/p*.log ./logs 123 | git add ./csv ./logs 124 | 125 | - name: commit 126 | run: 127 | git commit -m "update hn" ./csv ./logs ./times.txt ./new_sc.csv ./hn_items_all.csv 128 | 129 | - name: push 130 | run: 131 | git push origin main 132 | 133 | - name: Exit 134 | if: ${{ steps.checks.outcome == 'failure' }} 135 | run: exit 1 136 | 137 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Steampipe samples 2 | 3 | Examples, samples, snippets and scripts to use with Steampipe. 
4 | 5 | ## Benchmarks 6 | - [AWS Compliance Quickstart](./all/aws-compliance-quickstart) 7 | - [Control output templates](./all/control-output-templates) 8 | 9 | ## Dashboards 10 | - [Remixing dashboards](./all/dashboard-remix) 11 | - [Avatars](./all/github-avatars) 12 | - [HCL patterns](./all/hcl-dashboard-patterns) 13 | - [Hacker News](./all/hackernews) 14 | - [Hypothesis](./all/hypothesis/) 15 | - [Jira](./all/jira) 16 | - [Reddit](./all/reddit/) 17 | - [Salesforce](./all/salesforce/) 18 | 19 | ## File-based query 20 | - [Querying OpenAPI definitions](./all/config-yaml) 21 | 22 | ## GitHub 23 | - [GitHub activity](./all/github-activity) 24 | - [GitHub add or remove topic](./all/github-add-or-remove-topic) 25 | - [GitHub external contributors](./all/github-external-contributor-analysis/) 26 | 27 | ## Joins 28 | - [Joining CSV and API tables](./all/join-csv-and-api) 29 | - [Querying Gmail](./all/gmail) 30 | 31 | ## Relationship Graphs 32 | - [Graphs, Nodes, Edges, and HCL-level With](./all/relationship-graph) 33 | ## Security 34 | - [Splunk lookup tables](./all/splunk-lookup-tables) 35 | 36 | ## Spreadsheets 37 | - [Spreadsheet integrity](./all/spreadsheet-integrity) 38 | 39 | ## Steampipe introspection 40 | - [Steampipe introspection](./all/introspection) 41 | - [Dashboards + introspection](./all/controls-with-descriptions/) 42 | 43 | ## Visualization 44 | - [Postgres+Steampipe histograms](./all/histogram) 45 | 46 | -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/alb.sql: -------------------------------------------------------------------------------- 1 | select 2 | lower(l.protocol) || '://' || lb.dns_name || ':' || l.port as url 3 | from 4 | aws_ec2_application_load_balancer as lb, 5 | aws_ec2_load_balancer_listener as l 6 | where 7 | lb.scheme LIKE 'internet-facing' 8 | and lb.state_code LIKE 'active' 9 | and l.load_balancer_arn = lb.arn; -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/all_urls.sql: -------------------------------------------------------------------------------- 1 | select 2 | lower(l.protocol) || '://' || lb.dns_name || ':' || l.port as url, 3 | 'application_load_balancer' as type 4 | from 5 | aws_ec2_application_load_balancer as lb, 6 | aws_ec2_load_balancer_listener as l 7 | where 8 | lb.scheme LIKE 'internet-facing' 9 | and lb.state_code LIKE 'active' 10 | and l.load_balancer_arn = lb.arn 11 | 12 | UNION ALL 13 | 14 | select 15 | 'https://' || api_id || '.execute-api.' || region || '.amazonaws.com/' || stage_name as url, 16 | 'api_gateway' as type 17 | from aws_api_gatewayv2_stage 18 | 19 | UNION ALL 20 | 21 | select 22 | 'https://' || domain_name as url, 23 | 'cloudfront_distribution' as type 24 | from 25 | aws_cloudfront_distribution 26 | 27 | UNION ALL 28 | 29 | select 30 | 'https://' || jsonb_array_elements_text(aliases -> 'Items') as url, 31 | 'cloudfront_distribution_alias' as type 32 | from 33 | aws_cloudfront_distribution 34 | 35 | UNION ALL 36 | 37 | select 38 | url_config ->> 'FunctionUrl' as url, 39 | 'lambda_url' as type 40 | from aws_lambda_function 41 | where url_config is not Null 42 | 43 | UNION ALL 44 | 45 | select 46 | 'https://' || name || '.s3.' 
|| region || '.amazonaws.com/' as url, 47 | 's3_bucket_url' as type 48 | from 49 | aws_s3_bucket 50 | where 51 | bucket_policy_is_public is True; 52 | 53 | 54 | -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/api_gatewayv2.sql: -------------------------------------------------------------------------------- 1 | -- https://rwvrandomhg.execute-api.us-east-1.amazonaws.com/dev_system 2 | 3 | select 'https://' || api_id || '.execute-api.' || region || '.amazonaws.com/' || stage_name as url 4 | from aws_api_gatewayv2_stage; -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/cloudfront_distributions.sql: -------------------------------------------------------------------------------- 1 | select 2 | 'https://' || domain_name as url 3 | from 4 | aws_cloudfront_distribution 5 | UNION ALL 6 | select 7 | 'https://' || jsonb_array_elements_text(aliases -> 'Items') as url 8 | from 9 | aws_cloudfront_distribution; -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/lambda_url.sql: -------------------------------------------------------------------------------- 1 | select url_config ->> 'FunctionUrl' as url 2 | from aws_lambda_function 3 | where url_config is not Null; -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/public_vpc_ips.sql: -------------------------------------------------------------------------------- 1 | select 2 | eni.association_public_ip AS public_ip 3 | from 4 | aws_ec2_network_interface AS eni 5 | where 6 | eni.association_public_ip is not Null; 7 | -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/route53.sql: -------------------------------------------------------------------------------- 1 | select 2 | r.name as hostname, 3 | type, 4 | jsonb_array_elements_text(records) as resource_record 5 | from 6 | aws_route53_zone as z, 7 | aws_route53_record as r 8 | where r.zone_id = z.id 9 | and (type LIKE 'A' OR type LIKE 'CNAME') 10 | and z.private_zone=false 11 | and jsonb_pretty(records) not like '%dkim%' 12 | and jsonb_pretty(records) not like '%acm-validations.aws.%'; -------------------------------------------------------------------------------- /all/aws-cloud-perimeter/s3_buckets.sql: -------------------------------------------------------------------------------- 1 | select 2 | 'https://' || name || '.s3.' 
|| region || '.amazonaws.com/' as url 3 | from 4 | aws_s3_bucket 5 | where 6 | bucket_policy_is_public is True; 7 | 8 | -- Format of an S3 URL is: 9 | -- https://bucket-name.s3.region-code.amazonaws.com/key-name -------------------------------------------------------------------------------- /all/aws-codebuild-tf/buildspec-tf-apply.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | phases: 4 | 5 | install: 6 | commands: 7 | - "curl -s https://releases.hashicorp.com/terraform/1.3.6/terraform_1.3.6_linux_amd64.zip -o terraform.zip" 8 | - "unzip terraform.zip -d /usr/local/bin" 9 | - "chmod 755 /usr/local/bin/terraform" 10 | - "mv $CODEBUILD_SRC_DIR_TerraformPlan/terraform/${env}-terraform.tfplan terraform" 11 | pre_build: 12 | commands: 13 | - "echo env: $env" 14 | - "cd terraform ; terraform init -backend-config=../${env}.tfbackend -reconfigure" 15 | 16 | build: 17 | commands: 18 | - "cd terraform ; terraform apply ${env}-terraform.tfplan" 19 | -------------------------------------------------------------------------------- /all/aws-codebuild-tf/buildspec-tf-plan.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | env: 4 | exported-variables: 5 | - BuildID 6 | - BuildTag 7 | 8 | phases: 9 | 10 | install: 11 | commands: 12 | - "curl -s https://releases.hashicorp.com/terraform/1.3.6/terraform_1.3.6_linux_amd64.zip -o terraform.zip" 13 | - "unzip terraform.zip -d /usr/local/bin" 14 | - "chmod 755 /usr/local/bin/terraform" 15 | pre_build: 16 | commands: 17 | - "echo env: $env" 18 | - "cd terraform ; terraform init -backend-config=../${env}.tfbackend -reconfigure" 19 | 20 | build: 21 | commands: 22 | - "cd terraform ; terraform plan -out=${env}-terraform.tfplan -no-color" 23 | - "export BuildID=`echo $CODEBUILD_BUILD_ID | cut -d: -f1`" 24 | - "export BuildTag=`echo $CODEBUILD_BUILD_ID | cut -d: -f2`" 25 | 26 | artifacts: 27 | name: TerraformPlan 28 | files: 29 | - terraform/$env-terraform.tfplan -------------------------------------------------------------------------------- /all/aws-codebuild-tf/buildspec.steampipe.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | env: 4 | # Store the Steampipe Cloud host, token and workspace in AWS Secrets Manager 5 | secrets-manager: 6 | STEAMPIPE_CLOUD_TOKEN: $STEAMPIPE_CLOUD_SECRETNAME:STEAMPIPE_CLOUD_TOKEN 7 | WORKSPACE: $STEAMPIPE_CLOUD_SECRETNAME:WORKSPACE 8 | exported-variables: 9 | # STATUS_URL is returned and leveraged by CodePipeline for the Approval message 10 | - STATUS_URL 11 | - ALARM_COUNT 12 | 13 | phases: 14 | install: 15 | commands: 16 | # Each CodeBuild container is ephemeral - We need to install this every time 17 | - curl -s -L $STEAMPIPE_BINARY_URL | tar -xzf - 18 | - echo installed steampipe 19 | - git clone https://github.com/turbot/steampipe-mod-terraform-aws-compliance.git 20 | # Steampipe cannot run as root, so we run all the next steps as the codebuild user. But first it needs to own those files 21 | - chown -R codebuild-user . 
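# (the build phase below keeps STEAMPIPE_INSTALL_DIR and the cloned mod under this directory, so the codebuild-user needs to own it)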
22 | build: 23 | # Steampipe will return a non-zero exit code with the number of failed checks 24 | # (That may or may not still be the case with the Steampipe cloud version) 25 | on-failure: CONTINUE 26 | run-as: codebuild-user 27 | commands: 28 | # - pwd # for debugging 29 | # Place the .steampipe install in the local directory for this build 30 | - export STEAMPIPE_INSTALL_DIR=`pwd`/.steampipe 31 | - ./steampipe plugin install terraform 32 | - ./steampipe --version # for debugging 33 | # We need to tell Steampipe where to find the Terraform Module to use 34 | - export STEAMPIPE_MOD_LOCATION=`pwd`/steampipe-mod-terraform-aws-compliance 35 | # Now run the compliance check 36 | - export STEAMPIPE_CLOUD_TOKEN 37 | # - echo "Pushing Dashboard to $STEAMPIPE_CLOUD_HOST in $WORKSPACE" # CodeBuild considers these secrets and doesn't echo them. 38 | - cd terraform ; ../steampipe check all --snapshot-location $WORKSPACE --snapshot-tag repo=$REPO --snapshot-tag branch=$BRANCH --snapshot --snapshot-title "$SNAPSHOT_TITLE" | tee output.txt 39 | # Get some data from the output and export it for the next step in the pipeline 40 | - export ALARM_COUNT=`grep ^ALARM output.txt | awk '{print $3}'` 41 | - export STATUS_URL=`grep "Snapshot uploaded to" output.txt | awk '{ print $NF}'` 42 | -------------------------------------------------------------------------------- /all/aws-codebuild/steampipe-buildspec.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | phases: 4 | install: 5 | run-as: codebuild-user 6 | commands: 7 | # Each CodeBuild container is ephemeral - We need to install steampipe every time 8 | - curl -s -L https://github.com/turbot/steampipe/releases/latest/download/steampipe_linux_amd64.tar.gz | tar -xzf - 9 | - echo "installed steampipe" 10 | - ./steampipe plugin install terraform 11 | - git clone https://github.com/turbot/steampipe-mod-terraform-aws-compliance.git 12 | 13 | build: 14 | # Steampipe will return a non-zero exit code with the number of failed checks 15 | on-failure: CONTINUE 16 | run-as: codebuild-user 17 | commands: 18 | - ./steampipe --version # for debugging 19 | - export STEAMPIPE_MOD_LOCATION=`pwd`/steampipe-mod-terraform-aws-compliance 20 | - cd terraform ; ../steampipe check all --output html > steampipe_report.html -------------------------------------------------------------------------------- /all/aws-codebuild/steampipe-cloud-buildspec.yaml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | env: 4 | # Store the Steampipe Cloud host, token and workspace in AWS Secrets Manager 5 | secrets-manager: 6 | STEAMPIPE_CLOUD_TOKEN: steampipe-cloud:STEAMPIPE_CLOUD_TOKEN 7 | WORKSPACE: steampipe-cloud:WORKSPACE 8 | 9 | phases: 10 | install: 11 | run-as: codebuild-user 12 | commands: 13 | # Each CodeBuild container is ephemeral - We need to install steampipe every time 14 | - curl -s -L https://github.com/turbot/steampipe/releases/latest/download/steampipe_linux_amd64.tar.gz | tar -xzf - 15 | - echo "installed steampipe" 16 | - ./steampipe plugin install terraform 17 | - git clone https://github.com/turbot/steampipe-mod-terraform-aws-compliance.git 18 | build: 19 | # Steampipe will return a non-zero exit code with the number of failed checks 20 | # (That may or may not still be the case with the Steampipe cloud version) 21 | on-failure: CONTINUE 22 | run-as: codebuild-user 23 | commands: 24 | - ./steampipe --version # for debugging 25 | # We need to tell Steampipe 
where to find the Terraform Module to use 26 | - export STEAMPIPE_MOD_LOCATION=`pwd`/steampipe-mod-terraform-aws-compliance 27 | # Now run the compliance check 28 | - cd terraform ; ../steampipe check all --snapshot-location $WORKSPACE --snapshot --snapshot-title "Terraform Report" 29 | -------------------------------------------------------------------------------- /all/aws-compliance-quickstart/README.md: -------------------------------------------------------------------------------- 1 | You've [installed Steampipe](https://steampipe.io/downloads) on your macOS, Linux, or WSL2 machine. Now you'd like to run one of the [AWS Compliance](https://hub.steampipe.io/mods/turbot/aws_compliance/controls/benchmark.audit_manager_control_tower) benchmarks. 2 | 3 | This quickstart script will: 4 | 5 | 1. Check if the required AWS plugin is installed, and if not, install it. 6 | 7 | 2. Check if the AWS Compliance mod is installed, and if not, install it. 8 | 9 | 3. Present a menu of 13 benchmarks you can run. 10 | 11 | ``` 12 | ./quickstart.sh 13 | ``` 14 | 15 | Note: The script doesn't handle authentication. If you're running in [AWS CloudShell](https://dev.to/aws-builders/instantly-query-aws-with-sql-in-cloudshell-hd0) you're good to go. Otherwise please see the [plugin documentation](https://hub.steampipe.io/plugins/turbot/aws) for details on the various kinds of credentials and modes of authentication. -------------------------------------------------------------------------------- /all/aws-compliance-quickstart/quickstart.sh: -------------------------------------------------------------------------------- 1 | cd ~ 2 | 3 | PS3='Choose a benchmark to run: ' 4 | 5 | plugins=`steampipe plugin list` 6 | aws_plugin="turbot/aws" 7 | if [[ "$plugins" == *"$aws_plugin"* ]]; then 8 | echo "The AWS plugin is installed" 9 | else 10 | echo "Installing the AWS plugin" 11 | steampipe plugin install aws 12 | fi 13 | 14 | aws_compliance_mod="steampipe-mod-aws-compliance" 15 | if test -e $aws_compliance_mod; then 16 | echo "The AWS Compliance mod is installed" 17 | else 18 | echo "Installing the AWS Compliance mod" 19 | git clone https://github.com/turbot/steampipe-mod-aws-compliance 20 | fi 21 | 22 | cd steampipe-mod-aws-compliance 23 | 24 | 25 | options=( 26 | audit_manager_control_tower 27 | cis_v130 28 | cis_v140 29 | fedramp_low_rev_4 30 | fedramp_moderate_rev_4 31 | foundational_security 32 | gdpr 33 | hipaa 34 | nist_800_53_rev_4 35 | nist_csf 36 | pci_v321 37 | rbi_cyber_security 38 | soc_2 39 | ) 40 | 41 | select opt in "${options[@]}" 42 | 43 | do 44 | steampipe check aws_compliance.benchmark.$opt 45 | exit 46 | done 47 | 48 | 49 | -------------------------------------------------------------------------------- /all/aws-organizations-scripts/README.md: -------------------------------------------------------------------------------- 1 | # Configuring Steampipe for your AWS Organization(s) 2 | 3 | This directory contains scripts to configure the AWS config file (typically `~/.aws/config`) and the Steampipe connections file (typically `~/.steampipe/config/aws.spc`) to support querying across all the AWS accounts in your AWS organization. 4 | 5 | Please refer to the [documentation on steampipe.io](https://steampipe.io/docs/guides/aws-orgs). 
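The config-generating scripts converge on the same output shape. As a rough sketch (the account id, account name, and role name here are placeholders; the exact templates are in the scripts below), each account gets an AWS SDK profile and a matching Steampipe connection, fronted by an `aws_*` aggregator:

```
# ~/.aws/config
[profile account_one]
role_arn = arn:aws:iam::111111111111:role/AUDIT_ROLE
credential_source = Ec2InstanceMetadata
role_session_name = steampipe

# ~/.steampipe/config/aws.spc
connection "aws" {
  plugin      = "aws"
  type        = "aggregator"
  connections = ["aws_*"]
}

connection "aws_account_one" {
  plugin  = "aws"
  profile = "account_one"
  regions = ["*"]
}
```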
6 | 7 | 8 | ## Scripts in this Directory: 9 | 10 | * **[generate_all_credential_reports.sh](https://github.com/turbot/steampipe-samples/tree/main/all/aws-organizations-scripts/generate_all_credential_reports.sh)**\ 11 | This script will iterate through your AWS_CONFIG_FILE (typically `~/.aws/config`) and trigger the creation of an [IAM Credential Report](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html). This script should be run before any dashboard and benchmarks to ensure the IAM Credential report is available to the [AWS Steampipe plugin](https://hub.steampipe.io/plugins/turbot/aws). 12 | 13 | * **[generate_config_for_cross_account_roles.sh](https://github.com/turbot/steampipe-samples/tree/main/all/aws-organizations-scripts/generate_config_for_cross_account_roles.sh)**\ 14 | This script can be used to generate the AWS config file and Steampipe aws.spc file for a single AWS Organization. Usage is: 15 | `./generate_config_for_cross_account_roles.sh [IMDS | LOCAL] <AUDIT_ROLE> <AWS_CONFIG_FILE> [<SOURCE_PROFILE>]`, where: 16 | * `IMDS` if you're running in EC2. 17 | * `LOCAL` if you're running from the local machine. 18 | * `AUDIT_ROLE` is the name of the [cross-account role](https://docs.aws.amazon.com/IAM/latest/UserGuide/tutorial_cross-account-with-roles.html) created in all accounts. 19 | * `AWS_CONFIG_FILE` is where the script will output the AWS SDK profiles. 20 | * `SOURCE_PROFILE` is only required when `LOCAL` is specified. It is the profile with the local credentials used to perform the [AssumeRole](https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html) on the cross-account role. 21 | 22 | * **[generate_config_for_multipayer.py](https://github.com/turbot/steampipe-samples/tree/main/all/aws-organizations-scripts/generate_config_for_multipayer.py)**\ 23 | This script takes a list of AWS Management Accounts, and uses the specified `--rolename` to AssumeRole into the management account, list the child accounts, and build an AWS Config File and aws.spc file. Usage is: 24 | ```bash 25 | usage: generate_config_for_multipayer.py [-h] [--debug] 26 | [--aws-config-file AWS_CONFIG_FILE] 27 | [--steampipe-connection-file STEAMPIPE_CONNECTION_FILE] 28 | --rolename ROLENAME 29 | --payers PAYERS [PAYERS ...] 
30 | [--role-session-name ROLE_SESSION_NAME] 31 | ``` 32 | -------------------------------------------------------------------------------- /all/aws-organizations-scripts/generate_all_credential_reports.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | # 5 | # This script will list the profiles from the AWS config file (defined by $AWS_CONFIG_FILE) and leverage 6 | # the profile to generate an IAM Credential report 7 | # 8 | # This script is useful to run ahead of any AWS Benchmark or Mod where IAM queries are made 9 | # 10 | 11 | if [ -z ${AWS_CONFIG_FILE+x} ] ; then 12 | AWS_CONFIG_FILE="$HOME/.aws/config" 13 | fi 14 | 15 | PROFILES=`grep '\[profile' $AWS_CONFIG_FILE | awk '{print $2}' | sed s/\]//g` 16 | 17 | for p in $PROFILES ; do 18 | echo "Generating credential report in $p" 19 | aws iam generate-credential-report --profile $p --output text 20 | done -------------------------------------------------------------------------------- /all/aws-organizations-scripts/generate_config_for_multipayer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # 4 | # This Python script will leverage the supplied cross-account role to list the accounts in the 5 | # AWS Management (aka payer) accounts supplied to the --payers option. 6 | # 7 | # For all command line options call this script as 8 | # generate_config_for_multipayer.py --help 9 | # 10 | 11 | 12 | import sys, argparse, os 13 | import boto3 14 | from botocore.exceptions import ClientError 15 | import json 16 | 17 | def main(args): 18 | 19 | aws_config_file = f""" 20 | [default] 21 | region=us-east-1 22 | 23 | """ 24 | 25 | # we need to create an aggregate of payers, by the payer names 26 | payer_names = [] 27 | 28 | steampipe_connections = "" 29 | 30 | for payer_id in args.payers: 31 | 32 | accounts = list_accounts(payer_id, args) 33 | for a in accounts: 34 | 35 | sp_account_name = a['Name'].replace('-', '_') 36 | 37 | if a['Id'] in args.payers: 38 | payer_names.append(f"aws_{sp_account_name}") 39 | 40 | aws_config_file += f""" 41 | # {a['Name']} 42 | [profile {a['Name']}] 43 | role_arn = arn:aws:iam::{a['Id']}:role/{args.rolename} 44 | credential_source = Ec2InstanceMetadata 45 | role_session_name = {args.role_session_name} 46 | """ 47 | 48 | 49 | steampipe_connections += f""" 50 | connection "aws_{sp_account_name}" {{ 51 | plugin = "aws" 52 | profile = "{a['Name']}" 53 | regions = ["*"] 54 | options "connection" {{ 55 | cache = true # true, false 56 | cache_ttl = 3600 # expiration (TTL) in seconds 57 | }} 58 | }} 59 | """ 60 | 61 | 62 | steampipe_spc_file = f""" 63 | # Create an aggregator of _all_ the accounts as the first entry in the search path. 
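Because the list below uses the `aws_*` wildcard, every per-account connection appended after it is aggregated automatically.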
64 | connection "aws" {{ 65 | plugin = "aws" 66 | type = "aggregator" 67 | connections = ["aws_*"] 68 | }} 69 | 70 | connection "aws_payer" {{ 71 | plugin = "aws" 72 | type = "aggregator" 73 | regions = ["us-east-1"] # This aggregator is only used for global queries 74 | connections = {json.dumps(payer_names)} 75 | }} 76 | 77 | {steampipe_connections} 78 | 79 | """ 80 | 81 | file = open(os.path.expanduser(args.aws_config_file), "w") 82 | file.write(aws_config_file) 83 | file.close() 84 | 85 | file = open(os.path.expanduser(args.steampipe_connection_file), "w") 86 | file.write(steampipe_spc_file) 87 | file.close() 88 | exit(0) 89 | 90 | 91 | def list_accounts(payer_id, args): 92 | try: 93 | 94 | client = boto3.client('sts') 95 | session = client.assume_role(RoleArn=f"arn:aws:iam::{payer_id}:role/{args.rolename}", RoleSessionName=args.role_session_name) 96 | creds = session['Credentials'] 97 | 98 | org_client = boto3.client('organizations', 99 | aws_access_key_id = creds['AccessKeyId'], 100 | aws_secret_access_key = creds['SecretAccessKey'], 101 | aws_session_token = creds['SessionToken'], 102 | region_name = "us-east-1") 103 | 104 | output = [] 105 | response = org_client.list_accounts(MaxResults=20) 106 | while 'NextToken' in response: 107 | output = output + response['Accounts'] 108 | response = org_client.list_accounts(MaxResults=20, NextToken=response['NextToken']) 109 | 110 | output = output + response['Accounts'] 111 | return(output) 112 | except ClientError as e: 113 | if e.response['Error']['Code'] == 'AWSOrganizationsNotInUseException': 114 | print("AWS Organiations is not in use or this is not a payer account") 115 | return(None) 116 | else: 117 | raise ClientError(e) 118 | 119 | def do_args(): 120 | parser = argparse.ArgumentParser() 121 | parser.add_argument("--debug", help="print debugging info", action='store_true') 122 | parser.add_argument("--aws-config-file", help="Where to write the AWS config file", default="~/.aws/config") 123 | parser.add_argument("--steampipe-connection-file", help="Where to write the AWS config file", default="~/.steampipe/config/aws.spc") 124 | parser.add_argument("--rolename", help="Role Name to Assume", required=True) 125 | parser.add_argument("--payers", nargs='+', help="List of Payers to configure", required=True) 126 | parser.add_argument("--role-session-name", help="Role Session Name to use", default="steampipe") 127 | args = parser.parse_args() 128 | return(args) 129 | 130 | if __name__ == '__main__': 131 | try: 132 | args = do_args() 133 | main(args) 134 | exit(0) 135 | except KeyboardInterrupt: 136 | exit(1) -------------------------------------------------------------------------------- /all/aws-organizations-scripts/generate_config_for_sso.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # For this script, we authenticate to AWS SSO, then get a list of the AWS Accounts and AWS SSO roles available to the user. 5 | # 6 | # This is done via five AWS CLI commands: 7 | # 1. `aws sso-oidc register-client` - creates a client for use in the next steps 8 | # 2. `aws sso-oidc start-device-authorization` - manually create the redirection to the browser that you see when you 9 | # do the normal `aws sso login` 10 | # 3. `aws sso-oidc create-token` - Creates the SSO Authentication token once the user has authorized the connection via 11 | # AWS Identity Center and their identity provider 12 | # 4. 
`aws sso list-accounts` - leveraging the token from the previous command, this lists all the accounts and roles 13 | # the user is allowed to access in AWS Identity Center. 14 | # 5. `aws sso list-account-roles` - list the roles available to the user for each of the accounts 15 | # 16 | # 17 | # Usage: 18 | # ./generate_config_for_sso.sh <SSO_PREFIX> <SSO_ROLE> <STEAMPIPE_CONFIG> <AWS_CONFIG> 19 | # 20 | # Example: 21 | # ./generate_config_for_sso.sh fooli security-audit ~/.steampipe/config/aws.spc ~/.aws/fooli-config 22 | # 23 | # Note: You can specify where both the AWS and Steampipe config files will be written 24 | 25 | 26 | SSO_PREFIX=$1 27 | SSO_ROLE=$2 28 | STEAMPIPE_CONFIG=$3 29 | AWS_CONFIG=$4 30 | 31 | if [ -z "$AWS_CONFIG" ] ; then 32 | echo "Missing required arguments" 33 | echo "Usage: $0 <SSO_PREFIX> <SSO_ROLE> <STEAMPIPE_CONFIG> <AWS_CONFIG>" 34 | exit 1 35 | fi 36 | 37 | START_URL="https://${SSO_PREFIX}.awsapps.com/start" 38 | 39 | aws sso-oidc register-client --client-name 'profiletool' --client-type 'public' --region "${AWS_DEFAULT_REGION}" > client.json 40 | 41 | # Returns: 42 | # { 43 | # "clientId": "REDACTED", 44 | # "clientSecret": "REDACTED", 45 | # "clientIdIssuedAt": 1667594405, 46 | # "clientSecretExpiresAt": 1675370405 47 | # } 48 | 49 | clientid=`cat client.json | jq .clientId -r` 50 | secret=`cat client.json | jq .clientSecret -r` 51 | rm client.json 52 | 53 | aws sso-oidc start-device-authorization --client-id "$clientid" --client-secret "$secret" --start-url "${START_URL}" --region "${AWS_DEFAULT_REGION}" > device_auth.json 54 | 55 | # Returns: 56 | # { 57 | # "deviceCode": "REDACTED", 58 | # "userCode": "RHHX-BCTS", 59 | # "verificationUri": "https://device.sso.us-east-1.amazonaws.com/", 60 | # "verificationUriComplete": "https://device.sso.us-east-1.amazonaws.com/?user_code=RHHX-BCTS", 61 | # "expiresIn": 600, 62 | # "interval": 1 63 | # } 64 | 65 | auth_url=`cat device_auth.json | jq -r .verificationUriComplete` 66 | devicecode=`cat device_auth.json | jq -r .deviceCode` 67 | rm device_auth.json 68 | 69 | open $auth_url 70 | 71 | echo "$auth_url was opened in your browser. Please click allow." 72 | echo "Press Enter when complete" 73 | read s 74 | 75 | token=`aws sso-oidc create-token --client-id "$clientid" --client-secret "$secret" --grant-type 'urn:ietf:params:oauth:grant-type:device_code' --device-code "$devicecode" --region "${AWS_DEFAULT_REGION}" --query accessToken --output text` 76 | 77 | # Returns: 78 | # { 79 | # "accessToken": "REDACTED", 80 | # "tokenType": "Bearer", 81 | # "expiresIn": 14839 82 | # } 83 | 84 | 85 | echo "Creating Steampipe Connections in $STEAMPIPE_CONFIG and AWS Profiles in $AWS_CONFIG" 86 | echo "# Automatically Generated at `date`" > $STEAMPIPE_CONFIG 87 | echo "# AWS profiles, Automatically Generated at `date`" > $AWS_CONFIG 88 | 89 | 90 | cat <<EOF >>$STEAMPIPE_CONFIG 91 | 92 | # Create an aggregator of _all_ the accounts as the first entry in the search path. 93 | connection "aws" { 94 | plugin = "aws" 95 | type = "aggregator" 96 | connections = ["aws_*"] 97 | } 98 | 99 | EOF 100 | 101 | 102 | for a in `aws sso list-accounts --access-token "$token" --region "${AWS_DEFAULT_REGION}" --output text | awk '{print $2":"$3}'` ; do 103 | 104 | acctnum=`echo $a | awk -F: '{print $1}'` 105 | acctname=`echo $a | awk -F: '{print $2}'` 106 | 107 | # Steampipe doesn't like dashes, so we need to swap for underscores 108 | SP_NAME=`echo $acctname | sed s/-/_/g` 109 | 110 | aws sso list-account-roles --account-id "$acctnum" --access-token "$token" --region "${AWS_DEFAULT_REGION}" | grep $SSO_ROLE > /dev/null 111 | if [ $? 
-ne 0 ] ; then 112 | echo "# $SSO_ROLE was not in list of SSO roles available to this user. Skipping account $acctname ($acctnum)" 113 | else 114 | 115 | cat << EOF>> $AWS_CONFIG 116 | 117 | [profile ${acctname}] 118 | sso_start_url = ${START_URL} 119 | sso_region = ${AWS_DEFAULT_REGION} 120 | sso_account_id = ${acctnum} 121 | sso_role_name = ${SSO_ROLE} 122 | EOF 123 | 124 | # And append an entry to the Steampipe config file 125 | cat <<EOF >>$STEAMPIPE_CONFIG 126 | connection "aws_${SP_NAME}" { 127 | plugin = "aws" 128 | profile = "${acctname}" 129 | regions = ["*"] 130 | } 131 | 132 | EOF 133 | 134 | fi 135 | 136 | 137 | 138 | done 139 | 140 | -------------------------------------------------------------------------------- /all/aws-top-10/README.md: -------------------------------------------------------------------------------- 1 | # AWS Top 10 security items 2 | 3 | This is the companion to our blog post [Build a custom benchmark for the top 10 AWS security tips](https://steampipe.io/blog/aws-security-top-10). 4 | 5 | To run it: 6 | 7 | ``` 8 | git clone https://github.com/turbot/steampipe-samples 9 | cd steampipe-samples/all/aws-top-10 10 | steampipe dashboard 11 | ``` -------------------------------------------------------------------------------- /all/aws-top-10/mod.sp: -------------------------------------------------------------------------------- 1 | mod "local" { 2 | title = "AWS Top 10" 3 | 4 | require { 5 | mod "github.com/turbot/steampipe-mod-aws-compliance" { 6 | version = "*" 7 | } 8 | mod "github.com/turbot/steampipe-mod-aws-perimeter" { 9 | version = "*" 10 | } 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /all/aws-top-10/top-10.sp: -------------------------------------------------------------------------------- 1 | locals { 2 | aws_top_10_tags = { 3 | category = "AWS Top 10" 4 | service = "AWS" 5 | type = "AWS Top 10" 6 | } 7 | } 8 | 9 | benchmark "aws_top_10" { 10 | title = "AWS Top 10" 11 | description = "The top 10 AWS..." 12 | 13 | children = [ 14 | benchmark.accurate_account_info, 15 | benchmark.use_mfa, 16 | benchmark.no_secrets, 17 | benchmark.limit_security_groups, 18 | benchmark.intentional_data_policies, 19 | benchmark.centralize_cloudtrail_logs, 20 | benchmark.validate_iam_roles, 21 | benchmark.take_action_on_findings, 22 | benchmark.rotate_keys 23 | ] 24 | 25 | tags = merge(local.aws_top_10_tags, { 26 | type = "Benchmark" 27 | }) 28 | } 29 | 30 | benchmark "accurate_account_info" { 31 | title = "1. Accurate account information" 32 | children = [ 33 | aws_compliance.control.cis_v120_1_18 34 | ] 35 | } 36 | 37 | benchmark "use_mfa" { 38 | title = "2. Use multi-factor authentication (MFA)" 39 | children = [ 40 | aws_compliance.control.iam_root_user_mfa_enabled, 41 | aws_compliance.control.iam_user_mfa_enabled, 42 | aws_compliance.control.iam_user_console_access_mfa_enabled, 43 | ] 44 | } 45 | 46 | benchmark "no_secrets" { 47 | title = "3. No hard-coding secrets" 48 | children = [ 49 | aws_compliance.control.cloudformation_stack_output_no_secrets, 50 | aws_compliance.control.ecs_task_definition_container_environment_no_secret, 51 | aws_compliance.control.ec2_instance_user_data_no_secrets 52 | ] 53 | } 54 | 55 | benchmark "limit_security_groups" { 56 | title = "4. 
Limit security groups" 57 | children = [ 58 | aws_compliance.control.vpc_security_group_restricted_common_ports, 59 | aws_compliance.control.vpc_security_group_allows_ingress_authorized_ports, 60 | aws_compliance.control.vpc_security_group_restrict_ingress_redis_port, 61 | aws_compliance.control.vpc_security_group_allows_ingress_to_oracle_ports, 62 | aws_compliance.control.vpc_security_group_allows_ingress_to_mongodb_ports, 63 | aws_compliance.control.vpc_security_group_allows_ingress_to_memcached_port, 64 | aws_compliance.control.vpc_security_group_allows_ingress_to_cassandra_ports, 65 | aws_compliance.control.vpc_security_group_restrict_ingress_kafka_port, 66 | aws_compliance.control.ec2_instance_no_launch_wizard_security_group 67 | ] 68 | } 69 | 70 | benchmark "intentional_data_policies" { 71 | title = "5. Intentional data policies" 72 | children = [ 73 | aws_compliance.control.foundational_security_s3_6, 74 | aws_compliance.control.s3_public_access_block_bucket_account, 75 | aws_compliance.control.foundational_security_s3_1, 76 | aws_compliance.control.foundational_security_s3_2, 77 | aws_compliance.control.foundational_security_s3_3 78 | ] 79 | } 80 | 81 | benchmark "centralize_cloudtrail_logs" { 82 | title = "6. Centralize CloudTrail logs" 83 | children = [ 84 | aws_compliance.control.foundational_security_cloudtrail_1, 85 | aws_compliance.control.foundational_security_cloudtrail_5, 86 | aws_compliance.benchmark.foundational_security_cloudtrail 87 | ] 88 | } 89 | 90 | benchmark "validate_iam_roles" { 91 | title = "7. Validate IAM roles" 92 | children = [ 93 | aws_compliance.control.cis_v150_1_20, 94 | aws_compliance.control.iam_access_analyzer_enabled_without_findings, 95 | aws_perimeter.control.iam_role_trust_policy_prohibit_public_access 96 | ] 97 | } 98 | 99 | benchmark "take_action_on_findings" { 100 | title = "8. Take action on findings" 101 | children = [ 102 | aws_compliance.control.foundational_security_guardduty_1, 103 | aws_compliance.control.guardduty_no_high_severity_findings, 104 | aws_compliance.control.cis_v150_4_16 105 | 106 | ] 107 | } 108 | 109 | benchmark "rotate_keys" { 110 | title = "9. 
Rotate keys" 111 | children = [ 112 | aws_compliance.control.cis_v120_1_12, 113 | aws_compliance.control.cis_v150_1_13, 114 | aws_compliance.control.cis_v150_1_14 115 | ] 116 | } 117 | 118 | 119 | 120 | 121 | -------------------------------------------------------------------------------- /all/aws-trusts/cross_account_trusts.sql: -------------------------------------------------------------------------------- 1 | WITH org_accounts AS ( 2 | SELECT 3 | id 4 | FROM 5 | payerFIXME.aws_organizations_account 6 | ), 7 | roles AS ( 8 | SELECT 9 | name, 10 | (regexp_match(principals, ':([0-9]+):')) [ 1 ] AS trusted_account, 11 | _ctx ->> 'connection_name' AS account_name 12 | FROM 13 | aws_iam_role AS role, 14 | jsonb_array_elements(role.assume_role_policy_std -> 'Statement') AS statement, 15 | jsonb_array_elements_text(statement -> 'Principal' -> 'AWS') AS principals 16 | ) 17 | SELECT 18 | roles.name as role_name, 19 | roles.account_name, 20 | roles.trusted_account 21 | FROM 22 | org_accounts 23 | RIGHT JOIN roles ON org_accounts.id = roles.trusted_account 24 | WHERE 25 | org_accounts.id IS NULL -------------------------------------------------------------------------------- /all/aws-trusts/foreign_accounts.sql: -------------------------------------------------------------------------------- 1 | WITH instances AS ( 2 | SELECT 3 | instance_id, 4 | instance_type, 5 | account_id, 6 | tags ->> 'Name' AS instance_name, 7 | _ctx ->> 'connection_name' AS account_name, 8 | instance_state, 9 | region, 10 | image_id 11 | FROM 12 | aws_ec2_instance 13 | ), 14 | org_accounts AS ( 15 | SELECT 16 | id 17 | FROM 18 | aws_payer.aws_organizations_account 19 | ), 20 | roles AS ( 21 | SELECT 22 | (regexp_match(principals, ':([0-9]+):')) [ 1 ] AS foreign_account_id 23 | FROM 24 | aws_iam_role AS role, 25 | jsonb_array_elements(role.assume_role_policy_std -> 'Statement') AS statement, 26 | jsonb_array_elements_text(statement -> 'Principal' -> 'AWS') AS principals 27 | ) 28 | 29 | 30 | SELECT DISTINCT 31 | aws_ec2_ami_shared.owner_id AS foreign_account_id 32 | FROM 33 | instances 34 | LEFT JOIN aws_ec2_ami_shared ON aws_ec2_ami_shared.image_id=instances.image_id 35 | WHERE aws_ec2_ami_shared.image_owner_alias != 'amazon' 36 | AND aws_ec2_ami_shared.image_owner_alias != 'self' 37 | 38 | UNION 39 | 40 | SELECT DISTINCT 41 | roles.foreign_account_id AS foreign_account_id 42 | FROM 43 | org_accounts 44 | RIGHT JOIN roles ON org_accounts.id = roles.foreign_account_id 45 | WHERE 46 | org_accounts.id IS NULL -------------------------------------------------------------------------------- /all/aws-trusts/foreign_accounts_mapped_to_cloudmapper.sql: -------------------------------------------------------------------------------- 1 | WITH all_foreign_accounts AS ( 2 | WITH instances AS ( 3 | SELECT 4 | instance_id, 5 | instance_type, 6 | account_id, 7 | tags ->> 'Name' AS instance_name, 8 | _ctx ->> 'connection_name' AS account_name, 9 | instance_state, 10 | region, 11 | image_id 12 | FROM 13 | aws_ec2_instance 14 | ), 15 | org_accounts AS ( 16 | SELECT 17 | id 18 | FROM 19 | aws_payer.aws_organizations_account 20 | ), 21 | roles AS ( 22 | SELECT 23 | (regexp_match(principals, ':([0-9]+):')) [ 1 ] AS foreign_account_id 24 | FROM 25 | aws_iam_role AS role, 26 | jsonb_array_elements(role.assume_role_policy_std -> 'Statement') AS statement, 27 | jsonb_array_elements_text(statement -> 'Principal' -> 'AWS') AS principals 28 | ) 29 | 30 | SELECT DISTINCT 31 | aws_ec2_ami_shared.owner_id AS foreign_account_id 32 | FROM 33 | instances 
34 | LEFT JOIN aws_ec2_ami_shared ON aws_ec2_ami_shared.image_id=instances.image_id 35 | WHERE aws_ec2_ami_shared.image_owner_alias != 'amazon' 36 | AND aws_ec2_ami_shared.image_owner_alias != 'self' 37 | 38 | UNION 39 | 40 | SELECT DISTINCT 41 | roles.foreign_account_id AS foreign_account_id 42 | FROM 43 | org_accounts 44 | RIGHT JOIN roles ON org_accounts.id = roles.foreign_account_id 45 | WHERE 46 | org_accounts.id IS NULL 47 | ), 48 | known_aws_accounts AS ( 49 | WITH name_data AS ( 50 | SELECT 51 | split_part(key_path::text, '.', 1) AS id, 52 | value AS name 53 | FROM 54 | cloudmapper.yml_key_value 55 | WHERE 56 | key_path::text LIKE '%.name%' 57 | ), account_data AS ( 58 | SELECT 59 | split_part(key_path::text, '.', 1) AS id, 60 | value AS account 61 | FROM 62 | cloudmapper.yml_key_value 63 | WHERE 64 | key_path::text LIKE '%.accounts.%' 65 | ) 66 | SELECT 67 | n.name, 68 | a.account 69 | FROM 70 | name_data n 71 | JOIN 72 | account_data a ON n.id = a.id 73 | ORDER BY 74 | n.name, a.account 75 | ) 76 | SELECT 77 | all_foreign_accounts.foreign_account_id, 78 | known_aws_accounts.name 79 | FROM 80 | all_foreign_accounts 81 | LEFT JOIN known_aws_accounts 82 | ON all_foreign_accounts.foreign_account_id = known_aws_accounts.account -------------------------------------------------------------------------------- /all/aws-trusts/foreign_ami_owners.sql: -------------------------------------------------------------------------------- 1 | WITH instances AS ( 2 | SELECT 3 | instance_id, 4 | instance_type, 5 | account_id, 6 | tags ->> 'Name' AS instance_name, 7 | _ctx ->> 'connection_name' AS account_name, 8 | instance_state, 9 | region, 10 | image_id 11 | FROM 12 | aws_ec2_instance 13 | ) 14 | SELECT DISTINCT 15 | aws_ec2_ami_shared.image_id AS image_id, 16 | aws_ec2_ami_shared.owner_id AS image_owner_id, 17 | aws_ec2_ami_shared.image_owner_alias AS image_owner_name, 18 | instances.instance_name, 19 | instances.account_name, 20 | instances.region, 21 | aws_ec2_ami_shared.name AS image_name 22 | FROM 23 | instances 24 | LEFT JOIN aws_ec2_ami_shared ON aws_ec2_ami_shared.image_id=instances.image_id 25 | WHERE aws_ec2_ami_shared.image_owner_alias != 'amazon' 26 | AND aws_ec2_ami_shared.image_owner_alias != 'self' -------------------------------------------------------------------------------- /all/aws-trusts/parse_cloudmapper.sql: -------------------------------------------------------------------------------- 1 | WITH name_data AS ( 2 | SELECT 3 | split_part(key_path::text, '.', 1) AS id, 4 | value AS name 5 | FROM 6 | cloudmapper.yml_key_value 7 | WHERE 8 | key_path::text LIKE '%.name%' 9 | ), account_data AS ( 10 | SELECT 11 | split_part(key_path::text, '.', 1) AS id, 12 | value AS account 13 | FROM 14 | cloudmapper.yml_key_value 15 | WHERE 16 | key_path::text LIKE '%.accounts.%' 17 | ) 18 | SELECT 19 | n.name, 20 | a.account 21 | FROM 22 | name_data n 23 | JOIN 24 | account_data a ON n.id = a.id 25 | ORDER BY 26 | n.name, a.account -------------------------------------------------------------------------------- /all/config-yaml/README.md: -------------------------------------------------------------------------------- 1 | Querying structured files has become another Steampipe superpower. First came [CSV](https://hub.steampipe.io/plugins/csv), then [Terraform](https://hub.steampipe.io/plugins/terraform), and now [Config](https://hub.steampipe.io/plugins/config) which enables queries of YAML/JSON/INI config files. 
This example shows how to query the [OpenAPI example definitions](https://github.com/OAI/OpenAPI-Specification). 2 | 3 | https://user-images.githubusercontent.com/46509/154373235-ccda2f1f-b1c0-431f-8cee-bbd6aad7c2d7.mp4 4 | 5 | 6 | ## Install the plugin 7 | 8 | ``` 9 | steampipe plugin install config 10 | ``` 11 | 12 | ## Clone the repo 13 | 14 | ``` 15 | cd ~ 16 | git clone https://github.com/OAI/OpenAPI-Specification 17 | ``` 18 | 19 | ## Edit `~/.steampipe/config/config.spc` to recursively enumerate .yaml files in the repo 20 | 21 | ``` 22 | connection "config" { 23 | plugin = "config" 24 | yml_paths = [ "~/OpenAPI-Specification/examples/**/*.yaml" ] 25 | } 26 | ``` 27 | 28 | ## Run your queries! 29 | 30 | ### find titles 31 | 32 | ``` 33 | select 34 | replace(path, '/home/jon/OpenAPI-Specification/examples/','') as path, 35 | content -> 'info' -> 'title' as title 36 | from yml_file 37 | ``` 38 | 39 | ### find uri paths 40 | 41 | ``` 42 | select 43 | replace(path, '/home/jon/OpenAPI-Specification/examples/','') as path, 44 | jsonb_object_keys(content -> 'paths') as uri_path 45 | from yml_file 46 | ``` 47 | 48 | ### find components with required elements 49 | 50 | ``` 51 | with schema_keys as ( 52 | select 53 | replace(path, '/home/jon/OpenAPI-Specification/examples/','') as path, 54 | jsonb_object_keys(content -> 'components' -> 'schemas') as schema_key 55 | from yml_file 56 | ), 57 | schemas as ( 58 | select 59 | replace(path, '/home/jon/OpenAPI-Specification/examples/','') as path, 60 | content -> 'components' -> 'schemas' as schema 61 | from yml_file 62 | where content -> 'components' -> 'schemas' is not null 63 | ), 64 | required as ( 65 | select 66 | s.path, 67 | k.schema_key, 68 | s.schema -> k.schema_key -> 'required' as required 69 | from schemas s join schema_keys k using (path) 70 | ) 71 | select * from required where jsonb_typeof(required) = 'array' 72 | ``` 73 | 74 | 75 | -------------------------------------------------------------------------------- /all/control-output-templates/README.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | Steampipe provides a templating mechanism for control outputs, based on the golang `text/template` package. For each output format there's a directory in `~/.steampipe/check/templates/` that minimally includes a file called `output.tmpl`. The name of the directory defines the name of an output format. The content of `output.tmpl` defines how to unpack and format a golang struct, called `Data`, that's passed to the template. 4 | 5 | In [Writing Control Output Templates](https://steampipe.io/docs/develop/writing-control-output-templates) we give an example that defines a new output type called `summary` that works like this: 6 | 7 | ``` 8 | steampipe check --output=summary all 9 | ``` 10 | 11 | ``` 12 | Summary for Zoom Compliance 13 | 14 | total: 185 15 | passed: 119 16 | failed: 66 17 | skipped: 0 18 | ``` 19 | 20 | We'll add more examples here. 21 | 22 | # Suppress OK rows 23 | 24 | You can use the `--output brief` argument to produce a plaintext output that does this. But suppose you want to suppress OK rows using Markdown format? You can do that by altering the existing `~/.steampipe/check/templates/md/output.tmpl`, or you can create a variant Markdown format. We'll do the latter here. 25 | 26 | ``` 27 | $ cd ~/.steampipe/check/templates 28 | $ mkdir briefmd 29 | $ cd briefmd 30 | $ cp ../md/output.tmpl . 
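# start from the stock Markdown template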
31 | ``` 32 | 33 | We'll change from: 34 | 35 | ``` 36 | {{ define "control_row_template" }} 37 | | {{ template "statusicon" .Status }} | {{ .Reason }}| {{range .Dimensions}}`{{.Value}}` {{ end }} | 38 | {{- end }} 39 | ``` 40 | 41 | To: 42 | 43 | ``` 44 | {{ define "control_row_template" }} 45 | {{ if ne .Status "ok" }}{{ template "statusicon" .Status }} | {{ .Reason }}| {{range .Dimensions}}`{{.Value}}` {{ end }} |{{ end }} 46 | {{- end }} 47 | ``` 48 | 49 | That will suppress detail rows when `Status` is `ok`. 50 | 51 | We can export to both formats using a single command. 52 | 53 | ``` 54 | steampipe check benchmark.public_access --export output.md --export output.briefmd 55 | ``` 56 | 57 | Since your Markdown viewer likely won't recognize the `.briefmd` extension, you might want to rename to `output.md`. Alternatively you could use `--output briefmd` and pipe the output to `output.md`. 58 | 59 | ``` 60 | steampipe check benchmark.public_access --output briefmd > output.md 61 | ``` 62 | 63 | Here's a comparison of the original Markdown output and the modified output. Everything else is the same, and you can see the count of `OK` results for each control, but the details are suppressed. 64 | 65 | ![md-vs-briefmd](./md-vs-briefmd.jpg) 66 | 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /all/control-output-templates/md-vs-briefmd.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/turbot/steampipe-samples/b22ba2de97d625a6912fb463bfed3d2b948fdac6/all/control-output-templates/md-vs-briefmd.jpg -------------------------------------------------------------------------------- /all/controls-with-descriptions/.mod.cache.json: -------------------------------------------------------------------------------- 1 | { 2 | "local": { 3 | "github.com/turbot/steampipe-mod-aws-thrifty": { 4 | "name": "github.com/turbot/steampipe-mod-aws-thrifty", 5 | "alias": "aws_thrifty", 6 | "version": "0.12.0", 7 | "constraint": "*" 8 | } 9 | } 10 | } -------------------------------------------------------------------------------- /all/controls-with-descriptions/README.md: -------------------------------------------------------------------------------- 1 | When launched in a directory that contains mod resources, Steampipe builds introspection tables including `steampipe_query`, `steampipe_benchmark`, and `steampipe_control`. This example shows that you can visualize those tables in a dashboard. 2 | 3 | The setup: 4 | 5 | ``` 6 | steampipe mod install github.com/turbot/steampipe-mod-aws-thrifty 7 | 8 | Installed 1 mod: 9 | 10 | local 11 | └── github.com/turbot/steampipe-mod-aws-thrifty@v0.12 12 | 13 | steampipe dashboard 14 | [ Wait ] Loading Workspace 15 | [ Wait ] Starting Dashboard Server 16 | [ Message ] Workspace loaded 17 | [ Ready ] Dashboard server started on 9194 and listening on local 18 | [ Message ] Visit http://localhost:9194 19 | [ Message ] Press Ctrl+C to exit 20 | [ Message ] Initialization complete 21 | 22 | https://localhost:9194 23 | ``` 24 | 25 | The `source_definition` column of `steampipe_control` has the Postgres type `text`, not `jsonb`, so this example illustrates the use of the Postgres `regexp_matches` function to dig out the query and descriptions from the `source_definition`. 
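For instance, a minimal sketch of that extraction (only the introspection table and column named above are assumed; the regex captures the quoted string assigned to `description` in the HCL source):

```
select
  (regexp_matches(source_definition, 'description\s*=\s*"([^"]+)'))[1] as description
from
  steampipe_control;
```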
26 | 27 | Links back to the hub are accomplished using the [HCL href argument with a jq template](https://steampipe.io/docs/reference/mod-resources/table#jq-templates), in combination with HCL variable interpolation of a [local variable](https://steampipe.io/docs/reference/mod-resources/locals). 28 | 29 | 30 | https://user-images.githubusercontent.com/46509/164295189-10413a09-409e-4186-bb58-c51df18fa413.mp4 31 | 32 | 33 | -------------------------------------------------------------------------------- /all/controls-with-descriptions/mod.sp: -------------------------------------------------------------------------------- 1 | mod "local" { 2 | title = "my-aws-thrifty" 3 | require { 4 | mod "github.com/turbot/steampipe-mod-aws-thrifty" { 5 | version = "latest" 6 | } 7 | } 8 | } 9 | 10 | locals { 11 | hub_path = "https://hub.steampipe.io/mods/turbot/aws_thrifty" 12 | } 13 | 14 | dashboard "All-controls-with-descriptions" { 15 | table { 16 | sql = <> 'service' as service, 22 | (regexp_matches(source_definition, 'description\s*=\s*"([^"]+)'))[1] as description 23 | from 24 | steampipe_control 25 | order by 26 | query, control_name 27 | ) 28 | select 29 | service, 30 | control_name, 31 | query, 32 | description 33 | from 34 | controls 35 | order by 36 | service, control_name 37 | EOQ 38 | column "control_name" { 39 | href = "${local.hub_path}{{'/controls/control.' + .'control_name'}}" 40 | } 41 | column "query" { 42 | href = "${local.hub_path}{{'/queries/' + .'query'}}" 43 | } 44 | column "description" { 45 | wrap = "all" 46 | } 47 | column "source_definition" { 48 | wrap = "all" 49 | } 50 | } 51 | } -------------------------------------------------------------------------------- /all/crosstab/mod.sp: -------------------------------------------------------------------------------- 1 | mod "chart_formats" { 2 | } 3 | 4 | 5 | /* 6 | create table data_column_format as 7 | with data (region, buckets, vpcs) as ( 8 | values 9 | ('us-east-1', 10, 7), 10 | ('us-west-1', 11, 3) 11 | ) 12 | select * from data 13 | */ 14 | 15 | /* 16 | create table data_row_format as 17 | with data (region, label, count) as ( 18 | values 19 | ('us-east-1', 'buckets', 10), 20 | ('us-east-1', 'vpcs', 7), 21 | ('us-west-1', 'buckets', 11), 22 | ('us-west-1', 'vpcs', 3) 23 | ) 24 | select * from data 25 | */ 26 | 27 | 28 | dashboard "chart_formats" { 29 | 30 | title = "Steampipe charts: data formats" 31 | 32 | text { 33 | width = 6 34 | value = "See the [documentation](https://steampipe.io/docs/reference/mod-resources/chart#data-format) for details on chart data formats, and [this repo](https://github.com/turbot/steampipe-samples/blob/main/all/crosstab/mod.sp) for source code." 35 | } 36 | 37 | container { 38 | 39 | title = "Data can be provided in 2 formats. Either in classic Excel-like column format, where each series data is contained in its own column" 40 | 41 | table { 42 | width = 6 43 | sql = <> 'Project' as project_tag_value 81 | from 82 | data 83 | where 84 | tag_name = 'Project' 85 | ) 86 | select 87 | arn as resource, 88 | case 89 | when project_tag_values ? project_tag_value then 'ok' 90 | else 'alarm' 91 | end as status, 92 | case 93 | when project_tag_values ? 
project_tag_value then 'Project tag has allowed value' 94 | else 'Project tag has wrong value' 95 | end as reason, 96 | project_tag_value 97 | from 98 | project_tags 99 | EOQ 100 | 101 | } -------------------------------------------------------------------------------- /all/dashboard-remix/README.md: -------------------------------------------------------------------------------- 1 | # Remixing dashboards 2 | 3 | See https://steampipe.io/blog/remixing-dashboards 4 | 5 | ## Getting started 6 | 7 | Run `./install_mods.sh` to install the required mods. 8 | 9 | Run `steampipe dashboard` 10 | 11 | Visit http://localhost:9194 12 | -------------------------------------------------------------------------------- /all/dashboard-remix/install_mods.sh: -------------------------------------------------------------------------------- 1 | steampipe mod install github.com/turbot/steampipe-mod-aws-insights 2 | steampipe mod install github.com/turbot/steampipe-mod-aws-compliance 3 | steampipe mod install github.com/turbot/steampipe-mod-azure-insights 4 | -------------------------------------------------------------------------------- /all/dashboard-remix/mod.sp: -------------------------------------------------------------------------------- 1 | mod "local" { 2 | title = "mymod" 3 | require { 4 | mod "github.com/turbot/steampipe-mod-aws-compliance" { 5 | version = "latest" 6 | } 7 | mod "github.com/turbot/steampipe-mod-aws-insights" { 8 | version = "latest" 9 | } 10 | mod "github.com/turbot/steampipe-mod-aws-tags" { 11 | version = "latest" 12 | } 13 | mod "github.com/turbot/steampipe-mod-azure-insights" { 14 | version = "latest" 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /all/dashboard-remix/named_resources.sp: -------------------------------------------------------------------------------- 1 | card "ec2_instance_status" { 2 | type = "info" 3 | width = 4 4 | args = [ self.input.instance_input.value ] 5 | query = aws_insights.query.ec2_instance_status 6 | } 7 | 8 | table "ec2_instance_overview" { 9 | query = aws_insights.query.ec2_instance_overview 10 | } 11 | 12 | table "ec2_public_ips" { 13 | title = "public ips" 14 | type = "line" 15 | sql = <= 'TLS v1.2' then 'ok' 103 | else 'alarm' 104 | end as status, 105 | case 106 | when max_version >= 'TLS v1.2' then $1 || ' TLS max_version is compliant: ' || max_version 107 | else $1 || ' TLS version is NOT compliant: ' || max_version 108 | end as reason 109 | from 110 | data 111 | EOQ 112 | param "domain" {} 113 | } 114 | 115 | chart "tls_version_basic" { 116 | type = "donut" 117 | width = 6 118 | sql = <= 'TLS v1.2' then 'ok' 104 | else 'alarm' 105 | end as status, 106 | case 107 | when max_version >= 'TLS v1.2' then $1 || ' TLS max_version is compliant: ' || max_version 108 | else $1 || ' TLS version is NOT compliant: ' || max_version 109 | end as reason 110 | from 111 | data 112 | EOQ 113 | } 114 | 115 | chart "tls_version_functional" { 116 | type = "donut" 117 | width = 6 118 | sql = <= 'TLS v1.2' then 'ok' 22 | else 'alarm' 23 | end as status, 24 | case 25 | when max_version >= 'TLS v1.2' then $1 || ' TLS max_version is compliant: ' || max_version 26 | else $1 || ' TLS version is NOT compliant: ' || max_version 27 | end as reason 28 | from 29 | data 30 | EOQ 31 | } 32 | 33 | 34 | dashboard "hcl" { 35 | 36 | /* 37 | text "debug" { 38 | value = replace( 39 | replace( 40 | replace( 41 | replace(local.tls_control_sql, "__SELECT_STATEMENT__", local.tls_connection_sql), 42 | "__DOMAIN__", 43 | "$1" 44 | ), 
45 | "*", 46 | "max(version) as max_version, address" 47 | ), 48 | "'$1'", 49 | "$1" 50 | ) 51 | 52 | } 53 | */ 54 | 55 | benchmark "tls_version_hcl" { 56 | title = "sample benchmark" 57 | children = [ 58 | control.tls_whitehouse, 59 | control.tls_steampipe 60 | ] 61 | } 62 | 63 | card { 64 | width = 3 65 | title = "whitehouse.gov" 66 | sql = replace( 67 | replace(local.tls_connection_sql, "__DOMAIN__", "whitehouse.gov"), 68 | "*", 69 | " max(version) " 70 | ) 71 | } 72 | 73 | card { 74 | width = 3 75 | title = "steampipe.io" 76 | sql = replace( 77 | replace(local.tls_connection_sql, "__DOMAIN__", "steampipe.io"), 78 | "*", 79 | " max(version) " 80 | ) 81 | } 82 | 83 | container { 84 | chart "tls_version_hcl_whitehouse" { 85 | args = [ "whitehouse.gov" ] 86 | title = "whitehouse.gov" 87 | base = chart.tls_version_hcl 88 | width = 6 89 | } 90 | 91 | chart "tls_version_hcl_steampipe" { 92 | args = [ "steampipe.io" ] 93 | title = "steampipe.io" 94 | base = chart.tls_version_hcl 95 | } 96 | } 97 | 98 | container { 99 | 100 | table "tls_version_hcl_whitehouse" { 101 | title = "whitehouse.gov" 102 | width = 6 103 | sql = replace(local.tls_connection_sql, "__DOMAIN__", "whitehouse.gov") 104 | } 105 | 106 | table "tls_version_hcl_steampipe" { 107 | width = 6 108 | title = "steampipe.io" 109 | sql = replace(local.tls_connection_sql, "__DOMAIN__", "steampipe.io") 110 | } 111 | } 112 | 113 | } 114 | 115 | control "tls_whitehouse" { 116 | args = [ "whitehouse.gov" ] 117 | title = "control.tls_whitehouse" 118 | query = query.tls_control_hcl 119 | } 120 | 121 | control "tls_steampipe" { 122 | args = [ "steampipe.io" ] 123 | title = "control.tls_steampipe" 124 | query = query.tls_control_hcl 125 | } 126 | 127 | query "tls_control_hcl" { 128 | sql = replace( 129 | replace( 130 | replace( 131 | replace(local.tls_control_sql, "__SELECT_STATEMENT__", local.tls_connection_sql), 132 | "__DOMAIN__", 133 | "$1" 134 | ), 135 | "*", 136 | "max(version) as max_version, address" 137 | ), 138 | "'$1'", 139 | "$1" 140 | ) 141 | } 142 | 143 | chart "tls_version_hcl" { 144 | type = "donut" 145 | width = 6 146 | sql = replace( 147 | replace( 148 | replace(local.tls_connection_sql, "'__DOMAIN__'", "$1"), 149 | "completed", 150 | "completed group by version" 151 | ), 152 | "*", 153 | " version, count(*)" 154 | ) 155 | } 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | -------------------------------------------------------------------------------- /all/dashboard-sql-reuse/mod.sp: -------------------------------------------------------------------------------- 1 | mod "DashboardSqlReuse" { 2 | } -------------------------------------------------------------------------------- /all/dashboard-sql-reuse/mod.sp~: -------------------------------------------------------------------------------- 1 | dashboard "DashboardSqlReuse" { 2 | } -------------------------------------------------------------------------------- /all/github-actions-oidc/README.md: -------------------------------------------------------------------------------- 1 | # GitHub Actions + OIDC Overview 2 | 3 | OpenID Connect (OIDC) allows your workflows to exchange short-lived tokens directly from your cloud provider. 4 | If you are new to running GitHub Actions with OIDC, please refer to this [link](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect) for more details. 
5 | 6 | ### Preface 7 | 8 | - This is a collection of Terraform samples to get started with GitHub Actions and OIDC. 9 | 10 | - This is designed to support the documents on integrating [Steampipe with GitHub Actions](https://steampipe.io/docs/integrations/github_action), but can also be used beyond the scope of this section. 11 | 12 | - These Terraform samples create an OpenID Connect provider in your cloud environment and an IAM resource (Role, Service Principal, Service Account) with **ReadOnly** access on the account in order to run Steampipe benchmarks. You may choose to apply the principle of least privilege instead. 13 | 14 | - Add all the Terraform outputs with the prefix "OIDC\_" to the GitHub secrets of your repository. 15 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/README.md: -------------------------------------------------------------------------------- 1 | # OpenID Connect (OIDC) in AWS 2 | 3 | OpenID Connect (OIDC) allows your GitHub Actions workflows to access resources in Amazon Web Services (AWS), without needing to store the AWS credentials as long-lived GitHub secrets. You can learn more [here](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect). 4 | 5 | ## Implementation details 6 | 7 | This Terraform template creates the following AWS resources: 8 | 9 | - `AWS > IAM > Identity provider > token.actions.githubusercontent.com` 10 | - `AWS > IAM > Role (steampipe_gh_oidc_demo)` 11 | 12 | **NOTE**: The AWS IAM Role (steampipe_gh_oidc_demo) has the AWS managed policy "arn:aws:iam::aws:policy/ReadOnlyAccess" attached. 13 | 14 | ## Prerequisites 15 | 16 | To run this example, you must install: 17 | 18 | - [Terraform](https://www.terraform.io) Version 0.13, minimum. 19 | - [AWS Terraform Provider](https://registry.terraform.io/providers/hashicorp/aws/latest). 20 | 21 | This example is tested with the following versions. 22 | 23 | - Terraform v0.13.7 24 | - provider registry.terraform.io/hashicorp/aws v3.75.2 25 | 26 | ### Authentication and Configuration 27 | 28 | You must set your AWS environment variables to create the above resources in your AWS account. Please refer to the Terraform documentation on [Authentication and Configuration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#authentication-and-configuration). 29 | 30 | ## Running the example 31 | 32 | Run these commands from the folder that contains the script. 33 | 34 | ### Configure the script 35 | 36 | Update [default.tfvars](default.tfvars) or create a new Terraform configuration file. 37 | 38 | Variables that are exposed by this script are: 39 | 40 | - github_repo 41 | - github_branch 42 | - aws_iam_role_name 43 | 44 | Open the file [variables.tf](variables.tf) for further details. 45 | 46 | ### Initialize Terraform 47 | 48 | If not previously run, initialize Terraform to get all necessary providers. 49 | 50 | Command: `terraform init` 51 | 52 | ### Apply using default configuration 53 | 54 | If seeking to apply the configuration using the configuration file [default.tfvars](default.tfvars). 55 | 56 | Command: `terraform apply -var-file=default.tfvars` 57 | 58 | ### Apply using custom configuration 59 | 60 | If seeking to apply the configuration using a custom configuration file `.tfvars`. 
61 | 62 | Command: `terraform apply -var-file=.tfvars` 63 | 64 | ### Destroy using default configuration 65 | 66 | If seeking to destroy the configuration using the configuration file [default.tfvars](default.tfvars). 67 | 68 | Command: `terraform destroy -var-file=default.tfvars` 69 | 70 | ### Destroy using custom configuration 71 | 72 | If seeking to destroy the configuration using a custom configuration file `.tfvars`. 73 | 74 | Command: `terraform destroy -var-file=.tfvars` 75 | 76 | ## GitHub Actions Workflow 77 | 78 | A sample GitHub Actions Workflow for AWS is available [here](./steampipe-sample-aws-workflow.yml). Add the Terraform output below to the GitHub secrets of your repository. 79 | 80 | - OIDC_AWS_ROLE_TO_ASSUME 81 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/default.tfvars: -------------------------------------------------------------------------------- 1 | # Required 2 | # GitHub repository that needs the access token. Example: octo-org/octo-repo 3 | # github_repo = "" 4 | 5 | # GitHub branch that runs the workflow. Example: demo-branch 6 | # github_branch = "" 7 | 8 | # Name of the AWS IAM Role to create. Example: steampipe_gh_oidc_demo 9 | # aws_iam_role_name = "" 10 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/main.tf: -------------------------------------------------------------------------------- 1 | data "aws_caller_identity" "current" {} 2 | 3 | resource "aws_iam_openid_connect_provider" "github_actions" { 4 | url = "https://token.actions.githubusercontent.com" 5 | 6 | client_id_list = [ 7 | "sts.amazonaws.com", 8 | ] 9 | 10 | thumbprint_list = ["6938fd4d98bab03faadb97b34396831e3780aea1", "1c58a3a8518e8759bf075b76b750d4f2df264fcd"] 11 | # NOTE: If GitHub changes/renews the GitHub Actions SSL certificates then the thumbprint may change. 
12 | # More details at, https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc_verify-thumbprint.html 13 | 14 | tags = { 15 | "owner" : "johndoe@example.com", 16 | "purpose" : "steampipe_gh_oidc_demo" 17 | } 18 | } 19 | 20 | data "aws_iam_policy_document" "openid_trustrelationship" { 21 | statement { 22 | sid = "OIDCTrust" 23 | actions = ["sts:AssumeRoleWithWebIdentity"] 24 | 25 | principals { 26 | type = "Federated" 27 | identifiers = [aws_iam_openid_connect_provider.github_actions.arn] 28 | } 29 | 30 | condition { 31 | test = "StringEquals" 32 | variable = "token.actions.githubusercontent.com:aud" 33 | values = [ 34 | "sts.amazonaws.com", 35 | ] 36 | } 37 | 38 | condition { 39 | test = "StringLike" 40 | variable = "token.actions.githubusercontent.com:sub" 41 | values = [ 42 | "repo:${var.github_repo}:ref:refs/heads/${var.github_branch}", 43 | # Syntax: repo::ref:refs/heads/branchName 44 | # Example: repo:octo-org/octo-repo:ref:refs/heads/demo-branch 45 | ] 46 | } 47 | } 48 | } 49 | 50 | resource "aws_iam_role" "openid_role" { 51 | name = var.aws_iam_role_name 52 | path = "/steampipe/" 53 | assume_role_policy = data.aws_iam_policy_document.openid_trustrelationship.json 54 | max_session_duration = 3600 55 | managed_policy_arns = ["arn:aws:iam::aws:policy/ReadOnlyAccess"] 56 | 57 | tags = { 58 | "owner" : "johndoe@example.com", 59 | "purpose" : "steampipe_gh_oidc_demo" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/output.tf: -------------------------------------------------------------------------------- 1 | output "account_id" { 2 | description = "Current AWS Account ID" 3 | value = data.aws_caller_identity.current.account_id 4 | } 5 | 6 | output "openid_arn" { 7 | description = "ARN of the OIDC" 8 | value = aws_iam_openid_connect_provider.github_actions.arn 9 | } 10 | 11 | output "OIDC_AWS_ROLE_TO_ASSUME" { 12 | description = "ARN of the IAM Assume Role. Add this to your GitHub Secrets" 13 | value = aws_iam_role.openid_role.arn 14 | } 15 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/providers.tf: -------------------------------------------------------------------------------- 1 | # AWS Provider source and version being used 2 | terraform { 3 | required_providers { 4 | aws = { 5 | source = "hashicorp/aws" 6 | version = "~> 4.0" 7 | } 8 | } 9 | } 10 | 11 | # Configure the AWS Provider 12 | provider "aws" { 13 | region = "us-east-1" 14 | } 15 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/steampipe-sample-aws-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Steampipe AWS Benchmark 2 | on: 3 | # run a workflow manually on demand 4 | workflow_dispatch: 5 | # runs on the 7th, 14th, 21st and 28th day of every month at 04:00 UTC i.e., 09:30 AM IST 6 | schedule: 7 | - cron: "0 4 7,14,21,28 * *" 8 | 9 | # These permissions are needed to interact with GitHub's OIDC Token endpoint. 
10 | permissions: 11 | id-token: write # This is required for requesting the JWT 12 | contents: write # This is required for actions/checkout 13 | 14 | jobs: 15 | aws: 16 | name: Steampipe AWS Demo 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - name: "Checkout working branch" 21 | uses: actions/checkout@v3 22 | 23 | - name: "Configure AWS credentials" 24 | id: config-aws-auth 25 | uses: aws-actions/configure-aws-credentials@v2 26 | with: 27 | role-to-assume: ${{ secrets.OIDC_AWS_ROLE_TO_ASSUME }} 28 | role-session-name: "steampipe-demo" 29 | role-duration-seconds: 900 30 | aws-region: "us-east-1" 31 | # Steampipe benchmark runs against this region unless a connection is specified in aws.spc file. 32 | # More details at https://hub.steampipe.io/plugins/turbot/aws#credentials-from-environment-variables 33 | 34 | - name: "Install Steampipe cli and plugin" 35 | id: steampipe-installation 36 | uses: turbot/steampipe-action-setup@v1 37 | with: 38 | steampipe-version: 'latest' 39 | plugin-connections: | 40 | connection "aws" { 41 | plugin = "aws" 42 | } 43 | 44 | - name: "Run Steampipe benchmark" 45 | id: steampipe-benchmark 46 | continue-on-error: true 47 | run: | 48 | 49 | # Install the Steampipe AWS Compliance mod 50 | steampipe mod install github.com/turbot/steampipe-mod-aws-compliance 51 | cd .steampipe/mods/github.com/turbot/steampipe-mod-aws-compliance* 52 | 53 | # Run the AWS CIS v2.0.0 benchmark 54 | steampipe check benchmark.cis_v200 --export=$GITHUB_WORKSPACE/steampipe/benchmarks/aws/cis_v200_"$(date +"%d_%B_%Y")".html --output=none 55 | 56 | - name: "Commit the file to github" 57 | id: push-to-gh 58 | working-directory: steampipe/benchmarks/aws 59 | run: | 60 | 61 | git config user.name github-actions 62 | git config user.email github-actions@github.com 63 | git add cis_v200_"$(date +"%d_%B_%Y")".html 64 | git commit -m "Add Steampipe Benchmark Results" 65 | git push 66 | 67 | - name: Cleanup Steampipe files 68 | id: cleanup 69 | run: rm -rf ~/.steampipe && rm -rf .steampipe 70 | -------------------------------------------------------------------------------- /all/github-actions-oidc/aws/variables.tf: -------------------------------------------------------------------------------- 1 | variable "github_repo" { 2 | type = string 3 | description = "GitHub repository that needs the access token. Example: octo-org/octo-repo" 4 | } 5 | 6 | variable "github_branch" { 7 | type = string 8 | description = "GitHub branch that runs the workflow. Example: demo-branch" 9 | } 10 | 11 | variable "aws_iam_role_name" { 12 | type = string 13 | description = "Name of the AWS IAM Role to create. Example: steampipe_gh_oidc_demo" 14 | default = "steampipe-gh-oidc-demo" 15 | } 16 | -------------------------------------------------------------------------------- /all/github-actions-oidc/azure/README.md: -------------------------------------------------------------------------------- 1 | # OpenID Connect (OIDC) in Azure 2 | 3 | OpenID Connect (OIDC) allows your GitHub Actions workflows to access resources in Azure, without needing to store the Azure credentials as long-lived GitHub secrets. You can learn more [here](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect). 
4 | 5 | ## Implementation details 6 | 7 | This Terraform template creates the following Azure resources: 8 | 9 | - `Azure > Active Directory > Service Principal (steampipe_gh_oidc_demo)` 10 | - `Azure > Active Directory > Service Principal (steampipe_gh_oidc_demo) > Federated credential` 11 | 12 | **NOTE**: The Azure AD Service Principal (steampipe_gh_oidc_demo) has the BuiltInRole `Reader` assigned. 13 | 14 | ## Prerequisites 15 | 16 | To run this example, you must install: 17 | 18 | - [Terraform](https://www.terraform.io) Version 0.13, minimum. 19 | - [Azure](https://registry.terraform.io/providers/hashicorp/azurerm/latest) and [AzureAD](https://registry.terraform.io/providers/hashicorp/azuread/latest) Terraform Providers. 20 | 21 | This example is tested with the following versions. 22 | 23 | - Terraform v0.13.7 24 | - provider registry.terraform.io/hashicorp/azuread v2.30.0 25 | - provider registry.terraform.io/hashicorp/azurerm v3.29.1 26 | 27 | **NOTE**: Once the Azure AD Application is created, you have to manually [Grant admin consent for tenant](https://learn.microsoft.com/en-us/azure/active-directory/manage-apps/grant-admin-consent). 28 | 29 | ### Authentication and Configuration 30 | 31 | You must set your Azure environment variables to create the above resources in your Azure subscription. Please refer to the Terraform documentation on Authentication and Configuration for [AzureAD](https://registry.terraform.io/providers/hashicorp/azuread/latest/docs#authenticating-to-azure-active-directory) and [AzureRM](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs#authenticating-to-azure). 32 | 33 | ## Running the example 34 | 35 | Run these commands from the folder that contains the script. 36 | 37 | ### Configure the script 38 | 39 | Update [default.tfvars](default.tfvars) or create a new Terraform configuration file. 40 | 41 | Variables that are exposed by this script are: 42 | 43 | - github_repo 44 | - github_branch 45 | - azuread_application_name 46 | 47 | Open the file [variables.tf](variables.tf) for further details. 48 | 49 | ### Initialize Terraform 50 | 51 | If not previously run, initialize Terraform to get all necessary providers. 52 | 53 | Command: `terraform init` 54 | 55 | ### Apply using default configuration 56 | 57 | If seeking to apply the configuration using the configuration file [default.tfvars](default.tfvars). 58 | 59 | Command: `terraform apply -var-file=default.tfvars` 60 | 61 | ### Apply using custom configuration 62 | 63 | If seeking to apply the configuration using a custom configuration file `.tfvars`. 64 | 65 | Command: `terraform apply -var-file=.tfvars` 66 | 67 | ### Destroy using default configuration 68 | 69 | If seeking to destroy the configuration using the configuration file [default.tfvars](default.tfvars). 70 | 71 | Command: `terraform destroy -var-file=default.tfvars` 72 | 73 | ### Destroy using custom configuration 74 | 75 | If seeking to destroy the configuration using a custom configuration file `.tfvars`. 76 | 77 | Command: `terraform destroy -var-file=.tfvars` 78 | 79 | ## GitHub Actions Workflow 80 | 81 | A sample GitHub Actions Workflow for Azure is available [here](./steampipe-sample-azure-workflow.yml). Add the GitHub secrets below to your repository. 
82 | 83 | - OIDC_AZURE_CLIENT_ID 84 | - OIDC_AZURE_TENANT_ID 85 | - OIDC_AZURE_SUBSCRIPTION_ID 86 | -------------------------------------------------------------------------------- /all/github-actions-oidc/azure/default.tfvars: -------------------------------------------------------------------------------- 1 | # Required 2 | # GitHub repository that needs the access token. Example: octo-org/octo-repo 3 | # github_repo = "" 4 | 5 | # GitHub branch that runs the workflow. Example: demo-branch 6 | # github_branch = "" 7 | 8 | # Name of the Azure AD Application to create. Example: steampipe_gh_oidc_demo 9 | # azuread_application_name = "" 10 | -------------------------------------------------------------------------------- /all/github-actions-oidc/azure/main.tf: -------------------------------------------------------------------------------- 1 | data "azurerm_client_config" "current" {} 2 | data "azuread_application_published_app_ids" "well_known" {} 3 | 4 | # Get information about the configured Azure subscription 5 | data "azurerm_subscription" "primary" {} 6 | 7 | # Create an AD Application 8 | resource "azuread_application" "github_actions_app" { 9 | display_name = var.azuread_application_name 10 | 11 | # Trying to add the basic permissions listed at https://hub.steampipe.io/plugins/turbot/azuread#credentials 12 | required_resource_access { 13 | resource_app_id = "00000003-0000-0000-c000-000000000000" # Microsoft Graph 14 | 15 | resource_access { 16 | id = "9a5d68dd-52b0-4cc2-bd40-abcf44ac3a30" # Application.Read.All 17 | type = "Role" 18 | } 19 | 20 | resource_access { 21 | id = "b0afded3-3588-46d8-8b3d-9842eff778da" # AuditLog.Read.All 22 | type = "Role" 23 | } 24 | 25 | resource_access { 26 | id = "7ab1d382-f21e-4acd-a863-ba3e13f7da61" # Directory.Read.All 27 | type = "Role" 28 | } 29 | 30 | resource_access { 31 | id = "dbb9058a-0e50-45d7-ae91-66909b5d4664" # Domain.Read.All 32 | type = "Role" 33 | } 34 | 35 | resource_access { 36 | id = "5b567255-7703-4780-807c-7be8301ae99b" # Group.Read.All 37 | type = "Role" 38 | } 39 | 40 | resource_access { 41 | id = "e321f0bb-e7f7-481e-bb28-e3b0b32d4bd0" # IdentityProvider.Read.All 42 | type = "Role" 43 | } 44 | 45 | resource_access { 46 | id = "246dd0d5-5bd0-4def-940b-0421030a5b68" # Policy.Read.All 47 | type = "Role" 48 | } 49 | 50 | resource_access { 51 | id = "df021288-bdef-4463-88db-98f22de89214" # User.Read.All 52 | type = "Role" 53 | } 54 | 55 | } 56 | } 57 | 58 | # Create a Service Principal from that Application 59 | resource "azuread_service_principal" "github_actions_sp" { 60 | application_id = azuread_application.github_actions_app.application_id 61 | } 62 | 63 | # Grant our service principal "Reader" access over the subscription 64 | resource "azurerm_role_assignment" "github_actions_sp_permissions" { 65 | scope = data.azurerm_subscription.primary.id 66 | role_definition_name = "Reader" 67 | principal_id = azuread_service_principal.github_actions_sp.object_id 68 | } 69 | 70 | # Create a federated identity credential for the application 71 | resource "azuread_application_federated_identity_credential" "federated_creds" { 72 | application_object_id = azuread_application.github_actions_app.object_id 73 | display_name = var.azuread_application_name 74 | description = "Run Steampipe on GitHub Actions Demo" 75 | audiences = ["api://AzureADTokenExchange"] 76 | issuer = "https://token.actions.githubusercontent.com" 77 | subject = "repo:${var.github_repo}:ref:refs/heads/${var.github_branch}" 78 | } 79 | 
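# Note: a federated identity credential matches the token's subject claim
# exactly -- there are no wildcards -- so the credential above trusts pushes
# to a single branch only. As a hypothetical sketch (not applied by default),
# runs triggered by pull requests could be trusted with a second credential,
# since GitHub issues those tokens with the subject "repo:<org>/<repo>:pull_request":
#
# resource "azuread_application_federated_identity_credential" "federated_creds_pr" {
#   application_object_id = azuread_application.github_actions_app.object_id
#   display_name          = "${var.azuread_application_name}-pr"
#   description           = "Run Steampipe on GitHub Actions Demo (pull requests)"
#   audiences             = ["api://AzureADTokenExchange"]
#   issuer                = "https://token.actions.githubusercontent.com"
#   subject               = "repo:${var.github_repo}:pull_request"
# }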
-------------------------------------------------------------------------------- /all/github-actions-oidc/azure/output.tf: -------------------------------------------------------------------------------- 1 | output "OIDC_AZURE_CLIENT_ID" { 2 | description = "Client ID. Add this to your GitHub Secrets" 3 | value = azuread_application.github_actions_app.application_id 4 | } 5 | 6 | output "OIDC_AZURE_SUBSCRIPTION_ID" { 7 | description = "Subscription ID. Add this to your GitHub Secrets" 8 | value = data.azurerm_client_config.current.subscription_id 9 | } 10 | 11 | output "OIDC_AZURE_TENANT_ID" { 12 | description = "Directory (Tenant) ID. Add this to your GitHub Secrets" 13 | value = data.azurerm_client_config.current.tenant_id 14 | } 15 | -------------------------------------------------------------------------------- /all/github-actions-oidc/azure/providers.tf: -------------------------------------------------------------------------------- 1 | # Azure Provider source and version being used 2 | terraform { 3 | required_providers { 4 | azurerm = { 5 | source = "hashicorp/azurerm" 6 | version = ">=3.0.0" 7 | } 8 | azuread = { 9 | source = "hashicorp/azuread" 10 | version = ">=2.15.0" 11 | } 12 | } 13 | } 14 | 15 | # Configure the Azure Active Directory Provider 16 | provider "azuread" { 17 | } 18 | 19 | 20 | # Configure the Microsoft Azure Provider 21 | provider "azurerm" { 22 | features {} 23 | } 24 | -------------------------------------------------------------------------------- /all/github-actions-oidc/azure/steampipe-sample-azure-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Steampipe Azure Benchmark 2 | on: 3 | # run a workflow manually on demand 4 | workflow_dispatch: 5 | # runs on the 7th, 14th, 21st and 28th day of every month at 04:00 UTC i.e., 09:30 AM IST 6 | schedule: 7 | - cron: "0 4 7,14,21,28 * *" 8 | 9 | # These permissions are needed to interact with GitHub's OIDC Token endpoint. 
10 | permissions: 11 | id-token: write # This is required for requesting the JWT 12 | contents: write # This is required for actions/checkout 13 | 14 | jobs: 15 | azure: 16 | name: Steampipe Azure Demo 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - name: "Checkout working branch" 21 | uses: actions/checkout@v3 22 | 23 | - name: "Az CLI login" 24 | id: config-azure-auth 25 | uses: azure/login@v1 26 | with: 27 | client-id: ${{ secrets.OIDC_AZURE_CLIENT_ID }} 28 | tenant-id: ${{ secrets.OIDC_AZURE_TENANT_ID }} 29 | subscription-id: ${{ secrets.OIDC_AZURE_SUBSCRIPTION_ID }} 30 | 31 | - name: "Install Steampipe cli and plugin" 32 | id: steampipe-installation 33 | run: | 34 | 35 | # Install Steampipe CLI 36 | sudo /bin/sh -c "$(curl -fsSL https://raw.githubusercontent.com/turbot/steampipe/main/install.sh)" 37 | # Check steampipe version 38 | steampipe -v 39 | # Install Azure and AzureAD plugins 40 | steampipe plugin install azure 41 | steampipe plugin install azuread 42 | 43 | - name: "Run Steampipe benchmark" 44 | id: steampipe-benchmark 45 | continue-on-error: true 46 | run: | 47 | 48 | # Install the Steampipe Azure Compliance mod 49 | steampipe mod install github.com/turbot/steampipe-mod-azure-compliance 50 | cd .steampipe/mods/github.com/turbot/steampipe-mod-azure-compliance* 51 | # Run the Azure CIS v2.0.0 benchmark 52 | steampipe check benchmark.cis_v200 --export=$GITHUB_WORKSPACE/steampipe/benchmarks/azure/cis_v200_"$(date +"%d_%B_%Y")".html --output=none 53 | 54 | - name: "Commit the file to github" 55 | id: push-to-gh 56 | working-directory: steampipe/benchmarks/azure 57 | run: | 58 | 59 | git config user.name github-actions 60 | git config user.email github-actions@github.com 61 | git add cis_v200_"$(date +"%d_%B_%Y")".html 62 | git commit -m "Add Steampipe Benchmark Results" 63 | git push 64 | 65 | - name: Az logout 66 | id: cleanup-azure 67 | uses: azure/CLI@v1 68 | with: 69 | inlineScript: | 70 | az logout 71 | az cache purge 72 | az account clear 73 | 74 | - name: Cleanup Steampipe files 75 | id: cleanup-steampipe 76 | run: rm -rf ~/.steampipe && rm -rf .steampipe -------------------------------------------------------------------------------- /all/github-actions-oidc/azure/variables.tf: -------------------------------------------------------------------------------- 1 | variable "github_repo" { 2 | type = string 3 | description = "GitHub repository that needs the access token. Example: octo-org/octo-repo" 4 | } 5 | 6 | variable "github_branch" { 7 | type = string 8 | description = "GitHub branch that runs the workflow. Example: demo-branch" 9 | } 10 | 11 | variable "azuread_application_name" { 12 | type = string 13 | description = "Name of the Azure AD Application to create. Example: steampipe_gh_oidc_demo" 14 | default = "steampipe_gh_oidc_demo" 15 | } 16 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/README.md: -------------------------------------------------------------------------------- 1 | # OpenID Connect (OIDC) in GCP 2 | 3 | OpenID Connect (OIDC) allows your GitHub Actions workflows to access resources in Google Cloud Platform (GCP), without needing to store the GCP credentials as long-lived GitHub secrets. You can learn more [here](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect). 
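One subtlety worth noting before diving in: the Workload Identity Pool provider in [main.tf](main.tf) maps a custom attribute as `assertion.repository+assertion.ref`, which concatenates the two GitHub token claims with no separator, and the IAM binding builds its `principalSet://` member the same way. A sketch of the resulting value (placeholder repo and branch):

```hcl
locals {
  # GitHub OIDC token claims (placeholder values)
  example_repository = "octo-org/octo-repo"     # assertion.repository
  example_ref        = "refs/heads/demo-branch" # assertion.ref

  # attribute.full = assertion.repository+assertion.ref -- note: no separator
  example_attribute_full = "${local.example_repository}${local.example_ref}"
  # => "octo-org/octo-reporefs/heads/demo-branch"
}
```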
4 | 5 | ## Implementation details 6 | 7 | This Terraform template creates the following GCP resources: 8 | 9 | - `GCP > IAM > Workload Identity Pool (steampipe-gh-oidc-demo)` 10 | - `GCP > IAM > Workload Identity Pool Provider (steampipe-gh-oidc-demo-provider)` 11 | - `GCP > IAM > Service Account (steampipe-gh-oidc-demo-sa)` 12 | 13 | **NOTE**: The GCP Service Account (steampipe-gh-oidc-demo-sa) has the GCP predefined role "roles/viewer" assigned. 14 | 15 | ## Prerequisites 16 | 17 | To run this example, you must install: 18 | 19 | - [Terraform](https://www.terraform.io) Version 0.13, minimum. 20 | - [GCP Terraform Provider](https://registry.terraform.io/providers/hashicorp/google/latest). 21 | 22 | This example is tested with the following versions. 23 | 24 | - Terraform v0.13.7 25 | - provider registry.terraform.io/hashicorp/google v4.31.0 26 | 27 | ### Authentication and Configuration 28 | 29 | You must set your GCP environment variables to create the above resources in your GCP project. Please refer to the Terraform documentation on [Authentication and Configuration](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference#authentication). 30 | 31 | ## Running the example 32 | 33 | Run these commands from the folder that contains the script. 34 | 35 | ### Configure the script 36 | 37 | Update [default.tfvars](default.tfvars) or create a new Terraform configuration file. 38 | 39 | Variables that are exposed by this script are: 40 | 41 | - project_id 42 | - github_repo 43 | - github_branch 44 | - pool_id 45 | 46 | Open the file [variables.tf](variables.tf) for further details. 47 | 48 | ### Initialize Terraform 49 | 50 | If not previously run, initialize Terraform to get all necessary providers. 51 | 52 | Command: `terraform init` 53 | 54 | ### Apply using default configuration 55 | 56 | If seeking to apply the configuration using the configuration file [default.tfvars](default.tfvars). 57 | 58 | Command: `terraform apply -var-file=default.tfvars` 59 | 60 | ### Apply using custom configuration 61 | 62 | If seeking to apply the configuration using a custom configuration file `.tfvars`. 63 | 64 | Command: `terraform apply -var-file=.tfvars` 65 | 66 | ### Destroy using default configuration 67 | 68 | If seeking to destroy the configuration using the configuration file [default.tfvars](default.tfvars). 69 | 70 | Command: `terraform destroy -var-file=default.tfvars` 71 | 72 | ### Destroy using custom configuration 73 | 74 | If seeking to destroy the configuration using a custom configuration file `.tfvars`. 75 | 76 | Command: `terraform destroy -var-file=.tfvars` 77 | 78 | ## GitHub Actions Workflow 79 | 80 | A sample GitHub Actions Workflow for GCP is available [here](./steampipe-sample-gcp-workflow.yml). Add the GitHub secrets below to your repository. 81 | 82 | - OIDC_GCP_IDENTITY_PROVIDER 83 | - OIDC_GCP_SERVICE_ACCOUNT 84 | - OIDC_GCP_PROJECT 85 | - OIDC_SLACK_CHANNEL_ID 86 | - OIDC_SLACK_OAUTH_ACCESS_TOKEN 87 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/default.tfvars: -------------------------------------------------------------------------------- 1 | # Required 2 | # GitHub repository that needs the access token. Example: octo-org/octo-repo 3 | # github_repo = "" 4 | 5 | # GitHub branch that runs the workflow. Example: demo-branch 6 | # github_branch = "" 7 | 8 | # Workload Identity Pool ID to create. 
Example: steampipe_gh_oidc_demo 9 | # pool_id = "" 10 | 11 | # The project id to create Workload Identity Pool. Example: my-gcp-project-123 12 | # project_id = "" 13 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/main.tf: -------------------------------------------------------------------------------- 1 | resource "google_iam_workload_identity_pool" "oidc_pool" { 2 | project = var.project_id 3 | workload_identity_pool_id = var.pool_id 4 | description = "Workload Identity Pool managed by Terraform" 5 | disabled = false 6 | } 7 | 8 | resource "google_iam_workload_identity_pool_provider" "oidc_pool_provider" { 9 | project = var.project_id 10 | workload_identity_pool_id = google_iam_workload_identity_pool.oidc_pool.workload_identity_pool_id 11 | workload_identity_pool_provider_id = "${var.pool_id}-provider" 12 | description = "Workload Identity Pool Provider managed by Terraform" 13 | attribute_mapping = { 14 | "google.subject" = "assertion.sub" 15 | "attribute.full" = "assertion.repository+assertion.ref" 16 | } 17 | oidc { 18 | issuer_uri = "https://token.actions.githubusercontent.com" 19 | } 20 | } 21 | 22 | resource "google_service_account" "oidc_sa" { 23 | project = var.project_id 24 | account_id = "${var.pool_id}-sa" 25 | display_name = "${var.pool_id}-sa" 26 | } 27 | 28 | resource "google_project_iam_binding" "sa_viewer_role" { 29 | project = var.project_id 30 | role = "roles/viewer" 31 | 32 | members = ["serviceAccount:${google_service_account.oidc_sa.email}"] 33 | } 34 | 35 | resource "google_service_account_iam_member" "wif_sa" { 36 | service_account_id = google_service_account.oidc_sa.id 37 | role = "roles/iam.workloadIdentityUser" 38 | member = "principalSet://iam.googleapis.com/${google_iam_workload_identity_pool.oidc_pool.name}/attribute.full/${var.github_repo}refs/heads/${var.github_branch}" 39 | } 40 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/output.tf: -------------------------------------------------------------------------------- 1 | output "OIDC_GCP_PROJECT" { 2 | description = "GCP Project ID. Add this to your GitHub Secrets" 3 | value = var.project_id 4 | } 5 | 6 | output "OIDC_GCP_IDENTITY_PROVIDER" { 7 | description = "GCP Workload Identity Provider ID. Add this to your GitHub Secrets" 8 | value = "${google_iam_workload_identity_pool.oidc_pool.name}/providers/${google_iam_workload_identity_pool_provider.oidc_pool_provider.workload_identity_pool_provider_id}" 9 | } 10 | 11 | output "OIDC_GCP_SERVICE_ACCOUNT" { 12 | description = "GCP Service Account Email ID. 
Add this to your GitHub Secrets" 13 | value = google_service_account.oidc_sa.email 14 | } 15 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/providers.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_providers { 3 | 4 | google = { 5 | source = "hashicorp/google" 6 | version = ">=4.31.0" 7 | } 8 | } 9 | } 10 | 11 | provider "google" { 12 | project = var.project_id 13 | } 14 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/steampipe-sample-gcp-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Steampipe GCP Benchmark 2 | on: 3 | # run a workflow manually on demand 4 | workflow_dispatch: 5 | # runs on the 7th, 14th, 21st and 28th day of every month at 04:00 UTC i.e., 09:30 AM IST 6 | schedule: 7 | - cron: "0 4 7,14,21,28 * *" 8 | 9 | # These permissions are needed to interact with GitHub's OIDC Token endpoint. 10 | permissions: 11 | id-token: write # This is required for requesting the JWT 12 | contents: read # This is required for actions/checkout 13 | 14 | jobs: 15 | gcp: 16 | name: Steampipe GCP Demo 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - name: "Checkout working branch" 21 | uses: actions/checkout@v3 22 | 23 | - name: "Configure GCP credentials" 24 | id: config-gcp-auth 25 | uses: "google-github-actions/auth@v0" 26 | with: 27 | create_credentials_file: "true" 28 | workload_identity_provider: ${{ secrets.OIDC_GCP_IDENTITY_PROVIDER }} 29 | service_account: ${{ secrets.OIDC_GCP_SERVICE_ACCOUNT }} 30 | access_token_lifetime: 900s 31 | 32 | - name: "Install Steampipe cli and plugin" 33 | id: steampipe-installation 34 | run: | 35 | 36 | # Install Steampipe CLI 37 | sudo /bin/sh -c "$(curl -fsSL https://raw.githubusercontent.com/turbot/steampipe/main/install.sh)" 38 | # Check steampipe version 39 | steampipe -v 40 | # Install GCP plugin 41 | steampipe plugin install gcp 42 | 43 | - name: "Run Steampipe benchmark" 44 | id: steampipe-benchmark 45 | run: | 46 | 47 | export CLOUDSDK_CORE_PROJECT=${{ secrets.OIDC_GCP_PROJECT }} 48 | # Install the Steampipe GCP Compliance mod 49 | steampipe mod install github.com/turbot/steampipe-mod-gcp-compliance 50 | cd .steampipe/mods/github.com/turbot/steampipe-mod-gcp-compliance* 51 | # Run the GCP CIS v1.3.0 benchmark 52 | steampipe check benchmark.cis_v130 --export=steampipe_gcp_cis_v130_"$(date +"%d_%B_%Y")".html --output=none 53 | 54 | - name: "Slack Notification" 55 | id: slack-upload 56 | env: 57 | slack_channel: ${{ secrets.OIDC_SLACK_CHANNEL_ID }} 58 | slack_token: ${{ secrets.OIDC_SLACK_OAUTH_ACCESS_TOKEN }} 59 | run: | 60 | 61 | cd .steampipe/mods/github.com/turbot/steampipe-mod-gcp-compliance* 62 | curl -F file=@"steampipe_gcp_cis_v130_"$(date +"%d_%B_%Y")".html" -F "initial_comment=Steampipe GCP CIS v130 Benchmark" -F "title=steampipe_gcp_cis_v130_"$(date +"%d_%B_%Y")".html" -F channels=$slack_channel -H "Authorization: Bearer $slack_token " https://slack.com/api/files.upload -s -o /dev/null 63 | 64 | - name: Cleanup Steampipe files 65 | id: cleanup 66 | run: rm -rf ~/.steampipe && rm -rf .steampipe 67 | -------------------------------------------------------------------------------- /all/github-actions-oidc/gcp/variables.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | type = string 3 | description = "The project id to create 
Workload Identity Pool" 4 | } 5 | 6 | variable "github_repo" { 7 | type = string 8 | description = "GitHub repository that needs the access token. Example: octo-org/octo-repo" 9 | } 10 | 11 | variable "github_branch" { 12 | type = string 13 | description = "GitHub branch that runs the workflow. Example: demo-branch" 14 | } 15 | 16 | variable "pool_id" { 17 | type = string 18 | description = "Workload Identity Pool ID" 19 | default = "steampipe-gh-oidc-demo" 20 | } 21 | -------------------------------------------------------------------------------- /all/github-activity/github-activity.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/turbot/steampipe-samples/b22ba2de97d625a6912fb463bfed3d2b948fdac6/all/github-activity/github-activity.gif -------------------------------------------------------------------------------- /all/github-activity/in-tableau.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/turbot/steampipe-samples/b22ba2de97d625a6912fb463bfed3d2b948fdac6/all/github-activity/in-tableau.gif -------------------------------------------------------------------------------- /all/github-activity/mod.sp: -------------------------------------------------------------------------------- 1 | mod "github" { 2 | title = "github" 3 | } 4 | 5 | locals { 6 | default_org = { 7 | name = "turbot" 8 | } 9 | default_user = { 10 | name = "judell" 11 | } 12 | } 13 | 14 | dashboard "github_activity" { 15 | 16 | input "username" { 17 | title = "username" 18 | width = 2 19 | query = query.usernames 20 | } 21 | 22 | input "repo_pattern" { 23 | title = "repo pattern" 24 | width = 2 25 | option "turbot" {} 26 | option "steampipe-mod" {} 27 | option "steampipe-plugin" {} 28 | } 29 | 30 | input "issue_or_pull" { 31 | title = "issue/pull" 32 | width = 2 33 | option "issue" {} 34 | option "pull" {} 35 | option "both" {} 36 | } 37 | 38 | input "open_or_closed" { 39 | title = "open/closed" 40 | width = 2 41 | option "open" {} 42 | option "closed" {} 43 | option "both" {} 44 | } 45 | 46 | container { 47 | 48 | table { 49 | title = "my team's github activity" 50 | width = 12 51 | args = [ 52 | self.input.username.value, 53 | self.input.repo_pattern.value, 54 | self.input.issue_or_pull.value, 55 | self.input.open_or_closed.value 56 | ] 57 | sql = < avatars.csv` 10 | 11 | - `python avatars.py > mod.sp` 12 | 13 | ![](./github-avatars.png) -------------------------------------------------------------------------------- /all/github-avatars/avatars.csv: -------------------------------------------------------------------------------- 1 | repo,src,alt,count 2 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/59417312?v=4,anisadas,1 3 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/78197905?v=4,bigdatasourav,59 4 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/45350738?v=4,c0d3r-arnab,13 5 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/12363488?v=4,cbruno10,27 6 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/54130?v=4,dboeke,1 7 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/16674560?v=4,karanpopat,6 8 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/46913995?v=4,khushboo9024,15 9 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/37527306?v=4,LalitTurbot,9 10 | 
turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/47312748?v=4,misraved,12 11 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/47887552?v=4,ParthaI,9 12 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/39396993?v=4,Priyanka585464,1 13 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/46851617?v=4,rajeshbal65,18 14 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/26873346?v=4,rajlearner17,2 15 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/38218418?v=4,Subhajit97,5 16 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/38211164?v=4,subham9418,1 17 | turbot/steampipe-plugin-oci,https://avatars.githubusercontent.com/u/55080164?v=4,visiit,9 18 | -------------------------------------------------------------------------------- /all/github-avatars/avatars.py: -------------------------------------------------------------------------------- 1 | import csv 2 | 3 | with open('avatars.csv', 'r') as csvfile: 4 | reader = csv.DictReader(csvfile) 5 | row = next(reader) 6 | dashboard = f""" 7 | mod "avatars" {{ 8 | }} 9 | 10 | dashboard "avatars" {{ 11 | title = "committers for {row['repo']}" 12 | 13 | """ 14 | 15 | with open('avatars.csv', 'r') as csvfile: 16 | reader = csv.DictReader(csvfile) 17 | for row in reader: 18 | dashboard += f""" 19 | image {{ 20 | title = "{row['alt']} ({row['count']})" 21 | width=1 22 | src = "{row['src']}" 23 | alt = "{row['alt']}" 24 | }} 25 | 26 | """ 27 | dashboard += "}" 28 | 29 | print(dashboard) -------------------------------------------------------------------------------- /all/github-avatars/avatars.sql: -------------------------------------------------------------------------------- 1 | create or replace function avatars(repo text) returns table ( 2 | repo text, 3 | src text, 4 | alt text, 5 | count bigint 6 | ) as $$ 7 | with committers as ( 8 | select 9 | author_login, 10 | count(*) 11 | from 12 | github_commit 13 | where 14 | repository_full_name = $1 15 | group by 16 | author_login 17 | ) 18 | select 19 | repo, 20 | u.avatar_url as src, 21 | c.author_login as alt, 22 | c.count 23 | from 24 | committers c 25 | join github_user u on c.author_login = u.login 26 | order by 27 | lower(author_login); 28 | $$ language sql; 29 | 30 | -- alias spsql="psql -h localhost -p 9193 -d steampipe -U steampipe" 31 | -- spsql -c "copy (select * from avatars('turbot/steampipe-plugin-oci')) to stdout with delimiter ',' csv header" > avatars.csv; -------------------------------------------------------------------------------- /all/github-avatars/github-avatars.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/turbot/steampipe-samples/b22ba2de97d625a6912fb463bfed3d2b948fdac6/all/github-avatars/github-avatars.png -------------------------------------------------------------------------------- /all/github-avatars/mod.sp: -------------------------------------------------------------------------------- 1 | 2 | mod "avatars" { 3 | } 4 | 5 | dashboard "avatars" { 6 | title = "committers for turbot/steampipe-plugin-oci" 7 | 8 | 9 | image { 10 | title = "anisadas (1)" 11 | width=1 12 | src = "https://avatars.githubusercontent.com/u/59417312?v=4" 13 | alt = "anisadas" 14 | } 15 | 16 | 17 | image { 18 | title = "bigdatasourav (59)" 19 | width=1 20 | src = "https://avatars.githubusercontent.com/u/78197905?v=4" 21 | alt = "bigdatasourav" 22 | } 23 | 24 | 25 | image { 26 | title = 
"c0d3r-arnab (13)" 27 | width=1 28 | src = "https://avatars.githubusercontent.com/u/45350738?v=4" 29 | alt = "c0d3r-arnab" 30 | } 31 | 32 | 33 | image { 34 | title = "cbruno10 (27)" 35 | width=1 36 | src = "https://avatars.githubusercontent.com/u/12363488?v=4" 37 | alt = "cbruno10" 38 | } 39 | 40 | 41 | image { 42 | title = "dboeke (1)" 43 | width=1 44 | src = "https://avatars.githubusercontent.com/u/54130?v=4" 45 | alt = "dboeke" 46 | } 47 | 48 | 49 | image { 50 | title = "karanpopat (6)" 51 | width=1 52 | src = "https://avatars.githubusercontent.com/u/16674560?v=4" 53 | alt = "karanpopat" 54 | } 55 | 56 | 57 | image { 58 | title = "khushboo9024 (15)" 59 | width=1 60 | src = "https://avatars.githubusercontent.com/u/46913995?v=4" 61 | alt = "khushboo9024" 62 | } 63 | 64 | 65 | image { 66 | title = "LalitTurbot (9)" 67 | width=1 68 | src = "https://avatars.githubusercontent.com/u/37527306?v=4" 69 | alt = "LalitTurbot" 70 | } 71 | 72 | 73 | image { 74 | title = "misraved (12)" 75 | width=1 76 | src = "https://avatars.githubusercontent.com/u/47312748?v=4" 77 | alt = "misraved" 78 | } 79 | 80 | 81 | image { 82 | title = "ParthaI (9)" 83 | width=1 84 | src = "https://avatars.githubusercontent.com/u/47887552?v=4" 85 | alt = "ParthaI" 86 | } 87 | 88 | 89 | image { 90 | title = "Priyanka585464 (1)" 91 | width=1 92 | src = "https://avatars.githubusercontent.com/u/39396993?v=4" 93 | alt = "Priyanka585464" 94 | } 95 | 96 | 97 | image { 98 | title = "rajeshbal65 (18)" 99 | width=1 100 | src = "https://avatars.githubusercontent.com/u/46851617?v=4" 101 | alt = "rajeshbal65" 102 | } 103 | 104 | 105 | image { 106 | title = "rajlearner17 (2)" 107 | width=1 108 | src = "https://avatars.githubusercontent.com/u/26873346?v=4" 109 | alt = "rajlearner17" 110 | } 111 | 112 | 113 | image { 114 | title = "Subhajit97 (5)" 115 | width=1 116 | src = "https://avatars.githubusercontent.com/u/38218418?v=4" 117 | alt = "Subhajit97" 118 | } 119 | 120 | 121 | image { 122 | title = "subham9418 (1)" 123 | width=1 124 | src = "https://avatars.githubusercontent.com/u/38211164?v=4" 125 | alt = "subham9418" 126 | } 127 | 128 | 129 | image { 130 | title = "visiit (9)" 131 | width=1 132 | src = "https://avatars.githubusercontent.com/u/55080164?v=4" 133 | alt = "visiit" 134 | } 135 | 136 | } 137 | -------------------------------------------------------------------------------- /all/github-external-contributor-analysis/README.md: -------------------------------------------------------------------------------- 1 | In [A Portrait of VSCode's external contributors](https://steampipe.io/blog/vscode-analysis) we analyzed internal vs external contributors to VSCode using the `vscode.sql` script here. 2 | 3 | You can run `build-the-script.py` with other settings to analyze another repo. 
The example provided is for TypeScript (`typescript.sql`), where we found: 4 | 5 | ``` 6 | select count(*), 'internal' as type from typescript_internal_committers 7 | union 8 | select count(*), 'external' as type from typescript_external_committers; 9 | 10 | count | type 11 | -------+---------- 12 | 54 | internal 13 | 540 | external 14 | 15 | select count(*), 'internal' as type from typescript_internal_issue_filers 16 | union 17 | select count(*), 'external' as type from typescript_external_issue_filers; 18 | 19 | count | type 20 | -------+---------- 21 | 118 | internal 22 | 10670 | external 23 | 24 | select * from typescript_internal_commit_counts limit 10; 25 | 26 | repository_full_name | author_login | count 27 | ----------------------+-------------------+------- 28 | microsoft/typescript | ahejlsberg | 3690 29 | microsoft/typescript | sheetalkamat | 2600 30 | microsoft/typescript | DanielRosenwasser | 2269 31 | microsoft/typescript | sandersn | 2209 32 | microsoft/typescript | andy-ms | 2067 33 | microsoft/typescript | rbuckton | 1708 34 | microsoft/typescript | weswigham | 1503 35 | microsoft/typescript | RyanCavanaugh | 915 36 | microsoft/typescript | typescript-bot | 468 37 | microsoft/typescript | amcasey | 466 38 | 39 | select * from typescript_external_commit_counts limit 10; 40 | 41 | repository_full_name | author_login | count 42 | ----------------------+-----------------+------- 43 | microsoft/typescript | JsonFreeman | 674 44 | microsoft/typescript | a-tarasyuk | 508 45 | microsoft/typescript | zhengbli | 468 46 | microsoft/typescript | yuit | 217 47 | microsoft/typescript | saschanaz | 215 48 | microsoft/typescript | Kingwl | 202 49 | microsoft/typescript | ajafff | 180 50 | microsoft/typescript | tinganho | 99 51 | microsoft/typescript | JoshuaKGoldberg | 87 52 | microsoft/typescript | bigaru | 77 53 | 54 | select * from typescript_internal_issue_counts limit 10; 55 | 56 | repository_full_name | author_login | count 57 | ----------------------+-------------------+------- 58 | microsoft/typescript | danielrosenwasser | 1192 59 | microsoft/typescript | ryancavanaugh | 318 60 | microsoft/typescript | sandersn | 296 61 | microsoft/typescript | dbaeumer | 249 62 | microsoft/typescript | weswigham | 199 63 | microsoft/typescript | amcasey | 139 64 | microsoft/typescript | rbuckton | 106 65 | microsoft/typescript | egamma | 96 66 | microsoft/typescript | sheetalkamat | 69 67 | microsoft/typescript | jrieken | 60 68 | 69 | select * from typescript_external_issue_counts limit 10; 70 | 71 | repository_full_name | author_login | count 72 | ----------------------+------------------+------- 73 | microsoft/typescript | falsandtru | 439 74 | microsoft/typescript | zpdDG4gta8XKpMCd | 310 75 | microsoft/typescript | OliverJAsh | 245 76 | microsoft/typescript | ajafff | 220 77 | microsoft/typescript | JsonFreeman | 156 78 | microsoft/typescript | tinganho | 151 79 | microsoft/typescript | saschanaz | 133 80 | microsoft/typescript | basarat | 130 81 | microsoft/typescript | NoelAbrahams | 129 82 | microsoft/typescript | yuit | 116 83 | 84 | 85 | ``` -------------------------------------------------------------------------------- /all/github-issue-duration/README.md: -------------------------------------------------------------------------------- 1 | # GitHub issue duration 2 | 3 | For a specified subset of your repos, this dashboard explores how long issues remain open in each repo. 
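The underlying measurement is simply the age of each open issue. Here's a minimal sketch of the kind of query involved — not the mod's actual query; column names are as documented for the GitHub plugin's `github_issue` table, and the repo name is a placeholder:

```hcl
query "issue_days_open" {
  sql = <<EOQ
    select
      number,
      title,
      date_part('day', now() - created_at) as days_open
    from
      github_issue
    where
      repository_full_name = 'turbot/steampipe'
      and closed_at is null
    order by
      days_open desc
  EOQ
}
```

The dashboard buckets those durations into intervals, per repo, using the `pct_issues_open_for_repo_by_interval` function mentioned in the setup steps below.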
4 | 5 | ## Setup 6 | 7 | - Install [Steampipe](https://steampipe.io/downloads) 8 | - Install and configure [the GitHub plugin](https://hub.steampipe.io/plugins/turbot/github) 9 | - Clone this repo and visit `steampipe-samples/all/github-issue-duration` 10 | - Edit `mod.sp` and set `repo_pattern` to a regex that matches the repos you want to explore 11 | - Copy the commented section that defines `pct_issues_open_for_repo_by_interval`, run `steampipe query`, and paste to create the function 12 | - Run `steampipe dashboard` 13 | - Open localhost:9194 14 | 15 | ## Demo 16 | 17 | https://user-images.githubusercontent.com/46509/178044297-7083aee2-053e-46c7-a8b8-2a3c1973d02d.mp4 18 | 19 | -------------------------------------------------------------------------------- /all/github-traffic/README.md: -------------------------------------------------------------------------------- 1 | # GitHub traffic for a set of repos 2 | 3 | This dashboard reports data from [github_traffic_view_daily](https://hub.steampipe.io/plugins/turbot/github/tables/github_traffic_view_daily) as a set of Tuftean [small multiples](https://www.juiceanalytics.com/writing/better-know-visualization-small-multiples) that summarize the last 14 days of GitHub traffic to Steampipe plugins and mods. 4 | 5 | - Repos with negligible traffic are excluded. 6 | 7 | - Repos are sorted in descending order by the sum of daily uniques for the 2-week period. 8 | 9 | - Daily maxes are scaled to the biggest day for each type of chart (plugin or mod). 10 | 11 | # Implementation 12 | 13 | The HCL+SQL code in `mod.sp` is not complicated, but it would be impractical to write by hand. Instead, it's generated by a Python script. 14 | 15 | You could write that script in the conventional way, using `psycopg2` to connect Python to Steampipe. Or, as in `../github-avatars`, you could use SQL to produce CSV output that's further transformed by Python. This example does things differently in order to show that it's possible to use Postgres' pl/python extension with Steampipe. `plpy-traffic-dashboard.sql` defines a Postgres procedure, `traffic()`, that's written in Python. You run it from the Postgres command line: `call traffic()`. 16 | 17 | The easiest way to install pl/python (or any other Postgres extension) into the Steampipe instance of Postgres is to copy files from another instance of Postgres where the extension was installed in the conventional way. 
18 | 19 | 20 | ``` 21 | sudo apt install postgresql-plpython3-14 22 | 23 | cp /usr/lib/postgresql/14/lib/plpython3.so ~/.steampipe/db/14.2.0/postgres/lib/postgresql/plpython3.so 24 | 25 | sudo -u root psql -d steampipe -h localhost -p 9193 26 | 27 | =# create extension plpython3u; 28 | 29 | =# update pg_language set lanpltrusted = true where lanname = 'plpython3u'; 30 | 31 | =# grant all on language plpython3u to steampipe; 32 | ``` 33 | 34 | (https://stackoverflow.com/questions/2848704/postgresql-running-python-stored-procedures-as-a-normal-user) 35 | 36 | # Demo 37 | 38 | ![](./repo-traffic-small-multiples.png) 39 | -------------------------------------------------------------------------------- /all/github-traffic/plpy-traffic-dashboard.sql: -------------------------------------------------------------------------------- 1 | create or replace procedure traffic() as $$ 2 | import re 3 | sql = """ 4 | select 5 | full_name 6 | from 7 | github_search_repository 8 | where 9 | query = 'steampipe in:name -org:turbotio -org:turbothq ' 10 | and 11 | full_name ~ 'turbot/steampipe-(plugin|mod)' 12 | and 13 | not full_name ~ 'reddit' 14 | """ 15 | rows = plpy.execute(sql) 16 | 17 | totals = {} 18 | daily_maxes = {} 19 | 20 | for row in rows: 21 | sql = f""" 22 | select 23 | sum(uniques), 24 | max(uniques) 25 | from 26 | github_traffic_view_daily 27 | where 28 | repository_full_name = '{row['full_name']}' 29 | having 30 | sum(uniques) > 20 31 | """ 32 | r = plpy.execute(sql) 33 | if len(r) > 0: 34 | totals[row['full_name']] = r[0]['sum'] 35 | daily_maxes[row['full_name']] = r[0]['max'] 36 | 37 | sorted_totals = {k: v for k, v in sorted(totals.items(), key=lambda item: item[1], reverse=True)} 38 | 39 | sorted_plugin_names = [k for k in sorted_totals if 'steampipe-plugin' in k] 40 | sorted_mod_names = [k for k in sorted_totals if 'steampipe-mod' in k] 41 | 42 | plugin_daily_max = max([daily_maxes[k] for k in daily_maxes.keys() if 'steampipe-plugin' in k]) 43 | mod_daily_max = max([daily_maxes[k] for k in daily_maxes.keys() if 'steampipe-mod' in k]) 44 | 45 | def chart(full_name, max): 46 | name = re.sub('steampipe-(plugin|mod)-', '', full_name) 47 | chart = f""" 48 | chart {{ 49 | width = 2 50 | title = "{name}" 51 | axes {{ 52 | y {{ 53 | max = {max} 54 | }} 55 | }} 56 | sql = <&1 3 | 4 | echo 'create table hn_items_all' 5 | cp hn_items_all.csv ~/csv >/dev/null 2>&1 6 | 7 | steampipe query "drop table if exists hn_items_all" >/dev/null 2>&1 8 | steampipe query "create table public.hn_items_all as select distinct on (id) id, title, \"time\", by, score, descendants, type, url from csv.hn_items_all" >/dev/null 2>&1 9 | steampipe query "delete from hn_items_all where substring(time from 1 for 10) < to_char(now() - interval '31 day' , 'YYYY-MM-DD')" >/dev/null 2>&1 10 | steampipe query "update hn_items_all set descendants = 0 where descendants = ''" 2>&1 11 | 12 | echo 'create indexes' 13 | steampipe query "create index idx_hn_items_all_by on public.hn_items_all(by)" >/dev/null 2>&1 14 | steampipe query "create index idx_hn_items_all_score on public.hn_items_all(score)" >/dev/null 2>&1 15 | steampipe query "create index idx_hn_items_all_descendants on public.hn_items_all(descendants)" >/dev/null 2>&1 16 | steampipe query "create index idx_hn_items_all_url on public.hn_items_all(url)" >/dev/null 2>&1 17 | 18 | 19 | echo 'cast types' 20 | steampipe query "update hn_items_all set descendants = 0::text where descendants = ''" >/dev/null 2>&1 21 | steampipe query "update hn_items_all set score = 0::text where 
score = ''" >/dev/null 2>&1 22 | steampipe query "update hn_items_all set url = '' where url = ''" >/dev/null 2>&1 23 | 24 | echo 'now run `steampipe dashboard`, then visit http://localhost:9194 and check out the hacker news dashboard!' 25 | 26 | 27 | -------------------------------------------------------------------------------- /all/hackernews/urls.sp: -------------------------------------------------------------------------------- 1 | dashboard "Urls" { 2 | 3 | tags = { 4 | service = "Hacker News" 5 | } 6 | 7 | container { 8 | 9 | text { 10 | width = 6 11 | value = < parameterized inline query: $1 51 | // sql = "select 'parameterized inline query: $1' as data" 52 | 53 | } 54 | 55 | } 56 | 57 | container { 58 | title = "query object" 59 | 60 | table { 61 | width = 4 62 | title = "plain query object" 63 | query = query.plain_query_object 64 | } 65 | 66 | table { 67 | width = 4 68 | title = "interpolated query object" 69 | query = query.interpolated_query_object 70 | } 71 | 72 | table { 73 | width = 4 74 | title = "parameterized query object" 75 | query = query.parameterized_query_object 76 | args = [ 77 | local.foo 78 | ] 79 | 80 | // When there is only one you can also do this. 81 | 82 | // args = [ local.foo ] 83 | 84 | // But when >1 it's like this: 85 | 86 | // args = [ 87 | // local.foo, 88 | // local.bar 89 | // ] 90 | 91 | 92 | // This is not an error, but has no effect. 93 | param "param" {} 94 | } 95 | 96 | } 97 | 98 | container { 99 | title = "query file" 100 | 101 | table { 102 | width = 4 103 | title = "use plain sql file" 104 | sql = query.plain_sql_file.sql 105 | } 106 | 107 | table { 108 | width = 4 109 | title = "use interpolated sql file (fail)" 110 | sql = query.interpolated_sql_file.sql 111 | } 112 | 113 | table { 114 | width = 4 115 | title = "use parameterized sql file" 116 | sql = query.parameterized_sql_file.sql 117 | args = [ local.foo ] 118 | 119 | // This is not an error, but has no effect. 120 | param "param" {} 121 | } 122 | 123 | } 124 | 125 | } -------------------------------------------------------------------------------- /all/hcl-dashboard-patterns/parameterized_sql_file.sql: -------------------------------------------------------------------------------- 1 | select 'parameterized sql file: ' || $1 as data 2 | 3 | -- but this will fail, yielding -> parameterized sql file: $1 4 | --select 'parameterized sql file: $1' as data -------------------------------------------------------------------------------- /all/hcl-dashboard-patterns/plain_sql_file.sql: -------------------------------------------------------------------------------- 1 | select 'plain sql file' as data -------------------------------------------------------------------------------- /all/hypothesis/README.md: -------------------------------------------------------------------------------- 1 | # Hypothesis dashboards 2 | 3 | Dashboards for Hypothesis annotations. 4 | 5 | ## Annotations 6 | 7 | Choose a group and a URL, then review: 8 | 9 | - top annotators 10 | - top domains 11 | - top tags 12 | - top urls 13 | - top tags and taggers 14 | - histogram of annotation word counts 15 | - histogram of users by their annotation counts 16 | - longest threads 17 | - conversations 18 | 19 | ## Media Conversations 20 | 21 | Choose a group, a media source (e.g.
NYTimes), and a URL, then review: 22 | 23 | - longest threads 24 | - conversations 25 | 26 | https://user-images.githubusercontent.com/46509/171987725-52587870-1eb0-48e6-af77-3e1cde6a1f8f.mp4 27 | 28 | -------------------------------------------------------------------------------- /all/hypothesis/media_conversations.sp: -------------------------------------------------------------------------------- 1 | dashboard "Media_Conversations" { 2 | 3 | tags = { 4 | service = "Hypothesis" 5 | } 6 | 7 | container { 8 | 9 | text { 10 | width = 3 11 | value = <>'name' as label, 32 | group_info->>'id' as value, 33 | json_build_object('id', group_info->>'id') as tags 34 | from 35 | groups 36 | EOQ 37 | } 38 | 39 | input "media_source" { 40 | title = "media source (select or type another)" 41 | type = "combo" 42 | width = 4 43 | sql = < min 123 | ), 124 | bucket_min_max as ( 125 | select 126 | bucket, 127 | min(value), 128 | max(value) 129 | from 130 | buckets 131 | group by 132 | bucket 133 | ), 134 | ranges as ( 135 | select 136 | bucket, 137 | int4range(min, max, '[]') as range 138 | from bucket_min_max 139 | ) 140 | select 141 | r.range, 142 | count(b.*) 143 | from ranges r join buckets b using (bucket) 144 | where r.range != 'empty' 145 | group by b.bucket, r.range 146 | order by b.bucket 147 | """ 148 | return plpy.execute(sql) 149 | $$ language plpython3u; 150 | 151 | */ 152 | -------------------------------------------------------------------------------- /all/introspection/README.md: -------------------------------------------------------------------------------- 1 | When launched in a directory that contains mod resources, Steampipe builds introspection tables including `steampipe_query`, `steampipe_benchmark`, and `steampipe_control`. This example shows how to iterate over a list of mod names, git-clone each of them, query those tables, and accumulate counts of those resources in a CSV file. 2 | 3 | ```bash 4 | echo "mod,queries,benchmarks,controls" > stats.csv 5 | 6 | mods=('steampipe-mod-alicloud-compliance' 'steampipe-mod-alicloud-thrifty' 'steampipe-mod-aws-compliance' 'steampipe-mod-aws-tags' 'steampipe-mod-aws-thrifty' 'steampipe-mod-azure-compliance' 'steampipe-mod-azure-tags' 'steampipe-mod-azure-thrifty' 'steampipe-mod-digitalocean-thrifty' 'steampipe-mod-gcp-compliance' 'steampipe-mod-gcp-labels' 'steampipe-mod-gcp-thrifty' 'steampipe-mod-github-sherlock' 'steampipe-mod-ibm-compliance' 'steampipe-mod-kubernetes-compliance' 'steampipe-mod-oci-compliance' 'steampipe-mod-oci-thrifty' 'steampipe-mod-terraform-aws-compliance' 'steampipe-mod-terraform-azure-compliance' 'steampipe-mod-terraform-gcp-compliance' 'steampipe-mod-zoom-compliance') 7 | 8 | modcount="${#mods[@]}" 9 | 10 | process () { 11 | cd $1 12 | steampipe query --output csv --header=false "select '$1' as mod, ( select count(*) from steampipe_query ) as queries, ( select count(*) from steampipe_benchmark ) as benchmarks, ( select count(*) from steampipe_control ) as controls" >> ../stats.csv 13 | cd .. 14 | } 15 | 16 | for (( i=0; i<$modcount; i++ )); 17 | do 18 | rm -rf "${mods[$i]}" ; 19 | git clone "https://github.com/turbot/${mods[$i]}" ; 20 | process "${mods[$i]}" ; 21 | done; 22 | ``` 23 | 24 | Here's the output as of Feb 15, 2022.
25 | 26 | ``` 27 | mod,queries,benchmarks,controls 28 | steampipe-mod-alicloud-compliance,40,9,78 29 | steampipe-mod-alicloud-thrifty,0,5,15 30 | steampipe-mod-aws-compliance,233,374,448 31 | steampipe-mod-aws-tags,0,4,284 32 | steampipe-mod-azure-compliance,281,251,360 33 | steampipe-mod-azure-tags,0,4,228 34 | steampipe-mod-azure-thrifty,0,4,13 35 | steampipe-mod-digitalocean-thrifty,0,5,9 36 | steampipe-mod-gcp-compliance,91,13,114 37 | steampipe-mod-gcp-labels,0,4,44 38 | steampipe-mod-gcp-thrifty,0,5,14 39 | steampipe-mod-github-sherlock,0,4,34 40 | steampipe-mod-ibm-compliance,20,21,67 41 | steampipe-mod-kubernetes-compliance,165,28,158 42 | steampipe-mod-oci-compliance,31,6,33 43 | steampipe-mod-oci-thrifty,0,8,18 44 | steampipe-mod-terraform-aws-compliance,153,39,153 45 | steampipe-mod-terraform-azure-compliance,151,34,151 46 | steampipe-mod-terraform-gcp-compliance,55,9,55 47 | steampipe-mod-zoom-compliance,1,44,171 48 | ``` 49 | 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /all/jira/README.md: -------------------------------------------------------------------------------- 1 | # Jira tasks and subtasks 2 | 3 | The output of `select * from jira_issue`, for the toy Jira environment at `jonudell.atlassian.net`, is in `jira.json`. 4 | 5 | There's just one task with two children. 6 | 7 | The dashboard graphs the task/subtask relationships and displays the data needed to do that. 8 | 9 | ![screenshot](https://user-images.githubusercontent.com/46509/218625345-e71c3bb5-d2ee-4524-b565-dd853968200b.png) 10 | -------------------------------------------------------------------------------- /all/jira/mod.sp: -------------------------------------------------------------------------------- 1 | mod "jira" { 2 | } 3 | 4 | locals { 5 | server = "https://jonudell.atlassian.net" 6 | } 7 | -------------------------------------------------------------------------------- /all/jira/tasks_and_subtasks.sp: -------------------------------------------------------------------------------- 1 | dashboard "TasksAndSubtasks" { 2 | 3 | graph { 4 | 5 | node { 6 | category = category.task 7 | sql = <'parent'->>'key' as from_id, 43 | title as to_id 44 | from 45 | jira_issue 46 | where 47 | fields->'parent' is not null 48 | EOQ 49 | } 50 | 51 | } 52 | 53 | table { 54 | sql = <'parent'->>'key' as parent 60 | from 61 | jira_issue 62 | EOQ 63 | } 64 | 65 | } 66 | 67 | category "task" { 68 | icon = "task" 69 | color = "black" 70 | href = "{{.properties.'url'}}" 71 | } 72 | 73 | category "subtask" { 74 | icon = "task" 75 | color = "gray" 76 | href = "{{.properties.'url'}}" 77 | } -------------------------------------------------------------------------------- /all/join-csv-and-api/README.md: -------------------------------------------------------------------------------- 1 | # Joining CSV and API tables 2 | 3 | The scenario: you have a list of service names and IP addresses in a CSV file. You'd like to join that list, on IP address, to AWS resources. 4 | 5 | The file `~/csv/ips.csv` contains this data. 6 | 7 | ``` 8 | service,ip_addr 9 | service1,54.176.63.151 10 | service2,222.236.38.99 11 | service3,41.65.221.12 12 | service4,83.151.87.112 13 | service5,85.188.10.179 14 | ``` 15 | 16 | The CSV plugin is installed, and the `~/.steampipe/config/csv.spc` file contains this `paths` directive. 17 | 18 | ``` 19 | connection "csv" { 20 | plugin = "csv" 21 | 22 | paths = [ "~/csv/*.csv" ] 23 | } 24 | ``` 25 | 26 | This query selects all records in the CSV file.
27 | 28 | ``` 29 | select * from csv.ips 30 | 31 | +----------+---------------+ 32 | | service | ip_addr | 33 | +----------+---------------+ 34 | | service1 | 54.176.63.151 | 35 | | service2 | 222.236.38.99 | 36 | | service3 | 41.65.221.12 | 37 | | service4 | 83.151.87.112 | 38 | | service5 | 85.188.10.179 | 39 | +----------+---------------+ 40 | ``` 41 | 42 | Here's a query for EC2 endpoints. 43 | 44 | ``` 45 | select private_ip_address, public_ip_address from aws_ec2_instance 46 | 47 | +--------------------+-------------------+ 48 | | private_ip_address | public_ip_address | 49 | +--------------------+-------------------+ 50 | | 172.31.31.137 | 54.176.63.151 | 51 | | 172.31.29.210 | | 52 | | 10.11.66.164 | | 53 | | 10.10.10.41 | 18.205.6.164 | 54 | +--------------------+-------------------+ 55 | ``` 56 | 57 | And finally, here's a query that joins on IP address and reports EC2 details. 58 | 59 | 60 | ``` 61 | select 62 | c.service, 63 | c.ip_addr, 64 | a.instance_id, 65 | a.instance_state 66 | from csv.ips c 67 | join aws_ec2_instance a 68 | on c.ip_addr = host(a.public_ip_address) 69 | ``` 70 | 71 | ``` 72 | +----------+---------------+---------------------+----------------+ 73 | | service | ip_addr | instance_id | instance_state | 74 | +----------+---------------+---------------------+----------------+ 75 | | service1 | 54.176.63.151 | i-06d8571f170181287 | running | 76 | +----------+---------------+---------------------+----------------+ 77 | ``` 78 | 79 | Per the Postgres doc on [network address functions and operators](https://www.postgresql.org/docs/12/functions-net.html), we use the `host` function to convert `a.public_ip_address` from its native type, `inet`, to type `text` so it can join with `c.ip_addr`. -------------------------------------------------------------------------------- /all/linkcheck/links.sp: -------------------------------------------------------------------------------- 1 | dashboard "Links" { 2 | 3 | tags = { 4 | service = "Link Checker" 5 | } 6 | 7 | container { 8 | 9 | input "scheme" { 10 | title = "scheme" 11 | type = "combo" 12 | width = 2 13 | option "https://" {} 14 | option "http://" {} 15 | } 16 | 17 | input "target_url" { 18 | title = "target url" 19 | type = "combo" 20 | width = 4 21 | option "steampipe.io" {} 22 | } 23 | 24 | table "links" { 25 | args = [ 26 | self.input.scheme.value, 27 | self.input.target_url.value 28 | ] 29 | query = query.links 30 | column "link" { 31 | href = "${local.host}/steampipe_stats.dashboard.Links?input.scheme={{.'scheme'}}&input.target_url={{.'link'}}" 32 | wrap = "all" 33 | } 34 | column "context" { 35 | wrap = "all" 36 | } 37 | column "response_error" { 38 | wrap = "all" 39 | } 40 | 41 | } 42 | 43 | } 44 | 45 | } 46 | 47 | 48 | -------------------------------------------------------------------------------- /all/linkcheck/mod.sp: -------------------------------------------------------------------------------- 1 | mod "link_check" { 2 | title = "Link Check" 3 | } 4 | 5 | locals { 6 | // host = "https://cloud.steampipe.io/org/acme/workspace/jon/dashboard" 7 | host = "http://localhost:9194" 8 | } 9 | -------------------------------------------------------------------------------- /all/linkcheck/query.sp: -------------------------------------------------------------------------------- 1 | query "links" { 2 | sql = <> 'name')::text as source, 17 | to_char(timestamp, 'YYYY-MM-DD') as date, 18 | permalink as link, 19 | substring(text from 1 for 200) as content 20 | from 21 | slack_search 22 | where 23 | $1 ~ 'slack' 24 | 
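        -- note: $1 carries the dashboard's multiselect "sources" value, serialized as text;
        -- this regex match switches the slack CTE on only when 'slack' is among the
        -- selected sources (the github_issue and gdrive CTEs below use the same on/off trick)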
and query = 'in:#steampipe after:${local.config.slack_date} ' || $2 25 | limit $3 26 | ), 27 | github_issue as ( 28 | select 29 | 'github_issue' as type, 30 | repository_full_name || ' ' || title as source, 31 | to_char(created_at, 'YYYY-MM-DD') as date, 32 | html_url as link, 33 | substring(body from 1 for 200) || '...' as content 34 | from 35 | github_search_issue 36 | where 37 | $1 ~ 'github_issue' 38 | and query = ' in:body in:comments org:${local.config.github_org} ' || $2 39 | limit $3 40 | ), 41 | gdrive as ( 42 | select 43 | 'gdrive' as type, 44 | replace(mime_type,'application/vnd.google-apps.','') as source, 45 | to_char(created_time, 'YYYY-MM-DD') as date, 46 | 'https://docs.google.com/document/d/' || id as link, 47 | name as content 48 | from 49 | googleworkspace_drive_my_file 50 | where 51 | $1 ~ 'gdrive' 52 | and query = 'fullText contains ' || '''' || $2 || '''' 53 | limit $3 54 | ) 55 | 56 | select * from slack 57 | union 58 | select * from github_issue 59 | union 60 | select * from gdrive 61 | 62 | order by 63 | date desc 64 | EOQ 65 | param "sources" {} 66 | param "search_term" {} 67 | param "max_per_source" {} 68 | } 69 | 70 | dashboard "metasearch" { 71 | 72 | input "sources" { 73 | title = "sources" 74 | type = "multiselect" 75 | width = 4 76 | option "slack" {} 77 | option "github_issue" {} 78 | option "gdrive" {} 79 | } 80 | 81 | input "search_term" { 82 | type = "text" 83 | width = 2 84 | title = "search term" 85 | } 86 | 87 | input "max_per_source" { 88 | title = "max per source" 89 | width = 2 90 | option "2" {} 91 | option "5" {} 92 | option "10" {} 93 | option "20" {} 94 | } 95 | 96 | table { 97 | title = "search slack + github + gdrive" 98 | query = query.metasearch 99 | args = [ 100 | self.input.sources, 101 | self.input.search_term, 102 | self.input.max_per_source 103 | ] 104 | column "source" { 105 | wrap = "all" 106 | } 107 | column "link" { 108 | wrap = "all" 109 | } 110 | column "content" { 111 | wrap = "all" 112 | } 113 | } 114 | 115 | } 116 | 117 | -------------------------------------------------------------------------------- /all/metasearch/zendesk.sql: -------------------------------------------------------------------------------- 1 | -- this worked but our trial account expired 2 | zendesk as ( 3 | select 4 | 'zendesk' as type, 5 | result -> 'via' ->> 'channel' || ': ' || 6 | ( select name from zendesk_user where id::text = result ->> 'submitter_id' ) 7 | as source, 8 | substring(result ->> 'created_at' from 1 for 10) as date, 9 | 'https://turbothelp.zendesk.com/agent/tickets/' || (result ->> 'id')::text as link, 10 | result ->> 'subject' as content 11 | from 12 | zendesk_search 13 | where 14 | $1 ~ 'zendesk' 15 | and query = $2 16 | limit $3 17 | ) -------------------------------------------------------------------------------- /all/pipes-terraform-provider/README.md: -------------------------------------------------------------------------------- 1 | # Turbot Pipes Terraform Provider 2 | 3 | The official Terraform Provider for Turbot Pipes can be found in the [Terraform Registry](https://registry.terraform.io/providers/turbot/pipes/latest). 4 | 5 | This example complements the content of this [blog post](https://turbot.com/blog/2023/08/provision-pipes-workspaces-with-the-terraform-provider).
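To try it, you'd export a Turbot Pipes API token and run the usual Terraform workflow. A minimal sketch, assuming the provider reads the token from the `PIPES_TOKEN` environment variable and that you've replaced the example org, workspace, and user handles in these files with your own:

```
export PIPES_TOKEN=YOUR_PIPES_API_TOKEN   # placeholder; create a token in your Pipes user settings

terraform init
terraform plan
terraform apply
```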
6 | -------------------------------------------------------------------------------- /all/pipes-terraform-provider/outputs.tf: -------------------------------------------------------------------------------- 1 | # Retrieve information about the organization. 2 | output "my_org_details" { 3 | value = pipes_organization.pipes_demo 4 | } 5 | 6 | # Retrieve information about the organization workspace. 7 | output "my_org_workspace_details" { 8 | value = pipes_workspace.pipes_demo_workspace 9 | } -------------------------------------------------------------------------------- /all/pipes-terraform-provider/pipes.tf: -------------------------------------------------------------------------------- 1 | # Create an organization "Pipes Demo Org" 2 | resource "pipes_organization" "pipes_demo" { 3 | handle = "pipes-demo-org" 4 | display_name = "Pipes Demo" 5 | } 6 | 7 | # Add user with handle "vkumbha-mnuv" to the Organization as a member 8 | resource "pipes_organization_member" "org_member" { 9 | organization = pipes_organization.pipes_demo.handle 10 | user_handle = "vkumbha-mnuv" 11 | role = "member" 12 | } 13 | 14 | # Create AWS Connections for the organization 15 | resource "pipes_connection" "aws_connections" { 16 | for_each = var.aws_connections 17 | organization = pipes_organization.pipes_demo.handle 18 | plugin = "aws" 19 | handle = each.key 20 | config = jsonencode({ 21 | regions = each.value.regions 22 | role_arn = each.value.role_arn 23 | external_id = each.value.external_id 24 | }) 25 | } 26 | 27 | # Create an organization workspace 28 | resource "pipes_workspace" "pipes_demo_workspace" { 29 | organization = pipes_organization.pipes_demo.handle 30 | handle = "awsworkspace" 31 | } 32 | 33 | # Associate connection(s) with an organization workspace 34 | resource "pipes_workspace_connection" "pipes_demo_connections" { 35 | depends_on = [pipes_connection.aws_connections] 36 | for_each = var.aws_connections 37 | organization = pipes_organization.pipes_demo.handle 38 | workspace_handle = pipes_workspace.pipes_demo_workspace.handle 39 | connection_handle = each.key 40 | } 41 | 42 | # Add user with handle "vkumbha-mnuv" as an owner to the workspace 43 | resource "pipes_organization_workspace_member" "pipes_demo_workspace_member" { 44 | depends_on = [pipes_organization_member.org_member] 45 | organization = pipes_organization.pipes_demo.handle 46 | workspace_handle = pipes_workspace.pipes_demo_workspace.handle 47 | user_handle = "vkumbha-mnuv" 48 | role = "owner" 49 | } 50 | 51 | # Create an organization workspace aggregator 52 | resource "pipes_workspace_aggregator" "all_aws_aggregator" { 53 | organization = pipes_organization.pipes_demo.handle 54 | workspace = pipes_workspace.pipes_demo_workspace.handle 55 | handle = "all_aws" 56 | plugin = "aws" 57 | connections = ["aws*"] 58 | } 59 | 60 | # Schedule a custom query pipeline to run weekly 61 | resource "pipes_workspace_pipeline" "aws_s3_bucket_versioning_report" { 62 | organization = pipes_organization.pipes_demo.handle 63 | workspace = pipes_workspace.pipes_demo_workspace.handle 64 | title = "AWS S3 Bucket Versioning Report" 65 | pipeline = "pipeline.snapshot_query" 66 | frequency = jsonencode({ 67 | "type" : "interval", 68 | "schedule" : "weekly" 69 | }) 70 | args = jsonencode({ 71 | "resource" : "custom.dashboard.sql", 72 | "snapshot_title" : "AWS S3 Bucket Versioning Report", 73 | "sql" : <> 'ExecutiveOwner' as Executive_Owner, 4 | tags ->> 'TechnicalContact' as Technical_Contact, 5 | tags ->> 'DataClassification' as Data_Classification, 6 | tags
->> 'environment' as Environment 7 | from 8 | aws_payer.aws_organizations_account; 9 | -------------------------------------------------------------------------------- /all/splunk-lookup-tables/eni.sql: -------------------------------------------------------------------------------- 1 | select 2 | eni.network_interface_id, 3 | eni.private_ip, 4 | eni.vpc_id as vpc_id, 5 | eni.region, 6 | eni.status, 7 | eni.interface_type, 8 | eni.association_public_ip as public_ip, 9 | case 10 | when eni.attached_instance_id is not null 11 | then eni.attached_instance_id 12 | else eni.description 13 | end as attached_resource, 14 | vpc.tags ->> 'Name' as vpc_name, 15 | org.name as account_name 16 | from 17 | aws_ec2_network_interface as eni, 18 | aws_vpc as vpc, 19 | aws_payer.aws_organizations_account as org 20 | where vpc.vpc_id = eni.vpc_id 21 | and org.id = eni.account_id; 22 | -------------------------------------------------------------------------------- /all/splunk-lookup-tables/generate_tables.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | steampipe query accounts.sql --output csv > sp_aws_accounts.csv 4 | steampipe query instances.sql --output csv > sp_ec2_instances.csv 5 | steampipe query eni.sql --output csv > sp_eni.csv 6 | 7 | if [ -n "$1" ] ; then 8 | SPLUNK_SERVER=$1 9 | scp *.csv ec2-user@$SPLUNK_SERVER:/opt/splunk/etc/system/lookups 10 | fi 11 | -------------------------------------------------------------------------------- /all/splunk-lookup-tables/instances.sql: -------------------------------------------------------------------------------- 1 | select ec2.instance_id, ec2.instance_type, ec2.instance_state, ec2.image_id, 2 | ec2.launch_time, 3 | ec2.private_ip_address, 4 | ec2.public_ip_address, 5 | ec2.tags ->> 'Name' as instance_name, 6 | jsonb_array_elements(ec2.security_groups) ->> 'GroupName' as security_group_name, 7 | org.name as account_name, 8 | org.id as account_id 9 | from aws_ec2_instance as ec2, 10 | aws_payer.aws_organizations_account as org 11 | where org.id = ec2.account_id; 12 | -------------------------------------------------------------------------------- /all/spreadsheet-integrity/README.md: -------------------------------------------------------------------------------- 1 | In [Writing custom controls to check spreadsheet integrity](https://steampipe.io/blog/spreadsheet-integrity) we show an example based on these files. To run it for yourself: 2 | 3 | 1. `steampipe plugin install csv` 4 | 5 | 2. Edit `~/.steampipe/config/csv.spc` so its `paths` refers to this directory, e.g.: 6 | 7 | - `paths = [ "~/steampipe-samples/all/spreadsheet-integrity" ]` 8 | 9 | 3. Run `steampipe query`, then: 10 | 11 | - `.inspect csv` to check that the tables exist 12 | 13 | - `select * from csv.people` 14 | 15 | - `select * from csv.sessions` 16 | 17 | 4. `steampipe check control.sessions_valid_in_session_table` 18 | 19 | 5. `steampipe check control.sessions_valid_in_people_table` 20 | 21 | 6. `steampipe check all` 22 | 23 | 7. `steampipe check all --export event_planning.html` -------------------------------------------------------------------------------- /all/spreadsheet-integrity/event_planning.sp: -------------------------------------------------------------------------------- 1 | variable "valid_sessions" { 2 | type = list(string) 3 | default = ["101","102","103","104","105","201","202","203","204","205"] 4 | } 5 | 6 | control "sessions_valid_in_session_table" { 7 | title = "Sessions in the sessions table are valid" 8 | sql = <