├── .bandit
├── .gitattributes
├── .github
│   └── workflows
│       ├── codeguru-reviewer.yml
│       ├── codeql-analysis.yml
│       ├── publish-rss.yml
│       ├── python-publish.yml
│       ├── security-scan.yml
│       └── tests.yml
├── .gitignore
├── .pylintrc
├── CID-CMD.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── assets
│   ├── build_lambda_layer.sh
│   ├── images
│   │   ├── advanced-architecture.png
│   │   ├── deployment-guide-button.svg
│   │   ├── documentation.svg
│   │   └── foundational-architecture.png
│   ├── lint.sh
│   └── publish_lambda_layer.sh
├── bump-release.py
├── cfn-templates
│   ├── cid-admin-policies.yaml
│   ├── cid-cfn.tests.bats
│   ├── cid-cfn.yml
│   ├── cid-lakeformation-prerequisite.yaml
│   ├── cid-plugin.yml
│   └── tests
│       └── test_deploy_with_permissions.py
├── changes
│   ├── CHAMGELOG-sustainability-proxy-metrics.md
│   ├── CHANGELOG-amazon-connect.md
│   ├── CHANGELOG-aws-cost-anomalies.md
│   ├── CHANGELOG-aws-marketplace-spg.md
│   ├── CHANGELOG-cid.md
│   ├── CHANGELOG-cod.md
│   ├── CHANGELOG-cora.md
│   ├── CHANGELOG-cudos.md
│   ├── CHANGELOG-extended-support-cost-projection.md
│   ├── CHANGELOG-focus.md
│   ├── CHANGELOG-graviton-opportunities.md
│   ├── CHANGELOG-graviton-savings.md
│   ├── CHANGELOG-hed.md
│   ├── CHANGELOG-kpi.md
│   ├── CHANGELOG-scad-cca.md
│   ├── CHANGELOG-support-cases-radar.md
│   ├── CHANGELOG-tao.md
│   ├── CHANGELOG-trends.md
│   └── cloud-intelligence-dashboards.rss
├── cid
│   ├── __init__.py
│   ├── _version.py
│   ├── base.py
│   ├── builtin
│   │   ├── __init__.py
│   │   └── core
│   │       ├── __init__.py
│   │       └── data
│   │           ├── datasets
│   │           │   ├── cid
│   │           │   │   ├── compute.json
│   │           │   │   ├── ec2_running_cost.json
│   │           │   │   ├── s3_view.json
│   │           │   │   └── summary_view.json
│   │           │   ├── co
│   │           │   │   └── dataset.json
│   │           │   ├── cudos
│   │           │   │   ├── hourly_view.json
│   │           │   │   └── resource_view.json
│   │           │   ├── kpi
│   │           │   │   ├── kpi_ebs_snap.json
│   │           │   │   ├── kpi_ebs_storage_all.json
│   │           │   │   ├── kpi_instance_all.json
│   │           │   │   ├── kpi_s3_storage_all.json
│   │           │   │   └── kpi_tracker.json
│   │           │   ├── shared
│   │           │   │   └── customer_all.json
│   │           │   ├── tao
│   │           │   │   └── dataset.json
│   │           │   └── trends
│   │           │       ├── daily_anomaly_detection.json
│   │           │       ├── monthly_anomaly_detection.json
│   │           │       └── monthly_bill_by_account.json
│   │           ├── permissions
│   │           │   ├── dashboard_permissions.json
│   │           │   ├── dashboard_permissions_namespace.json
│   │           │   ├── data_set_permissions.json
│   │           │   ├── data_source_permissions.json
│   │           │   └── folder_permissions.json
│   │           ├── queries
│   │           │   ├── cid
│   │           │   │   ├── compute_savings_plan_eligible_spend.sql
│   │           │   │   ├── ec2_running_cost.sql
│   │           │   │   ├── ri_sp_mapping.sql
│   │           │   │   ├── s3.sql
│   │           │   │   └── summary_view.sql
│   │           │   ├── co
│   │           │   │   ├── all_options.sql
│   │           │   │   ├── auto_scale.json
│   │           │   │   ├── auto_scale_options.sql
│   │           │   │   ├── ebs_volume.json
│   │           │   │   ├── ebs_volume_options.sql
│   │           │   │   ├── ec2_instance.json
│   │           │   │   ├── ec2_instance_options.sql
│   │           │   │   ├── ecs_service.json
│   │           │   │   ├── ecs_service_options.sql
│   │           │   │   ├── idle.json
│   │           │   │   ├── idle_options.sql
│   │           │   │   ├── lambda.json
│   │           │   │   ├── lambda_options.sql
│   │           │   │   ├── license.json
│   │           │   │   ├── license_options.sql
│   │           │   │   ├── rds_database.json
│   │           │   │   ├── rds_instance_options.sql
│   │           │   │   └── rds_storage_options.sql
│   │           │   ├── cudos
│   │           │   │   ├── hourly_view.sql
│   │           │   │   └── resource_view.sql
│   │           │   ├── kpi
│   │           │   │   ├── first_kpi_instance_mapping_view.sql
│   │           │   │   ├── kpi_ebs_snap_view.sql
│   │           │   │   ├── kpi_ebs_storage_view.sql
│   │           │   │   ├── kpi_instance_all_view.sql
│   │           │   │   ├── kpi_s3_storage_view.sql
│   │           │   │   └── last_kpi_tracker_view.sql
│   │           │   ├── shared
│   │           │   │   ├── account_map.sql
│   │           │   │   ├── account_map_cur2.sql
│   │           │   │   ├── account_map_dummy.sql
│   │           │   │   ├── aws_accounts.sql
│   │           │   │   ├── aws_regions.sql
│   │           │   │   ├── aws_service_category_map.sql
│   │           │   │   ├── business_units_map.sql
│   │           │   │   ├── cur.yaml
│   │           │   │   ├── customer_all_unlimited.sql
│   │           │   │   ├── payer_account_name_map.sql
│   │           │   │   └── ta_descriptions.sql
│   │           │   ├── tao
│   │           │   │   ├── glue_table.json
│   │           │   │   └── ta_org_view.sql
│   │           │   └── trends
│   │           │       ├── daily_anomaly_detection.sql
│   │           │       ├── monthly_anomaly_detection.sql
│   │           │       └── monthly_bill_by_account.sql
│   │           └── resources.yaml
│   ├── cli.py
│   ├── commands
│   │   ├── __init__.py
│   │   ├── command_base.py
│   │   └── init_qs.py
│   ├── common.py
│   ├── exceptions.py
│   ├── export.py
│   ├── helpers
│   │   ├── __init__.py
│   │   ├── account_map.py
│   │   ├── athena.py
│   │   ├── cloudformation.py
│   │   ├── csv2view.py
│   │   ├── cur.py
│   │   ├── cur_proxy.py
│   │   ├── diff.py
│   │   ├── glue.py
│   │   ├── iam.py
│   │   ├── organizations.py
│   │   ├── parameter_store.py
│   │   ├── quicksight
│   │   │   ├── __init__.py
│   │   │   ├── dashboard.py
│   │   │   ├── dashboard_patching.py
│   │   │   ├── dataset.py
│   │   │   ├── datasource.py
│   │   │   ├── definition.py
│   │   │   ├── resource.py
│   │   │   ├── template.py
│   │   │   └── version.py
│   │   ├── randtime.py
│   │   ├── s3.py
│   │   └── timezone.py
│   ├── logger.py
│   ├── plugin.py
│   ├── test
│   │   ├── bats
│   │   │   ├── 10-deploy-update-delete
│   │   │   │   ├── compute-optimizer-dashboard.bats
│   │   │   │   ├── cost_intelligence_dashboard.bats
│   │   │   │   ├── cudos.bats
│   │   │   │   ├── kpi_dashboard.bats
│   │   │   │   ├── ta-organizational-view.bats
│   │   │   │   └── trends-dashboard.bats
│   │   │   ├── 20-init-quicksight
│   │   │   │   └── create-qs-subscription.bats
│   │   │   └── README.md
│   │   └── python
│   │       ├── test_csv2view.py
│   │       ├── test_isolated_parameters.py
│   │       └── test_merge.py
│   └── utils.py
├── dashboards
│   ├── amazon-connect
│   │   └── amazon-connect.yaml
│   ├── aws-budgets
│   │   └── aws-budgets.yaml
│   ├── aws-feeds
│   │   └── aws-feeds.yaml
│   ├── aws-marketplace
│   │   └── aws-marketplace-spg.yaml
│   ├── catalog.yaml
│   ├── cloudfront-dashboard-templates
│   │   ├── cloudfront_realtime_logs_dashboard.yaml
│   │   └── cloudfront_standard_logs_dashboard.yaml
│   ├── cora
│   │   └── cora.yaml
│   ├── cost-anomalies
│   │   └── cost-anomalies.yaml
│   ├── cost-intelligence
│   │   ├── cost-intelligence-definition.yaml
│   │   └── cost-intelligence.yaml
│   ├── cudos
│   │   ├── CUDOS-v5-definition.yaml
│   │   └── CUDOS-v5.yaml
│   ├── data-transfer
│   │   └── DataTransfer-Cost-Analysis-Dashboard.yaml
│   ├── extended-support-cost-projection
│   │   └── extended-support-cost-projection.yaml
│   ├── focus
│   │   └── focus.yaml
│   ├── graviton-savings-dashboard
│   │   ├── graviton_legacy.yaml
│   │   └── graviton_savings_dashboard.yaml
│   ├── health-events
│   │   └── health-events.yaml
│   ├── kpi_dashboard
│   │   └── kpi_dashboard.yaml
│   ├── scad-containers-cost-allocation
│   │   └── scad-containers-cost-allocation.yaml
│   ├── support-cases-radar
│   │   └── support-cases-radar.yaml
│   └── sustainability-proxy-metrics
│       └── sustainability-proxy-metrics.yaml
├── docs
│   └── cid-cmd.md
├── pyproject.toml
├── requirements.txt
├── setup.cfg
└── terraform-modules
    ├── README.md
    ├── cid-dashboards
    │   ├── .terraform-docs.yml
    │   ├── README.md
    │   ├── main.tf
    │   ├── outputs.tf
    │   ├── variables.tf
    │   └── versions.tf
    ├── cur-setup-destination
    │   ├── .terraform-docs.yml
    │   ├── README.md
    │   ├── main.tf
    │   ├── outputs.tf
    │   ├── variables.tf
    │   └── versions.tf
    ├── cur-setup-source
    │   ├── .terraform-docs.yml
    │   ├── README.md
    │   ├── main.tf
    │   ├── outputs.tf
    │   ├── variables.tf
    │   └── versions.tf
    └── terraform-test.bats
/.bandit:
--------------------------------------------------------------------------------
1 | # FILE: .bandit
2 | [bandit]
3 | exclude = ./bump-release.py,./build/*,./.venv/*
4 | skips = B101,B608
5 |
--------------------------------------------------------------------------------
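The Security Scan workflow below runs plain `bandit -r .` and relies on bandit auto-discovering this `.bandit` INI file in the scanned directory. A minimal sketch for reproducing the CI scan locally, assuming bandit is installed via pip:

```bash
# Scan the repo root; the [bandit] section above supplies the
# exclude list and the skipped checks (B101, B608) automatically.
pip install bandit
bandit -r .
```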
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto
2 |
--------------------------------------------------------------------------------
/.github/workflows/codeguru-reviewer.yml:
--------------------------------------------------------------------------------
1 | permissions:
2 | id-token: write
3 | contents: read
4 |
5 | name: Code Review
6 |
7 | on:
8 | pull_request:
9 | branches:
10 | - '*'
11 |
12 | jobs:
13 | build:
14 | runs-on: 'ubuntu-latest'
15 | steps:
16 |
17 | - name: Checkout repository
18 | uses: actions/checkout@v3
19 | with:
20 | fetch-depth: 0
21 |
22 | - name: configure aws credentials
23 | uses: aws-actions/configure-aws-credentials@v2
24 | with:
25 | role-to-assume: ${{ secrets.AWS_ROLE }}
26 | role-session-name: ${{ secrets.AWS_SESSION_NAME }}
27 | aws-region: ${{ secrets.AWS_REGION }}
28 |
29 | - name: CodeGuru Reviewer
30 | uses: aws-actions/codeguru-reviewer@v1.1
31 | with:
32 | s3_bucket: codeguru-reviewer-cid-stage
33 |
34 | - name: Upload review result
35 | if: ${{ github.event_name != 'push' }}
36 | uses: github/codeql-action/upload-sarif@v2
37 | with:
38 | sarif_file: codeguru-results.sarif.json
39 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ "main" ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ "main" ]
20 | schedule:
21 | - cron: '34 10 * * 5'
22 |
23 | jobs:
24 | analyze:
25 | name: Analyze
26 | runs-on: ubuntu-latest
27 | permissions:
28 | actions: read
29 | contents: read
30 | security-events: write
31 |
32 | strategy:
33 | fail-fast: false
34 | matrix:
35 | language: [ 'python' ]
36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
37 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
38 |
39 | steps:
40 | - name: Checkout repository
41 | uses: actions/checkout@v3
42 |
43 | # Initializes the CodeQL tools for scanning.
44 | - name: Initialize CodeQL
45 | uses: github/codeql-action/init@v2
46 | with:
47 | languages: ${{ matrix.language }}
48 | # If you wish to specify custom queries, you can do so here or in a config file.
49 | # By default, queries listed here will override any specified in a config file.
50 | # Prefix the list here with "+" to use these queries and those in the config file.
51 |
52 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
53 | # queries: security-extended,security-and-quality
54 |
55 |
56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
57 | # If this step fails, then you should remove it and run the build manually (see below)
58 | - name: Autobuild
59 | uses: github/codeql-action/autobuild@v2
60 |
61 | # ℹ️ Command-line programs to run using the OS shell.
62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
63 |
64 | # If the Autobuild fails above, remove it and uncomment the following three lines.
 65 |     # modify them (or add more) to build your code if your project uses a compiled language; please refer to the EXAMPLE below for guidance.
66 |
67 | # - run: |
68 | # echo "Run, Build Application using script"
69 | # ./location_of_script_within_repo/buildscript.sh
70 |
71 | - name: Perform CodeQL Analysis
72 | uses: github/codeql-action/analyze@v2
73 |
--------------------------------------------------------------------------------
/.github/workflows/publish-rss.yml:
--------------------------------------------------------------------------------
1 | name: Sync RSS to S3
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | paths:
8 | - 'changes/cloud-intelligence-dashboards.rss'
9 |
10 | jobs:
11 | sync-to-s3:
12 | runs-on: ubuntu-latest
13 | permissions:
14 | id-token: write
15 | contents: read
16 |
17 | steps:
18 | - name: Checkout repository
19 | uses: actions/checkout@v3
20 |
21 | - name: Configure AWS credentials
22 | uses: aws-actions/configure-aws-credentials@v3
23 | with:
24 | role-to-assume: ${{ secrets.AWS_RSS_ROLE }}
25 | role-session-name: ${{ secrets.AWS_RSS_SESSION_NAME }}
26 | aws-region: ${{ secrets.AWS_REGION }}
27 | - name: Copy RSS file to S3
28 | run: |
29 | ls
30 | aws s3 cp changes/cloud-intelligence-dashboards.rss s3://cid-feed/feed/cloud-intelligence-dashboards.rss --content-type application/rss+xml
31 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 |
9 | permissions:
10 | id-token: write
11 | contents: read
12 |
13 | name: Upload Python Package
14 |
15 | on:
16 | release:
17 | types: [published]
18 |
19 | jobs:
20 | deploy:
21 | runs-on: ubuntu-latest
22 | environment: release
23 |
24 | steps:
25 | - uses: actions/checkout@v3
26 | - name: Set up Python
27 | uses: actions/setup-python@v4
28 | with:
29 | python-version: "3.x"
30 | - name: Install dependencies
31 | run: |
32 | python -m pip install --upgrade pip
33 | pip install build
34 | - name: Build package
35 | run: python -m build
36 | - name: Publish package
37 | uses: pypa/gh-action-pypi-publish@release/v1
38 | with:
39 | user: __token__
40 | password: ${{ secrets.PYPI_API_TOKEN }}
41 | - name: configure aws credentials
42 | uses: aws-actions/configure-aws-credentials@v2
43 | with:
44 | role-to-assume: ${{ secrets.AWS_ROLE }}
45 | role-session-name: ${{ secrets.AWS_SESSION_NAME }}
46 | aws-region: ${{ secrets.AWS_REGION }}
47 | - name: Push LambdaLayer to S3
48 | run: |
49 | ./assets/publish_lambda_layer.sh
50 |
--------------------------------------------------------------------------------
/.github/workflows/security-scan.yml:
--------------------------------------------------------------------------------
1 |
2 | name: Security Scan
3 |
4 | on:
5 | pull_request:
6 | branches:
7 | - '*'
8 |
9 | jobs:
10 | python-scan:
11 | runs-on: ubuntu-latest
12 | strategy:
13 | matrix:
14 | include:
15 | - {python-version: '3.9' }
16 | - {python-version: '3.12' }
17 | steps:
18 | - name: Git clone the repository
19 | uses: actions/checkout@v3
20 | - name: Set up Python
21 | uses: actions/setup-python@v4
22 | with:
23 | python-version: ${{ matrix.python-version }}
24 | - name: Install Python Modules
25 | run: |
26 | python -m pip install --upgrade pip
27 | pip install -U bandit pip-audit setuptools .
28 | - name: Bandit Scan
29 | run: |
30 | bandit -r .
31 | - name: Install cid-cmd
32 | run: |
33 | pip install -U .
34 | - name: Pip Audit
35 | run: |
36 | # GHSA-wfm5-v35h-vwf4 gitpython is a dependency of pip-audit not cid-cmd
37 | # GHSA-cwvm-v4w8-q58c gitpython is a dependency of pip-audit not cid-cmd
38 | pip-audit --ignore-vuln GHSA-wfm5-v35h-vwf4 --ignore-vuln GHSA-cwvm-v4w8-q58c
39 |
40 | python-pylint-cid:
41 | runs-on: ubuntu-latest
42 | steps:
43 | - name: Git clone the repository
44 | uses: actions/checkout@v3
45 | - name: Set up Python
46 | uses: actions/setup-python@v4
47 | with:
48 | python-version: '3.11'
 49 |       - name: Install pylint
50 | run: |
51 | pip install pylint
52 | - name: PyLint
53 | run: |
54 | pylint ./cid/
55 |
56 | cfn-scan-cid:
57 | runs-on: ubuntu-latest
58 | steps:
59 | - name: Git clone the repository
60 | uses: actions/checkout@v3
61 | - name: Set up Python
62 | uses: actions/setup-python@v4
63 | with:
64 | python-version: '3.10'
65 | - name: Set up Ruby
66 | uses: ruby/setup-ruby@v1
67 | with:
68 | ruby-version: '3.1'
69 | - name: Install CFN tools
70 | run: |
71 | gem install cfn-nag
72 | - name: CFN Nag scan
73 | run: |
74 | cfn_nag_scan --input-path ./cfn-templates/cid-cfn.yml
75 | - name: Install cfn-lint
76 | run: |
77 | pip install cfn-lint
78 | - name: CFN Lint
79 | run: |
80 | cfn-lint ./cfn-templates/cid-cfn.yml
81 |
82 | cfn-scan-cid-admin-policies:
83 | runs-on: ubuntu-latest
84 | steps:
85 | - name: Git clone the repository
86 | uses: actions/checkout@v3
87 | - name: Set up Python
88 | uses: actions/setup-python@v4
89 | with:
90 | python-version: '3.10'
91 | - name: Set up Ruby
92 | uses: ruby/setup-ruby@v1
93 | with:
94 | ruby-version: '3.1'
95 | - name: Install CFN tools
96 | run: |
97 | gem install cfn-nag
98 | - name: CFN Nag scan
99 | run: |
100 | cfn_nag_scan --input-path ./cfn-templates/cid-admin-policies.yaml
101 | - name: Install cfn-lint
102 | run: |
103 | pip install cfn-lint
104 | - name: CFN Lint
105 | run: |
106 | cfn-lint ./cfn-templates/cid-admin-policies.yaml
107 |
108 |
109 | terraform-scan:
110 | runs-on: ubuntu-latest
111 | steps:
112 | - name: Git clone the repository
113 | uses: actions/checkout@v3
114 | - name: Set up Python
115 | uses: actions/setup-python@v4
116 | with:
117 | python-version: '3.11'
118 | - name: Install checkov
119 | run: |
120 | pip install -U schema checkov
121 | - name: Checkov scan
122 | run: |
123 | checkov --directory ./terraform-modules
124 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | permissions:
2 | id-token: write
3 | contents: read
4 |
5 | name: Run Tests
6 |
7 | on:
8 | pull_request:
9 | branches:
10 | - '*'
11 |
12 | concurrency: staging_environment
13 |
14 | jobs:
15 | test-cli:
16 | runs-on: ubuntu-latest
17 | strategy:
18 | fail-fast: false
19 | max-parallel: 1
20 | matrix:
21 | include:
22 | - {python-version: '3.9', os: ubuntu-latest, dashboard: cudos }
23 | - {python-version: '3.10', os: macos-latest, dashboard: cudos }
24 | - {python-version: '3.11', os: ubuntu-latest, dashboard: cudos }
25 | - {python-version: '3.12', os: macos-latest, dashboard: cudos }
26 | steps:
27 | - name: Git clone the repository
28 | uses: actions/checkout@v3
29 | - name: Set up Python
30 | uses: actions/setup-python@v4
31 | with:
32 | python-version: ${{ matrix.python-version }}
33 | - name: Install
34 | run: |
35 | python -m pip install --upgrade pip
36 | pip install ./
37 | - name: configure aws credentials
38 | uses: aws-actions/configure-aws-credentials@v2
39 | with:
40 | role-to-assume: ${{ secrets.AWS_ROLE }}
41 | role-session-name: ${{ secrets.AWS_SESSION_NAME }}
42 | aws-region: ${{ secrets.AWS_REGION }}
43 | - name: Basic check
44 | run: |
45 | cid-cmd status
46 | - name: Install Bats
47 | run: |
48 | sudo apt-get install -y bats
 49 |       - name: Check Bats Version
50 | run: |
51 | bats -v
52 | - name: Run Bats Tests
53 | run: |
54 | bats cid/test/bats/10-deploy-update-delete/${{ matrix.dashboard }}.bats
55 | - name: Save logs
56 | if: always()
57 | run: |
58 | aws s3 cp ./cid.log s3://aws-cid-stage-logs/$GITHUB_EVENT_NAME/$GITHUB_REF_NAME/$GITHUB_RUN_NUMBER/python${{ matrix.python-version }}-${{ matrix.os }}/
59 |
60 | #publish-lambda-layer:
61 | # needs: test-cli
62 | # runs-on: ubuntu-latest
63 | # strategy:
64 | # fail-fast: false
65 | # max-parallel: 1
66 | # steps:
67 | # - name: Git clone the repository
68 | # uses: actions/checkout@v3
69 | # - name: Set up Python
70 | # uses: actions/setup-python@v4
71 | # with:
72 | # python-version: '3.x'
73 | # - name: Install
74 | # run: |
75 | # python -m pip install --upgrade pip
76 | # - name: configure aws credentials
77 | # uses: aws-actions/configure-aws-credentials@v2
78 | # with:
79 | # role-to-assume: ${{ secrets.AWS_ROLE }}
80 | # role-session-name: ${{ secrets.AWS_SESSION_NAME }}
81 | # aws-region: ${{ secrets.AWS_REGION }}
82 | # - name: Push LambdaLayer to S3
83 | # run: |
84 | # ./assets/publish_lambda_layer.sh
85 |
86 | #test-cfn:
87 | # needs: publish-lambda-layer
88 | # runs-on: 'ubuntu-latest'
89 | # steps:
90 | # - name: Git clone the repository
91 | # uses: actions/checkout@v3
92 | # - name: configure aws credentials
93 | # uses: aws-actions/configure-aws-credentials@v2
94 | # with:
95 | # role-to-assume: ${{ secrets.AWS_ROLE }}
96 | # role-session-name: ${{ secrets.AWS_SESSION_NAME }}
97 | # aws-region: ${{ secrets.AWS_REGION }}
98 | # - name: Install Bats
99 | # run: |
100 | # sudo apt-get install -y bats
101 | # bats -v
102 | # - name: Run Bats Tests for CloudFormation
103 | # run: |
104 | # export lambda_bucket='aws-cid-stage'
105 | # # bats cfn-templates/cid-cfn.tests.bats
106 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Working files for legacy (bash) automation
2 | */work/
3 | work/
4 |
5 | *~
6 | *#
7 | *.swp
8 | *.log
9 |
10 | # Python specific
11 | build/
12 | dist/
13 |
14 | __pycache__/
15 | *.py[cod]
16 | *$py.class
17 | *.egg-info/
18 |
19 | /.coverage
20 | /.coverage.*
21 | /.cache
22 | /.pytest_cache
23 | /.mypy_cache
24 |
25 | # Terraform specific
26 | .terraform/
27 | *.tfstate
28 | *.tfstate.backup
29 | .terraform.lock.hcl
30 | *.tfvars
31 | .tftest.*
32 |
33 | # Environments
34 | .env
35 | .venv
36 | env/
37 | venv/
38 | ENV/
39 | env.bak/
40 | venv.bak/
41 |
42 | # IDEs
43 | .vscode
44 | .idea/
45 |
46 | # Files that might appear in the root of a volume
47 | .DS_Store
48 | .AppleDouble
49 | .LSOverride
50 | .DocumentRevisions-V100
51 | .fseventsd
52 | .Spotlight-V100
53 | .TemporaryItems
54 | .Trashes
55 | .VolumeIcon.icns
56 | .com.apple.timemachine.donotpresent
57 |
58 | #Local dev and testing files
59 | cfn-templates/parameters.local
60 | sandbox/
61 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MAIN]
 2 | fail-under=7.50 # scores can differ between GitHub Actions and your local machine
3 |
4 | [FORMAT]
5 | max-line-length=240
6 |
7 | [MESSAGES CONTROL]
8 | # Disabling some:
 9 | # W1203:logging-fstring-interpolation - No performance concern
10 | # W0511:fixme - widely used
11 | disable=W1203,W0511
--------------------------------------------------------------------------------
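
To reproduce the CI lint locally, a sketch assuming pylint is installed; pylint picks up `.pylintrc` from the current directory automatically:

```bash
# Lint the cid package with the repo's .pylintrc; the run fails
# if the score drops below the fail-under threshold of 7.50.
pip install pylint
pylint ./cid/
```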
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 |
 39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
7 | the Software, and to permit persons to whom the Software is furnished to do so.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
15 |
16 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include cid *.sql *.json *.yaml
2 |
3 | prune assets
4 | prune cfn-templates
5 | prune src
6 | prune work
7 |
--------------------------------------------------------------------------------
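A quick way to verify the manifest takes effect — a sketch, assuming the `build` package is installed: build a source distribution and confirm the data files pulled in by `recursive-include cid` are packaged.

```bash
# Build an sdist, then list the bundled SQL/JSON/YAML resources.
python -m build --sdist
tar -tzf dist/*.tar.gz | grep -E '\.(sql|json|yaml)$' | head
```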
/assets/build_lambda_layer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This script builds a lambda layer. Outputs relative path of layer zip.
3 | export CID_VERSION=$(python3 -c "from cid import _version;print(_version.__version__)")
4 | rm -rf build
5 |
6 | function get_hash {
7 | find ./cid -type f -exec md5sum {} + | md5sum | awk '{print $1}'
8 | }
9 |
10 | function build_layer {
11 | echo 'Building a layer'
12 | mkdir -p ./python
13 | python3 -m pip install . -t ./python
14 | zip -qr cid-$CID_VERSION.zip ./python
15 | ls -l cid-$CID_VERSION.zip
16 | rm -rf ./python
17 | }
18 |
19 | # Check if code has been changed
 20 | previous_hash=$(cat cid-$CID_VERSION.hash 2>/dev/null)  # empty if no previous build exists
21 | actual_hash=$(get_hash)
22 | if [ "$actual_hash" == "$previous_hash" ] && [ -e "cid-$CID_VERSION.zip" ]; then
23 | echo "No changes in code. Reuse existing zip." 1>&2
24 | else
25 | build_layer 1>&2
26 | echo $actual_hash > cid-$CID_VERSION.hash
27 | fi
28 |
29 | ls cid-$CID_VERSION.zip
--------------------------------------------------------------------------------
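Intended usage, as seen in `publish_lambda_layer.sh`: the zip filename is the script's only stdout, so it can be captured directly.

```bash
# Build (or reuse) the layer zip and capture its relative path;
# progress messages go to stderr and don't pollute the capture.
layer=$(./assets/build_lambda_layer.sh)
echo "layer artifact: $layer"
```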
/assets/images/advanced-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions-library-samples/cloud-intelligence-dashboards-framework/e29093e19c866a99d0b71f86fc32702ff237bc51/assets/images/advanced-architecture.png
--------------------------------------------------------------------------------
/assets/images/deployment-guide-button.svg:
--------------------------------------------------------------------------------
(SVG markup omitted)
--------------------------------------------------------------------------------
/assets/images/documentation.svg:
--------------------------------------------------------------------------------
(SVG markup omitted)
--------------------------------------------------------------------------------
/assets/images/foundational-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-solutions-library-samples/cloud-intelligence-dashboards-framework/e29093e19c866a99d0b71f86fc32702ff237bc51/assets/images/foundational-architecture.png
--------------------------------------------------------------------------------
/assets/lint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # shellcheck disable=SC2086,SC2181
3 | # This script runs cfn-lint cfn_nag_scan and checkov for all templates in folder
4 |
5 | RED='\033[0;31m'
6 | GREEN='\033[0;32m'
7 | YELLOW='\033[0;33m'
8 | NC='\033[0m' # No Color
9 |
10 | folder=$(git rev-parse --show-toplevel)/cfn-templates/
11 | success_count=0
12 | failure_count=0
13 |
14 | # CKV_AWS_109: "Ensure IAM policies does not allow permissions management without constraints"
15 | # CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints"
16 | # CKV_AWS_115: "Ensure that AWS Lambda function is configured for function-level concurrent execution limit"
17 | # CKV_AWS_116: "Ensure that AWS Lambda function is configured for a Dead Letter Queue(DLQ)"
18 | # CKV_AWS_117: "Ensure that AWS Lambda function is configured inside a VPC"
19 | # CKV_AWS_173: "Check encryption settings for Lambda environmental variable"
20 | # CKV_AWS_195: "Ensure Glue component has a security configuration associated"
21 | # CKV_AWS_18: "Ensure the S3 bucket has access logging enabled"
22 | # CKV_AWS_21: "Ensure the S3 bucket has versioning enabled"
23 | checkov_skip=CKV_AWS_109,CKV_AWS_111,CKV_AWS_115,CKV_AWS_116,CKV_AWS_117,CKV_AWS_173,CKV_AWS_195,CKV_AWS_18,CKV_AWS_21
24 |
25 |
26 | export exclude_files=("module-inventory.yaml" "module-pricing.yaml") # For::Each breaks lint :'(
27 |
28 | yaml_files=$(find "$folder" -type f \( -name "*.yaml" -o -name "*.yml" \) -exec ls -1t "{}" +;) # ordered by date
29 |
30 | for file in $yaml_files; do
31 | echo "Linting $(basename $file)"
32 | fail=0
33 |
34 | # checkov
35 | output=$(eval checkov --skip-download --skip-check $checkov_skip --quiet -f "$file")
36 | if [ $? -ne 0 ]; then
37 | echo "$output" | awk '{ print "\t" $0 }'
38 | echo -e "checkov ${RED}KO${NC}" | awk '{ print "\t" $0 }'
39 | fail=1
40 | else
41 | echo -e "checkov ${GREEN}OK${NC}" | awk '{ print "\t" $0 }'
42 | fi
43 |
44 | # cfn-lint
45 | output=$(eval cfn-lint -- "$file")
46 | if [ $? -ne 0 ]; then
47 | echo "$output" | awk '{ print "\t" $0 }'
48 | echo -e "cfn-lint ${RED}KO${NC}" | awk '{ print "\t" $0 }'
49 | fail=1
50 | else
51 | echo -e "cfn-lint ${GREEN}OK${NC}" | awk '{ print "\t" $0 }'
52 | fi
53 |
54 | # cfn_nag_scan
55 | output=$(eval cfn_nag_scan --input-path "$file")
56 | if [ $? -ne 0 ]; then
57 | echo "$output" | awk '{ print "\t" $0 }'
58 | echo -e "cfn_nag_scan ${RED}KO${NC}" | awk '{ print "\t" $0 }'
59 | fail=1
60 | else
61 | echo -e "cfn_nag_scan ${GREEN}OK${NC}" | awk '{ print "\t" $0 }'
62 | fi
63 |
64 | if [ $fail -ne 0 ]; then
65 | ((failure_count++))
66 | else
67 | ((success_count++))
68 | fi
69 | done
70 |
71 | echo "Successful lints: $success_count"
72 | echo "Failed lints: $failure_count"
73 | if [ $failure_count -ne 0 ]; then
74 | exit 1
75 | else
76 | exit 0
77 | fi
78 |
--------------------------------------------------------------------------------
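To run the same template lint suite locally — a sketch assuming Python, Ruby, and git are available, since `lint.sh` expects all three linters on PATH:

```bash
# Install the linters lint.sh invokes, then lint every template
# under cfn-templates/.
pip install cfn-lint checkov
gem install cfn-nag
./assets/lint.sh
```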
/assets/publish_lambda_layer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This script can be used for release or testing of lambda layers upload.
3 |
4 | # First build layer
5 | layer=$(./assets/build_lambda_layer.sh)
6 | # Source the version
7 | export CID_VERSION=$(python3 -c "from cid import _version;print(_version.__version__)")
8 |
9 |
10 | # Then publish on s3
11 | export AWS_REGION=us-east-1
12 | export STACK_SET_NAME=LayerBuckets
13 | aws cloudformation list-stack-instances \
14 | --stack-set-name $STACK_SET_NAME \
15 | --query 'Summaries[].[StackId,Region]' \
16 | --output text |
17 | while read stack_id region; do
18 | echo "uploading $layer to $region"
19 | bucket=$(aws cloudformation list-stack-resources --stack-name $stack_id \
20 | --query 'StackResourceSummaries[?LogicalResourceId == `LayerBucket`].PhysicalResourceId' \
21 | --region $region --output text)
22 | output=$(aws s3api put-object \
23 | --bucket "$bucket" \
24 | --key cid-resource-lambda-layer/$layer \
25 | --body ./$layer)
26 | if [ $? -ne 0 ]; then
27 | echo "Error: $output"
28 | else
29 | echo "Uploaded successfully"
30 | fi
31 | done
32 |
33 | echo 'Cleanup'
34 | rm -vf ./$layer
35 |
36 |
37 | # Publish cfn (only works for the release)
38 | if aws s3 ls "s3://aws-managed-cost-intelligence-dashboards" >/dev/null 2>&1; then
39 | echo "Updating cid-cfn.yml"
 40 |   aws s3 sync ./cfn-templates/ s3://aws-managed-cost-intelligence-dashboards/cfn/ --exclude 'cur-aggregation.yaml' --exclude 'data-exports-aggregation.yaml' # sync excludes are relative to the source directory
 41 |   # Publish additional copy into respective version folder
 42 |   aws s3 sync ./cfn-templates/ "s3://aws-managed-cost-intelligence-dashboards/cfn/${CID_VERSION}/" --exclude 'cur-aggregation.yaml' --exclude 'data-exports-aggregation.yaml'
43 |
44 | echo "Syncing dashboards"
45 | aws s3 sync ./dashboards s3://aws-managed-cost-intelligence-dashboards/hub/
46 | else
47 | echo "Not the main account. Skipping"
48 | fi
49 |
50 | echo 'Done'
51 |
--------------------------------------------------------------------------------
/bump-release.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | os.system('git checkout main')
5 | os.system('git pull')
6 |
7 | from cid._version import __version__ as old_ver
8 |
9 | bump='patch'
10 | if len(sys.argv)>1:
11 | bump = sys.argv[1]
12 |
13 | maj, minor, patch = map(int, old_ver.split('.'))
14 |
15 | if bump=='patch':
16 | new_ver = '.'.join(map(str,[maj, minor, patch + 1]))
17 | elif bump=='minor':
18 | new_ver = '.'.join(map(str,[maj, minor + 1, 0]))
19 | else:
20 | raise NotImplementedError('only patch and minor are implemented')
21 |
22 | os.system(f"git checkout -b 'release/{new_ver}'")
23 |
24 |
25 | tx = open('cid/_version.py').read()
26 | with open('cid/_version.py', "w") as f:
27 | f.write(tx.replace(f"version__ = '{old_ver}'", f"version__ = '{new_ver}'"))
28 |
29 | tx = open('cfn-templates/cid-cfn.yml').read()
30 | with open('cfn-templates/cid-cfn.yml', "w") as f:
31 | f.write(tx.replace(f"{old_ver}", f"{new_ver}"))
32 |
33 |
34 | os.system('git diff HEAD --unified=0')
35 | print('to undo:\n git checkout HEAD -- cfn-templates/cid-cfn.yml cid/_version.py')
36 | print(f"to continue:\n git commit -am 'release {new_ver}'; git push origin 'release/{new_ver}'")
37 |
--------------------------------------------------------------------------------
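A typical release flow with this script — a sketch based on the script's own prompts; the final commands are printed by the script with the concrete version substituted for `<new-version>`:

```bash
# Bump the minor version (the default bump is 'patch'), then follow
# the printed instructions to commit and push the release branch.
python3 bump-release.py minor
git commit -am 'release <new-version>'
git push origin 'release/<new-version>'
```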
/cfn-templates/cid-lakeformation-prerequisite.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: 'CID LakeFormation Prerequisite Stack v0.0.1'
3 |
4 | Resources:
5 | LakeFormationTag:
6 | Type: AWS::LakeFormation::Tag
7 | Properties:
8 | CatalogId: !Ref 'AWS::AccountId'
9 | TagKey: CidAssetsAccess
10 | TagValues:
11 | - Allow
12 | - Deny
13 |
14 | Outputs:
15 | CidLakeFormationTagKey:
 16 |     Description: Technical Value - Cid LakeFormation Tag Key
17 | Value: CidAssetsAccess
18 | Export: { Name: 'cid-LakeFormation-TagKey'}
19 | CidLakeFormationTagValue:
 20 |     Description: Technical Value - Cid LakeFormation Tag Value
21 | Value: Allow
22 | Export: { Name: 'cid-LakeFormation-TagValue'}
--------------------------------------------------------------------------------
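Downstream stacks consume the two exported names via `Fn::ImportValue`. A quick way to inspect them after deployment, sketched with the AWS CLI:

```bash
# List the cid-LakeFormation-* exports created by this stack.
aws cloudformation list-exports \
  --query "Exports[?starts_with(Name, 'cid-LakeFormation')].[Name,Value]" \
  --output table
```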
/cfn-templates/cid-plugin.yml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Description: A Plugin for Cloud Intelligence Dashboards v0.0.1
3 |
4 | Parameters:
5 | DashboardId:
6 | Type: String
7 | Description: 'Dashboard id'
8 | Default: ''
9 | AllowedPattern: "^[a-zA-Z0-9_-]*$"
10 | ResourcesUrl:
11 | Type: String
12 | Description: 'Resources File URL. Keep it empty if not sure.'
13 | Default: ''
14 | RequiresDataCollection:
15 | Type: String
16 | Description: 'Is DataCollectionRequired'
17 | AllowedValues: ["yes", "no"]
18 | Default: 'no'
19 | RequiresDataExports:
20 | Type: String
 21 |     Description: 'Is DataExportsRequired'
22 | AllowedValues: ["yes", "no"]
23 | Default: 'no'
24 |
25 | Conditions:
26 | ResourcesUrlIsEmpty: !Equals [!Ref ResourcesUrl, '']
27 | RequiresDataCollection: !Equals [!Ref RequiresDataCollection, 'yes']
28 | RequiresDataExports: !Equals [!Ref RequiresDataExports, 'yes']
29 |
30 | Resources:
31 | Dashboard:
32 | Type: Custom::CidDashboard
33 | Properties:
34 | Name: !Ref DashboardId
35 | ServiceToken: {'Fn::ImportValue': "cid-CidExecArn"}
36 | Dashboard:
37 | dashboard-id: !Ref DashboardId
38 | account-map-source: 'dummy'
39 | resources: !If [ResourcesUrlIsEmpty, !Ref 'AWS::NoValue', !Ref ResourcesUrl]
40 | data_exports_database_name: !If [RequiresDataExports, {'Fn::ImportValue': "cid-DataExports-Database"}, !Ref 'AWS::NoValue']
41 | data_collection_database_name: !If [RequiresDataCollection, {'Fn::ImportValue': "cid-DataCollection-Database"}, !Ref 'AWS::NoValue']
42 |
43 | Outputs:
44 | DashboardURL:
45 | Description: "URL of Dashboard"
46 | Value: !GetAtt Dashboard.DashboardURL
47 |
--------------------------------------------------------------------------------
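A hedged deployment sketch for this plugin template: the stack name and `DashboardId=cudos-v5` are illustrative only, and the `cid-CidExecArn` export must already exist in the account, since the custom resource calls back into the CID framework Lambda it imports.

```bash
# Deploy the plugin stack; the Custom::CidDashboard resource uses
# the Lambda ARN imported via cid-CidExecArn as its ServiceToken.
aws cloudformation deploy \
  --template-file cfn-templates/cid-plugin.yml \
  --stack-name cid-plugin-example \
  --parameter-overrides DashboardId=cudos-v5 RequiresDataExports=yes
```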
/changes/CHAMGELOG-sustainability-proxy-metrics.md:
--------------------------------------------------------------------------------
1 | # What's new in the Sustainability Proxy and Carbon emissions data Dashboard
2 |
3 | ## Sustainability Proxy and Carbon emissions data Dashboard - v4.0.0
4 | * Removed option for using experimental-programmatic-access-ccft (Deprecated)
5 | * Added dependency on the Carbon Data export.
 6 | * Updated datasets to source data from the Carbon Data export.
--------------------------------------------------------------------------------
/changes/CHANGELOG-amazon-connect.md:
--------------------------------------------------------------------------------
1 | # What's new in Amazon Connect Dashboard
2 |
3 | ## Amazon Connect Dashboard - v1.1.1
4 | * minor fixes
5 |
6 | ## Amazon Connect Dashboard - v1.1.0
7 | * new visual on MoM Connect usage trends
8 | * new tab Contact Center to track other services on Connect accounts
9 | * new filter (slicer) to find calls with cost in defined range
10 | * new call distribution per cost bins visual
11 | * removed link to deprecated feedback form
12 | * added recommendations to enable granular billing
13 | * added description on cost and charge types
14 | * added explanation on average unit price
15 | * minor fixes
16 |
17 | ## Amazon Connect Dashboard - v1.0.1
18 | * minor bugfixes
19 |
20 | ## Amazon Connect Dashboard - v1.0.0
21 | * initial release
22 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-aws-cost-anomalies.md:
--------------------------------------------------------------------------------
1 | # What's new in AWS Cost Anomalies Dashboard
2 |
3 | ## AWS Cost Anomalies Dashboard v1.1.0:
4 | * Anomalies status changed from Open/Closed to Active/Past
5 | * Added anomalies deduplication logic to show total impact from the most recent value of anomaly
6 | * Improved look and feel by adding action filters and visual elements
 7 | * Added 'Daily Cost Anomalies Total Impact' visual, which allows tracking the total impact of each anomaly
8 | * Removed KPI visuals
9 |
10 | ## AWS Cost Anomalies Dashboard v1.0.0:
11 | * Initial release
12 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-aws-marketplace-spg.md:
--------------------------------------------------------------------------------
1 | # What's new in AWS Marketplace Single Pane of Glass (SPG)
2 | ## v0.1.1
3 | * Added "Invoice Tracker" in Spend Summary tab to have an overall view of all AWS Marketplace invoices in a month.
4 | * Modified "AWS Marketplace Spend and Usage by Seller Product" in Spend Summary tab to identify Seller of Record.
5 | * Merged "AWS Marketplace Subscription Start and End Dates" in Spend Deep Dive tab with "Spend Details by Invoice".
6 | * Modified "Spend Details by Invoice" in Spend Deep Dive tab to include all Charge types.
7 | * All "Seller" references were updated to "Seller of Record" as it appears in Cost and Usage Report (CUR) data.
8 |
9 |
10 | ## v0.1.0
11 | * Initial release
12 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-cid.md:
--------------------------------------------------------------------------------
1 | # What's new in the Cost Intelligence Dashboard (CID)
2 |
3 | ## CID - 3.5
4 | * Removed link to deprecated feedback form
5 |
6 | ## CID - 3.4
7 | * All sheets
8 | * Added Payer Accounts control
9 |
10 | ## CID - 3.3
11 | * RI/SP Summary
12 | * Fixed Compute SP Total Savings for current Month in "Pricing Model Savings Summary" Visual
13 |
14 | ## CID - 3.2
15 | * Billing summary
16 | * Limited Forecast
17 | * Compute Summary
18 | * Fixed spot savings visual.
19 |
20 | ## CID - 3.1
21 | * All
22 | * Added Account Name control to all tabs
23 | * Compute Summary
24 | * Removed % sign from Spot savings.
25 |
26 | ## CID - 3
27 | * All
28 | * Added an Account ID control to CID
29 | * New calc fields or calc field adjustments
30 | * Normalized Usage Quantity
31 | * % spot savings
32 | * Spot coverage
33 | * Spot Savings
34 | * Spot OnDemand Cost
35 | * % RI_SP savings
36 | * Date Granularity
37 | * OnDemand Cost
38 | * OnDemand Coverage
39 | * Open Invoice Count
40 | * RI_SP Coverage Tracking
41 | * RI_SP OnDemand Cost
42 | * RI_SP Coverage
43 | * RI_SP Utilization
44 | * Renamed calc fields % coverage to % coverage usage
45 | * Added a % coverage cost
46 | * Added a Product Category to summary view to group services and provide a better grouping for SPs/RIs.
 47 |     * Containers are categorized as compute
48 | * Renamed Group by Fields to Group By Fields - Level 1
49 | * Added a Group By Fields - Level 2 for OPTICS explorer
50 | * Updated the EBS volume field to include gp3 and io2 better naming
51 | * Billing summary
52 | * Added the invoice id insight from CUDOS
53 | * Added an AWS monthly report visual
54 | * Compute & RI/SP
55 | * Changed RI/SP to total savings not just gross RI/SP savings.
 56 |   * Updated the Spot visual to use the summary view rather than ec2 running costs now that spot fields are available. To avoid negative savings for Fargate Spot, added a formula that takes the amortized cost in those cases: sum(ifelse(({purchase_option}='Spot' AND {Cost_Public} >0),{Cost_Public},{purchase_option}='Spot',{Cost_Amortized},0))
57 | * Updated the RI/SP summary to include a filter for product category and service
58 | * Added in new RI/SP visuals at the bottom with a date filter for those.
59 | * Updated the Expiring RI/SP tracker to have more data points in the table and include a grouping for Product Category
60 | * Optics Explorer
61 | * Added a group by level 2 field to allow for multi-dimensional reporting
 62 |   * Added more filters to align better with Cost Explorer
 63 |   * Changed date filtering to a relative filter to support trailing periods and similar views
64 | * Added a date granularity to provide daily or monthly views
65 |
66 |
67 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-cod.md:
--------------------------------------------------------------------------------
1 | # What's new in the Compute Optimizer Dashboard (COD)
2 |
3 | ## Compute Optimizer Dashboard - v5.0.0
4 | * Added License and Idle Optimizer sheets
5 |
6 | ## Compute Optimizer Dashboard - v4.0.0
7 | * Added ECS Compute Optimizer sheets
8 |
9 | ## Compute Optimizer Dashboard - v3.1.0
10 | * Removed link to deprecated feedback form
11 |
12 | ## Compute Optimizer Dashboard - v3.0.0
13 | * Added RDS Compute Optimizer sheets
14 |
15 | ## Compute Optimizer Dashboard - v2.0.2
16 | * Added History Visual on EBS tab
17 |
18 | ## Compute Optimizer Dashboard - v2.0.1
19 | * Bugfixes
20 |
21 | ## Compute Optimizer Dashboard - v2.0.0
 22 | * Added support for tags. Currently the dashboard uses 2 tags (primary and secondary), which you can specify on install or update. Values of these tags can be used in filters.
 23 | * Added finding history, showing all available findings for a particular resource over time.
24 | * Added AccountId and BU filters.
25 |
26 | **Important:** Update to this version requires cid-cmd v0.2.18+.
27 |
28 | ```
29 | pip3 install --upgrade cid-cmd
30 | cid-cmd update --dashboards-id compute-optimizer-dashboard --force --recursive --on-drift override
31 | ```
32 |
33 | ## Compute Optimizer Dashboard - v1.0
34 | * Initial release
--------------------------------------------------------------------------------
/changes/CHANGELOG-cora.md:
--------------------------------------------------------------------------------
1 | # What's new in the CORA
2 |
3 | ## CORA - v0.0.7
4 | * Added Support of Idle recommendations
5 | * Added Resource Id filter on Usage Optimization tab
6 |
7 | ## CORA - v0.0.6
8 | * Minor fixes
9 | * Added Resource Id filter
10 |
11 |
12 | ## CORA - v0.0.5
13 | * Initial release
--------------------------------------------------------------------------------
/changes/CHANGELOG-focus.md:
--------------------------------------------------------------------------------
1 | # What's new in FOCUS Dashboard
2 |
3 | ## FOCUS Dashboard v0.2.0
4 | * Align with GA release of FOCUS 1.0 in AWS Data Exports
5 | * MoM Trends: Bug fix for action filter and conditional formatting for 'Effective Cost per Sub Account in USD' visual
6 |
7 |
8 | ## FOCUS Dashboard (Preview) v0.1.0
9 | * Initial release
10 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-graviton-opportunities.md:
--------------------------------------------------------------------------------
1 | # What's new in the Graviton Opportunities Dashboard
2 |
 3 | **Important:** The Graviton Opportunities Dashboard has been renamed to the Graviton Savings Dashboard.
4 |
5 | Please delete the legacy version of this dashboard by running
6 | ```
7 | cid-cmd delete --dashboard-id graviton-opportunities
8 | ```
9 | Please deploy the new version of this dashboard by running
10 | ```
11 | cid-cmd deploy --dashboard-id graviton-savings
12 | ```
13 |
14 | ## Graviton Opportunities Dashboard v1.1.1:
15 | ```
16 | cid-cmd update --dashboard-id graviton-opportunities
17 | Choose 'Yes' for the following prompt
18 | [confirm-update] No updates available, should I update it anyway?
19 | ```
20 | * Fix broken hyperlinks under Additional Resources
21 |
22 | ## Graviton Opportunities Dashboard v1.1.0:
23 | **Important:** If attempting to update the dashboard, please update cid-cmd first. To update run these commands in your CloudShell (recommended) or other terminal:
24 |
25 | ```
26 | python3 -m ensurepip --upgrade
27 | pip3 install --upgrade cid-cmd
28 | cid-cmd update --dashboard-id graviton-opportunities --force --recursive
29 | ```
30 | **Bug fixes and improvements**
 31 | * Modernization mapping updated with missing instance types
32 | * Deleted Target Coverage and Efficiency sliders
33 | * Including Savings Plan covered usage for EC2
34 | * Updated Missing filters for RDS
35 | * Updates to visuals
36 | * New visuals for existing, potential coverage and implementation effort
37 |
38 | ## Graviton Opportunities Dashboard v1.0.3:
39 | * Updated modernization mapping to include r8g
40 | * Moved EC2 usage type filters from dashboard into SQL
41 |
42 | ## Graviton Opportunities Dashboard v1.0.2:
43 | **Important:** If attempting to update the dashboard, please update cid-cmd first. To update run these commands in your CloudShell (recommended) or other terminal:
44 |
45 | ```
46 | python3 -m ensurepip --upgrade
47 | pip3 install --upgrade cid-cmd
48 | cid-cmd update --dashboard-id graviton-opportunities --force --recursive
49 | ```
50 |
51 | **Bug fixes and improvements**
52 | * Updates to visuals
53 | * Bug fix for duplicate resources caused by data collector storing multiple versions of upgraded resources
54 |
55 |
56 | ## Graviton Opportunities Dashboard v1.0.0
57 | * Initial release
58 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-graviton-savings.md:
--------------------------------------------------------------------------------
1 | # What's new in the Graviton Savings Dashboard
2 |
3 | ## Graviton Savings Dashboard v2.1.0:
4 | ```
5 | cid-cmd update --dashboard-id graviton-savings --force --recursive
6 | ```
7 | * CUR 2 Support
8 | * Fixed filters on Opensearch and ElastiCache Tabs
9 |
10 |
11 | ## Graviton Savings Dashboard v2.0.0:
12 | ```
13 | cid-cmd update --dashboard-id graviton-savings --force --recursive
14 | ```
15 | * New Savings Implementation Effort and Reason Categorization
16 | * New Top and Bottom insights for Managed Services
17 | * New Radio buttons to toggle between Spend, Usage & Savings
18 | * New Potential Graviton Savings breakdown by Purchase Option and Operating System
19 |
20 |
21 | ## Graviton Savings Dashboard Name Update ##
 22 | **Important:** The Graviton Opportunities Dashboard has been renamed to the Graviton Savings Dashboard.
23 |
24 | Please delete the legacy version of this dashboard by running
25 | ```
26 | cid-cmd delete --dashboard-id graviton-opportunities
27 | ```
28 | Please deploy the new version of this dashboard by running
29 | ```
30 | cid-cmd deploy --dashboard-id graviton-savings
31 | ```
32 |
33 | ## Graviton Opportunities Dashboard v1.1.1:
34 | ```
35 | cid-cmd update --dashboard-id graviton-opportunities
36 | Choose 'Yes' for the following prompt
37 | [confirm-update] No updates available, should I update it anyway?
38 | ```
39 | * Fix broken hyperlinks under Additional Resources
40 |
41 | ## Graviton Opportunities Dashboard v1.1.0:
42 | **Important:** If attempting to update the dashboard, please update cid-cmd first. To update run these commands in your CloudShell (recommended) or other terminal:
43 |
44 | ```
45 | python3 -m ensurepip --upgrade
46 | pip3 install --upgrade cid-cmd
47 | cid-cmd update --dashboard-id graviton-opportunities --force --recursive
48 | ```
49 | **Bug fixes and improvements**
 50 | * Modernization mapping updated with missing instance types
51 | * Deleted Target Coverage and Efficiency sliders
52 | * Including Savings Plan covered usage for EC2
53 | * Updated Missing filters for RDS
54 | * Updates to visuals
55 | * New visuals for existing, potential coverage and implementation effort
56 |
57 | ## Graviton Opportunities Dashboard v1.0.3:
58 | * Updated modernization mapping to include r8g
59 | * Moved EC2 usage type filters from dashboard into SQL
60 |
61 | ## Graviton Opportunities Dashboard v1.0.2:
62 | **Important:** If attempting to update the dashboard, please update cid-cmd first. To update run these commands in your CloudShell (recommended) or other terminal:
63 |
64 | ```
65 | python3 -m ensurepip --upgrade
66 | pip3 install --upgrade cid-cmd
67 | cid-cmd update --dashboard-id graviton-opportunities --force --recursive
68 | ```
69 |
70 | **Bug fixes and improvements**
71 | * Updates to visuals
72 | * Bug fix for duplicate resources caused by data collector storing multiple versions of upgraded resources
73 |
74 |
75 | ## Graviton Opportunities Dashboard v1.0.0
76 | * Initial release
77 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-hed.md:
--------------------------------------------------------------------------------
1 | # What's new in Health Events Dashboard (HED)
2 | ## v2.1.0
 3 | * Modified Athena query to include events ingested more than 90 days ago if they are not of closed status. Although not a breaking change for the dashboard, you should update with the `--force --recursive` flags to incorporate it.
4 | * Added guidance text for date range filtering
5 | * Minor cosmetic and usability changes
6 |
7 | ## v2.0.4
8 | * Fix resetting description free text filter issue
9 | * Minor cosmetic and usability changes
10 |
11 | ## v2.0.3
12 | * Fix case where reported resource name is not a full unique ARN to derive unique instance counts
13 | * Easier filtering by date ranges
14 | * Layout changes to event detail section
15 | * Minor cosmetic and usage guidance changes
16 |
17 | ## v2.0.2
18 | * Fix Event Category filter on Events Explorer tab
19 | * Minor cosmetic changes
20 | * Add Event Start Date parameter to assist filtering out old events
21 |
22 | ## v2.0.1
23 | * Minor cosmetic changes and visual fix
24 |
25 | ## v2.0.0
26 | * Reorganized Summary tab for better flow and easy creation of targeted inventory reports of impacted resources
27 | * _NOTE: This and any updates from v1.x to v2.x requires that you update with cid-cmd parameters: --force --recursive_
28 |
29 | ## v1.0.5
30 | * Added icons and styling for status
31 |
32 | ## v1.0.4
33 | * Switched Status to Resource Status from Event Status
34 | * Added Affected resources list
35 |
36 | ## v1.0.3
37 | * Enhanced Events page
38 |
39 | ## v1.0.2
40 | * Initial release
41 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-kpi.md:
--------------------------------------------------------------------------------
1 | # What's new in the KPI Dashboard
2 |
3 | ## KPI - 2.1.0
4 | * Removed link to deprecated feedback form
5 |
6 | ## KPI - 2.0.0
7 | > [!IMPORTANT]
 8 | > Update to this version requires cid-cmd v0.2.23. Please update cid-cmd before updating the dashboard. During the update, QuickSight datasets and Athena views will be updated; please make a copy if you've made any customizations. To update, run these commands in your CloudShell (recommended) or other terminal:
9 | ```
10 | python3 -m ensurepip --upgrade
11 | pip3 install --upgrade cid-cmd
12 | cid-cmd update --dashboard-id kpi_dashboard --recursive
13 | ```
14 | > [!WARNING]
 15 | > You will be prompted to override KPI dashboard views and datasets with their v2.0.0 versions. Choose **proceed and override** for the kpi_instance_all and kpi_tracker views and datasets; you can choose **keep existing** for the others. Any customizations made to visuals for which you select **proceed and override** will be overwritten, so save copies of them if you would like to re-implement them after the update. You'll be able to see a diff of the changes before selecting an option.
16 |
17 | Release notes:
18 | * KPI Tracker: Added new KPI 'RDS Open Source Engines Coverage'
19 | * Metrics Summary: Added RDS visual showing 'RDS Oracle Coverage', 'RDS SQL Server Coverage', 'RDS Open Source Engines Coverage', 'RDS Graviton Coverage'
20 | * RDS: RDS Graviton coverage and savings estimations moved to the new RDS tab. Added visuals 'Top 10 Accounts Spend for Amazon RDS running on Graviton Processors', 'Top 10 Accounts Spend for Amazon RDS running on Other Processors'
21 | * RDS: Added section RDS Engines with Licensing Options with visuals 'Spend trend of RDS Engine Oracle, SQL Server by License Model', 'Potential Savings by migrating RDS Engine Oracle, SQL Server to Open Source engines', 'Top 10 Accounts Spend for RDS Engine Oracle, SQL Server', 'Coverage by Database Engines for Amazon Relational Database Service' and 'RDS Oracle, SQL Server Instances and Potential Savings'
22 |
23 |
24 |
25 | ## KPI - 1.2.1
26 | * Other Graviton: Fixed the potential savings filter to show the correct monthly value.
27 |
28 | ## KPI - 1.2
29 | * Metrics Summary: Added EC2 Unit Cost to EC2 section
30 | * KPI S3 Storage All Athena view: Updated the product_volume_type CASE WHEN statement to match LIKE '%Intelligent%' for Intelligent-Tiering, to meet AIA formatting
31 |
32 | ## KPI - 1.1
33 | * Calculated field correction for Normalized Usage Quantity in the summary_view and kpi_instance_all data sets. Note that `locate` matches substrings, so longer size names (e.g. '12xlarge') are tested before '2xlarge', and 'xlarge' before 'large', to prevent a shorter name from matching first.
34 | ```
35 | ifelse(
36 | locate({instance_type}, '10xlarge')>0, 80*{usage_quantity},
37 | locate({instance_type}, '12xlarge')>0, 96*{usage_quantity},
38 | locate({instance_type}, '16xlarge')>0, 128*{usage_quantity},
39 | locate({instance_type}, '18xlarge')>0, 144*{usage_quantity},
40 | locate({instance_type}, '24xlarge')>0, 192*{usage_quantity},
41 | locate({instance_type}, '32xlarge')>0, 256*{usage_quantity},
42 | locate({instance_type}, '2xlarge')>0, 16*{usage_quantity},
43 | locate({instance_type}, '4xlarge')>0, 32*{usage_quantity},
44 | locate({instance_type}, '8xlarge')>0, 64*{usage_quantity},
45 | locate({instance_type}, '9xlarge')>0, 72*{usage_quantity},
46 | locate({instance_type}, 'nano')>0, .25*{usage_quantity},
47 | locate({instance_type}, 'micro')>0, 0.5*{usage_quantity},
48 | locate({instance_type}, 'small')>0, 1*{usage_quantity},
49 | locate({instance_type}, 'medium')>0, 2*{usage_quantity},
50 | locate({instance_type}, 'xlarge')>0, 8*{usage_quantity},
51 | locate({instance_type}, 'large')>0, 4*{usage_quantity},
52 | {usage_quantity})
53 | ```
54 |
55 | ## KPI - 1
56 | * Launch of KPI Dashboard
57 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-scad-cca.md:
--------------------------------------------------------------------------------
1 | # What's new in SCAD Containers Cost Allocation Dashboard
2 |
3 | ## SCAD Containers Cost Allocation Dashboard - v2.0.0
4 | * Added support for AWS Split Cost Allocation Data (SCAD) for ECS:
5 | * All visuals now include SCAD ECS data (including AWS Batch on ECS), in addition to SCAD EKS data
6 | * The "EKS Breakdown" sheet has been renamed "Cluster Breakdown"
7 | * CUR 2.0 support: The dashboard now defaults to CUR 2.0, and still supports legacy CUR
8 | * The `scad_cca_summary_view` and `scad_cca_hourly_resource_view` Athena views have been merged into a single view.
9 | The time and data granularity levels remain the same, now provided by one view instead of two
10 | * The hourly and resource-level interactive visuals in the "Cluster Breakdown" sheet have been removed and merged into the visuals in the "Workloads Explorer" sheet, which now offer hourly and resource-level data in addition to their existing granularity levels
11 |
12 | Notes:
13 | * For this version (v2.0.0), the minimum required `cid-cmd` version is 4.0.9
14 | * Starting with this version (v2.0.0), the `scad_cca_hourly_resource_view` QuickSight dataset and Athena view are no longer used by the dashboard.
15 | Following an upgrade to this version (v2.0.0), after verifying that the dashboard works, you can delete the `scad_cca_hourly_resource_view` QuickSight dataset and Athena view (in that order).
16 | They won't be deleted automatically, and will continue refreshing and incurring cost until you delete them.
17 | Deleting them is not necessary on a new installation of the dashboard, only following an upgrade.
18 | Deleting a QuickSight dataset: https://docs.aws.amazon.com/quicksight/latest/user/delete-a-data-set.html
19 | Deleting an Athena view: https://docs.aws.amazon.com/athena/latest/ug/drop-view.html
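As a sketch, once the `scad_cca_hourly_resource_view` QuickSight dataset has been deleted (first link above), the Athena view can be dropped with a single statement:
```
DROP VIEW IF EXISTS scad_cca_hourly_resource_view;
```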
20 |
21 | ## SCAD Containers Cost Allocation Dashboard - v1.0.0
22 | * Added support for viewing Net Amortized Cost in the "View Cost As" control in all sheets
23 | * Removed "Exclude last 1 month" from all date range controls to prevent "No Data" (because Split Cost Allocation Data for EKS starts filling data only in the current month)
24 | * Fixed issue where all split cost and usage metrics were lower than they should be, for pods on EC2 instances that were running for less than a full hour
25 | * Fixed aggregation issues for usage metrics in Athena views
26 |
27 | ## SCAD Containers Cost Allocation Dashboard - v0.0.1
28 | * Initial release
29 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-support-cases-radar.md:
--------------------------------------------------------------------------------
1 | # What's new in Support Cases Radar
2 |
3 | ## v1.0.0
4 | * Initial Release
5 |
--------------------------------------------------------------------------------
/changes/CHANGELOG-trends.md:
--------------------------------------------------------------------------------
1 | # What's new in Trends Dashboard
2 | ## Trends Dashboard - v5.1.0
3 | * Updated 'Date range' and 'As of Date' to relative dates on all tabs
4 | * AWS Usage: New visual 'Spend by Calendar Period'; 'Spend by Account' is now a pivot table that includes both payer and usage accounts.
5 |
6 |
7 | ## Trends Dashboard - v5.0
8 | * Initial release
9 |
--------------------------------------------------------------------------------
/cid/__init__.py:
--------------------------------------------------------------------------------
1 | # Declare namespace
2 |
3 | __import__('pkg_resources').declare_namespace(__name__)
4 |
--------------------------------------------------------------------------------
/cid/_version.py:
--------------------------------------------------------------------------------
1 | __version__ = '4.2.1'
2 |
3 |
--------------------------------------------------------------------------------
/cid/base.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from boto3.session import Session
4 | from cid.exceptions import CidCritical
5 |
6 | logger = logging.getLogger(__name__)
7 |
8 | class CidBase():
9 | """
10 | Core class for cid.
11 | """
12 |
13 | _session: Session = None
14 | _awsIdentity: dict = None
15 |
16 |
17 | def __init__(self, session: Session) -> None:
18 | self.session = session
19 |
20 | @property
21 | def account_id(self) -> str:
22 | return self.awsIdentity.get('Account')
23 |
24 | @property
25 | def domain(self) -> str:
26 | if self.partition == 'aws-us-gov':
27 | return 'amazonaws-us-gov.com'
28 | if self.partition == 'aws-cn':
29 | return 'amazonaws.cn'
30 | return 'aws.amazon.com'
31 |
32 | @property
33 | def awsIdentity(self) -> dict:
34 | if not self._awsIdentity:
35 | try:
36 | sts = self.session.client('sts')
37 | self.awsIdentity = sts.get_caller_identity()
38 | except Exception as e:
39 | raise CidCritical(f'Authentication error: {e}')
40 | return self._awsIdentity
41 |
42 | @awsIdentity.setter
43 | def awsIdentity(self, value):
44 | self._awsIdentity = value
45 |
46 | @property
47 | def region(self) -> str:
48 | return self.session.region_name
49 |
50 | @property
51 | def region_name(self) -> str:
52 | return self.session.region_name
53 |
54 | @property
55 | def partition(self) -> str:
56 | return self.session.get_partition_for_region(region_name=self.region_name)
57 |
58 | @property
59 | def session(self) -> Session:
60 | return self._session
61 |
62 | @session.setter
63 | def session(self, value):
64 | self._session = value
65 |
66 | @property
67 | def username(self) -> str:
68 | if not hasattr(self, "_user") or self._user is None:
69 | # Guess the username from identity ARN
70 | arn = self.awsIdentity.get('Arn')
71 | if arn.split(':')[5] == 'root':
72 | return self.account_id
73 | else:
74 | return '/'.join(arn.split('/')[1:])
75 | return self._user.get('UserName')
76 |
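A minimal usage sketch of `CidBase`, assuming valid AWS credentials in the environment (the region name here is only an example):
```python
from boto3.session import Session
from cid.base import CidBase

# The caller identity is resolved lazily via STS on first property access.
base = CidBase(session=Session(region_name='us-east-1'))
print(base.account_id, base.region, base.partition)
```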
--------------------------------------------------------------------------------
/cid/builtin/__init__.py:
--------------------------------------------------------------------------------
1 | # Implement your code here.
2 |
--------------------------------------------------------------------------------
/cid/builtin/core/__init__.py:
--------------------------------------------------------------------------------
1 | # This plugin implements Core dashboards
2 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/cid/compute.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "3fa0d804-9bf5-4a20-a61d-4bdbb6d543b1",
3 | "Name": "compute_savings_plan_eligible_spend",
4 | "PhysicalTableMap": {
5 | "5939aadd-602d-4127-8974-b4ff911eb6c4": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Schema": "${athena_database_name}",
9 | "Name": "compute_savings_plan_eligible_spend",
10 | "InputColumns": [
11 | {
12 | "Name": "billing_period",
13 | "Type": "DATETIME"
14 | },
15 | {
16 | "Name": "month",
17 | "Type": "STRING"
18 | },
19 | {
20 | "Name": "year",
21 | "Type": "STRING"
22 | },
23 | {
24 | "Name": "payer_account_id",
25 | "Type": "STRING"
26 | },
27 | {
28 | "Name": "unblended_cost",
29 | "Type": "DECIMAL"
30 | },
31 | {
32 | "Name": "linked_account_id",
33 | "Type": "STRING"
34 | },
35 | {
36 | "Name": "usage_date",
37 | "Type": "DATETIME"
38 | }
39 | ]
40 | }
41 | },
42 | "a073386e-83e3-4798-8167-5f5604e48ede": {
43 | "RelationalTable": {
44 | "DataSourceArn": "${athena_datasource_arn}",
45 | "Schema": "${athena_database_name}",
46 | "Name": "account_map",
47 | "InputColumns": [
48 | {
49 | "Name": "account_id",
50 | "Type": "STRING"
51 | },
52 | {
53 | "Name": "account_name",
54 | "Type": "STRING"
55 | }
56 | ]
57 | }
58 | }
59 | },
60 | "LogicalTableMap": {
61 | "606cb37c-c311-4cbd-96e4-e0b7ac252d3e": {
62 | "Alias": "Intermediate Table",
63 | "DataTransforms": [
64 | {
65 | "ProjectOperation": {
66 | "ProjectedColumns": [
67 | "year",
68 | "month",
69 | "payer_account_id",
70 | "linked_account_id",
71 | "billing_period",
72 | "usage_date",
73 | "unblended_cost",
74 | "account_id",
75 | "account_name"
76 | ]
77 | }
78 | }
79 | ],
80 | "Source": {
81 | "JoinInstruction": {
82 | "LeftOperand": "d8ff07f9-bc56-4b6d-9388-f9e2cdd74d78",
83 | "RightOperand": "7d72d7c7-85ae-4349-8b61-610af5393caf",
84 | "Type": "LEFT",
85 | "OnClause": "{linked_account_id} = {account_id}"
86 | }
87 | }
88 | },
89 | "7d72d7c7-85ae-4349-8b61-610af5393caf": {
90 | "Alias": "account_map",
91 | "Source": {
92 | "PhysicalTableId": "a073386e-83e3-4798-8167-5f5604e48ede"
93 | }
94 | },
95 | "d8ff07f9-bc56-4b6d-9388-f9e2cdd74d78": {
96 | "Alias": "compute_savings_plan_eligible_spend",
97 | "Source": {
98 | "PhysicalTableId": "5939aadd-602d-4127-8974-b4ff911eb6c4"
99 | }
100 | }
101 | },
102 | "ImportMode": "SPICE",
103 | "Permissions": []
104 | }
105 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/cid/ec2_running_cost.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "9497cc49-c9b1-4dcd-8bcc-c16396898f29",
3 | "Name": "ec2_running_cost",
4 | "PhysicalTableMap": {
5 | "9db232e0-a43d-4885-b122-6349618551b6": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Schema": "${athena_database_name}",
9 | "Name": "account_map",
10 | "InputColumns": [
11 | {
12 | "Name": "account_id",
13 | "Type": "STRING"
14 | },
15 | {
16 | "Name": "account_name",
17 | "Type": "STRING"
18 | }
19 | ]
20 | }
21 | },
22 | "f664fb78-28ab-4119-a0f5-809a3f0b4d72": {
23 | "RelationalTable": {
24 | "DataSourceArn": "${athena_datasource_arn}",
25 | "Schema": "${athena_database_name}",
26 | "Name": "ec2_running_cost",
27 | "InputColumns": [
28 | {
29 | "Name": "billing_period",
30 | "Type": "DATETIME"
31 | },
32 | {
33 | "Name": "purchase_option",
34 | "Type": "STRING"
35 | },
36 | {
37 | "Name": "month",
38 | "Type": "STRING"
39 | },
40 | {
41 | "Name": "year",
42 | "Type": "STRING"
43 | },
44 | {
45 | "Name": "payer_account_id",
46 | "Type": "STRING"
47 | },
48 | {
49 | "Name": "linked_account_id",
50 | "Type": "STRING"
51 | },
52 | {
53 | "Name": "usage_date",
54 | "Type": "DATETIME"
55 | },
56 | {
57 | "Name": "amortized_cost",
58 | "Type": "DECIMAL"
59 | },
60 | {
61 | "Name": "usage_quantity",
62 | "Type": "DECIMAL"
63 | }
64 | ]
65 | }
66 | }
67 | },
68 | "LogicalTableMap": {
69 | "0214ccfd-faf3-4d5f-b334-3171555ecba3": {
70 | "Alias": "Intermediate Table",
71 | "DataTransforms": [
72 | {
73 | "ProjectOperation": {
74 | "ProjectedColumns": [
75 | "year",
76 | "month",
77 | "billing_period",
78 | "usage_date",
79 | "payer_account_id",
80 | "linked_account_id",
81 | "purchase_option",
82 | "amortized_cost",
83 | "usage_quantity",
84 | "account_id",
85 | "account_name"
86 | ]
87 | }
88 | }
89 | ],
90 | "Source": {
91 | "JoinInstruction": {
92 | "LeftOperand": "8f4dd082-917c-4e44-9b7c-57bc6bb33ebe",
93 | "RightOperand": "d1a3a9a4-0569-457f-bd14-9bd53ea96eb8",
94 | "Type": "LEFT",
95 | "OnClause": "{payer_account_id} = {account_id}"
96 | }
97 | }
98 | },
99 | "8f4dd082-917c-4e44-9b7c-57bc6bb33ebe": {
100 | "Alias": "ec2_running_cost",
101 | "Source": {
102 | "PhysicalTableId": "f664fb78-28ab-4119-a0f5-809a3f0b4d72"
103 | }
104 | },
105 | "d1a3a9a4-0569-457f-bd14-9bd53ea96eb8": {
106 | "Alias": "account_map",
107 | "Source": {
108 | "PhysicalTableId": "9db232e0-a43d-4885-b122-6349618551b6"
109 | }
110 | }
111 | },
112 | "ImportMode": "SPICE",
113 | "Permissions": []
114 | }
115 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/cid/s3_view.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "826896be-4d0f-4f90-832f-3427f5444016",
3 | "Name": "s3_view",
4 | "PhysicalTableMap": {
5 | "516bd20f-660c-49e0-95d3-0b5d21ef39f7": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Schema": "${athena_database_name}",
9 | "Name": "account_map",
10 | "InputColumns": [
11 | {
12 | "Name": "account_id",
13 | "Type": "STRING"
14 | },
15 | {
16 | "Name": "account_name",
17 | "Type": "STRING"
18 | }
19 | ]
20 | }
21 | },
22 | "7bb99428-5dfd-4599-8893-b4f57f0c689f": {
23 | "RelationalTable": {
24 | "DataSourceArn": "${athena_datasource_arn}",
25 | "Schema": "${athena_database_name}",
26 | "Name": "s3_view",
27 | "InputColumns": [
28 | {
29 | "Name": "billing_period",
30 | "Type": "DATETIME"
31 | },
32 | {
33 | "Name": "charge_type",
34 | "Type": "STRING"
35 | },
36 | {
37 | "Name": "year",
38 | "Type": "STRING"
39 | },
40 | {
41 | "Name": "payer_account_id",
42 | "Type": "STRING"
43 | },
44 | {
45 | "Name": "usage_date",
46 | "Type": "DATETIME"
47 | },
48 | {
49 | "Name": "product_code",
50 | "Type": "STRING"
51 | },
52 | {
53 | "Name": "month",
54 | "Type": "STRING"
55 | },
56 | {
57 | "Name": "public_cost",
58 | "Type": "DECIMAL"
59 | },
60 | {
61 | "Name": "unblended_cost",
62 | "Type": "DECIMAL"
63 | },
64 | {
65 | "Name": "linked_account_id",
66 | "Type": "STRING"
67 | },
68 | {
69 | "Name": "resource_id",
70 | "Type": "STRING"
71 | },
72 | {
73 | "Name": "usage_quantity",
74 | "Type": "DECIMAL"
75 | },
76 | {
77 | "Name": "region",
78 | "Type": "STRING"
79 | },
80 | {
81 | "Name": "operation",
82 | "Type": "STRING"
83 | },
84 | {
85 | "Name": "pricing_unit",
86 | "Type": "STRING"
87 | }
88 | ]
89 | }
90 | }
91 | },
92 | "LogicalTableMap": {
93 | "4559aa95-4a54-4998-800b-5fd03af260ea": {
94 | "Alias": "account_map",
95 | "Source": {
96 | "PhysicalTableId": "516bd20f-660c-49e0-95d3-0b5d21ef39f7"
97 | }
98 | },
99 | "6beb277c-4d74-47da-b425-1e255aa31be6": {
100 | "Alias": "Intermediate Table",
101 | "DataTransforms": [
102 | {
103 | "ProjectOperation": {
104 | "ProjectedColumns": [
105 | "year",
106 | "month",
107 | "billing_period",
108 | "usage_date",
109 | "payer_account_id",
110 | "linked_account_id",
111 | "resource_id",
112 | "product_code",
113 | "operation",
114 | "region",
115 | "charge_type",
116 | "pricing_unit",
117 | "usage_quantity",
118 | "unblended_cost",
119 | "public_cost",
120 | "account_id",
121 | "account_name"
122 | ]
123 | }
124 | },
125 | {
126 | "TagColumnOperation": {
127 | "ColumnName": "region",
128 | "Tags": [
129 | {
130 | "ColumnGeographicRole": "STATE"
131 | }
132 | ]
133 | }
134 | }
135 | ],
136 | "Source": {
137 | "JoinInstruction": {
138 | "LeftOperand": "ea0d1482-1bf2-46ee-8dc4-5aaea2ab2209",
139 | "RightOperand": "4559aa95-4a54-4998-800b-5fd03af260ea",
140 | "Type": "LEFT",
141 | "OnClause": "{linked_account_id} = {account_id}"
142 | }
143 | }
144 | },
145 | "ea0d1482-1bf2-46ee-8dc4-5aaea2ab2209": {
146 | "Alias": "s3_view",
147 | "Source": {
148 | "PhysicalTableId": "7bb99428-5dfd-4599-8893-b4f57f0c689f"
149 | }
150 | }
151 | },
152 | "ImportMode": "SPICE",
153 | "Permissions": []
154 | }
155 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/kpi/kpi_ebs_snap.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "0e7cd1a6-e38d-4ca8-af6f-9256577d651e",
3 | "Name": "kpi_ebs_snap",
4 | "PhysicalTableMap": {
5 | "88cab3bf-bb23-4b37-9ff6-6f32e4c3d9d8": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Catalog": "AwsDataCatalog",
9 | "Schema": "${athena_database_name}",
10 | "Name": "account_map",
11 | "InputColumns": [
12 | {
13 | "Name": "account_id",
14 | "Type": "STRING"
15 | },
16 | {
17 | "Name": "account_name",
18 | "Type": "STRING"
19 | }
20 | ]
21 | }
22 | },
23 | "f224e706-8cd3-4c2f-b4e8-d8470368782f": {
24 | "RelationalTable": {
25 | "DataSourceArn": "${athena_datasource_arn}",
26 | "Schema": "${athena_database_name}",
27 | "Name": "kpi_ebs_snap",
28 | "InputColumns": [
29 | {
30 | "Name": "billing_period",
31 | "Type": "DATETIME"
32 | },
33 | {
34 | "Name": "start_date",
35 | "Type": "DATETIME"
36 | },
37 | {
38 | "Name": "payer_account_id",
39 | "Type": "STRING"
40 | },
41 | {
42 | "Name": "linked_account_id",
43 | "Type": "STRING"
44 | },
45 | {
46 | "Name": "snapshot_type",
47 | "Type": "STRING"
48 | },
49 | {
50 | "Name": "resource_id",
51 | "Type": "STRING"
52 | },
53 | {
54 | "Name": "usage_quantity",
55 | "Type": "DECIMAL",
56 | "SubType": "FLOAT"
57 | },
58 | {
59 | "Name": "ebs_snapshot_cost",
60 | "Type": "DECIMAL",
61 | "SubType": "FLOAT"
62 | },
63 | {
64 | "Name": "public_cost",
65 | "Type": "DECIMAL",
66 | "SubType": "FLOAT"
67 | },
68 | {
69 | "Name": "ebs_snapshots_under_1yr_cost",
70 | "Type": "DECIMAL",
71 | "SubType": "FLOAT"
72 | },
73 | {
74 | "Name": "ebs_snapshots_over_1yr_cost",
75 | "Type": "DECIMAL",
76 | "SubType": "FLOAT"
77 | }
78 | ]
79 | }
80 | }
81 | },
82 | "LogicalTableMap": {
83 | "9cd6e7ab-5a7a-40b8-8d3f-0061502aa428": {
84 | "Alias": "Intermediate Table",
85 | "DataTransforms": [
86 | {
87 | "ProjectOperation": {
88 | "ProjectedColumns": [
89 | "billing_period",
90 | "start_date",
91 | "payer_account_id",
92 | "linked_account_id",
93 | "snapshot_type",
94 | "resource_id",
95 | "usage_quantity",
96 | "ebs_snapshot_cost",
97 | "public_cost",
98 | "ebs_snapshots_under_1yr_cost",
99 | "ebs_snapshots_over_1yr_cost",
100 | "account_id",
101 | "account_name"
102 | ]
103 | }
104 | }
105 | ],
106 | "Source": {
107 | "JoinInstruction": {
108 | "LeftOperand": "f224e706-8cd3-4c2f-b4e8-d8470368782f",
109 | "RightOperand": "f0d714f0-5bdc-467d-a201-d15ec6f1bcbc",
110 | "Type": "LEFT",
111 | "OnClause": "{linked_account_id} = {account_id}"
112 | }
113 | }
114 | },
115 | "f0d714f0-5bdc-467d-a201-d15ec6f1bcbc": {
116 | "Alias": "account_map",
117 | "Source": {
118 | "PhysicalTableId": "88cab3bf-bb23-4b37-9ff6-6f32e4c3d9d8"
119 | }
120 | },
121 | "f224e706-8cd3-4c2f-b4e8-d8470368782f": {
122 | "Alias": "kpi_ebs_snap",
123 | "Source": {
124 | "PhysicalTableId": "f224e706-8cd3-4c2f-b4e8-d8470368782f"
125 | }
126 | }
127 | },
128 | "ImportMode": "SPICE",
129 | "Permissions": []
130 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/trends/daily_anomaly_detection.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "69029320-c52c-4d21-86ad-3927bb2069f3",
3 | "Name": "daily-anomaly-detection",
4 | "PhysicalTableMap": {
5 | "d6cf03cf-e5b5-444b-868d-1c0ae100dcf3": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Schema": "${athena_database_name}",
9 | "Name": "daily_anomaly_detection",
10 | "InputColumns": [
11 | {
12 | "Name": "line_item_usage_start_date",
13 | "Type": "DATETIME"
14 | },
15 | {
16 | "Name": "line_item_usage_account_id",
17 | "Type": "STRING"
18 | },
19 | {
20 | "Name": "account_name",
21 | "Type": "STRING"
22 | },
23 | {
24 | "Name": "product_product_name",
25 | "Type": "STRING"
26 | },
27 | {
28 | "Name": "unblended_cost",
29 | "Type": "DECIMAL"
30 | },
31 | {
32 | "Name": "line_item_usage_amount",
33 | "Type": "DECIMAL"
34 | }
35 | ]
36 | }
37 | }
38 | },
39 | "LogicalTableMap": {
40 | "d6cf03cf-e5b5-444b-868d-1c0ae100dcf3": {
41 | "Alias": "daily_anomaly_detection",
42 | "DataTransforms": [
43 | {
44 | "ProjectOperation": {
45 | "ProjectedColumns": [
46 | "line_item_usage_start_date",
47 | "line_item_usage_account_id",
48 | "account_name",
49 | "product_product_name",
50 | "unblended_cost",
51 | "line_item_usage_amount"
52 | ]
53 | }
54 | }
55 | ],
56 | "Source": {
57 | "PhysicalTableId": "d6cf03cf-e5b5-444b-868d-1c0ae100dcf3"
58 | }
59 | }
60 | },
61 | "ImportMode": "SPICE",
62 | "Permissions": []
63 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/trends/monthly_anomaly_detection.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "0f11c81d-536a-405f-8de0-d0dc247627ad",
3 | "Name": "monthly-anomaly-detection",
4 | "PhysicalTableMap": {
5 | "a55c68b9-9677-4c31-9464-392a3e97d984": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Schema": "${athena_database_name}",
9 | "Name": "monthly_anomaly_detection",
10 | "InputColumns": [
11 | {
12 | "Name": "bill_billing_period_start_date",
13 | "Type": "DATETIME"
14 | },
15 | {
16 | "Name": "line_item_usage_account_id",
17 | "Type": "STRING"
18 | },
19 | {
20 | "Name": "account_name",
21 | "Type": "STRING"
22 | },
23 | {
24 | "Name": "product_product_name",
25 | "Type": "STRING"
26 | },
27 | {
28 | "Name": "unblended_cost",
29 | "Type": "DECIMAL"
30 | },
31 | {
32 | "Name": "line_item_usage_amount",
33 | "Type": "DECIMAL"
34 | }
35 | ]
36 | }
37 | }
38 | },
39 | "LogicalTableMap": {
40 | "a55c68b9-9677-4c31-9464-392a3e97d984": {
41 | "Alias": "monthly_anomaly_detection",
42 | "DataTransforms": [
43 | {
44 | "ProjectOperation": {
45 | "ProjectedColumns": [
46 | "bill_billing_period_start_date",
47 | "line_item_usage_account_id",
48 | "account_name",
49 | "product_product_name",
50 | "unblended_cost",
51 | "line_item_usage_amount"
52 | ]
53 | }
54 | }
55 | ],
56 | "Source": {
57 | "PhysicalTableId": "a55c68b9-9677-4c31-9464-392a3e97d984"
58 | }
59 | }
60 | },
61 | "ImportMode": "SPICE",
62 | "Permissions": []
63 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/datasets/trends/monthly_bill_by_account.json:
--------------------------------------------------------------------------------
1 | {
2 | "DataSetId": "60e746ae-5781-4352-9752-dc9c633e21e4",
3 | "Name": "monthly-bill-by-account",
4 | "PhysicalTableMap": {
5 | "47da7096-571a-48a5-94f4-ba03339c9193": {
6 | "RelationalTable": {
7 | "DataSourceArn": "${athena_datasource_arn}",
8 | "Schema": "${athena_database_name}",
9 | "Name": "monthly_bill_by_account",
10 | "InputColumns": [
11 | {
12 | "Name": "bill_billing_period_start_date",
13 | "Type": "DATETIME"
14 | },
15 | {
16 | "Name": "bill_payer_account_id",
17 | "Type": "STRING"
18 | },
19 | {
20 | "Name": "line_item_usage_account_id",
21 | "Type": "STRING"
22 | },
23 | {
24 | "Name": "charge_type",
25 | "Type": "STRING"
26 | },
27 | {
28 | "Name": "product_product_name",
29 | "Type": "STRING"
30 | },
31 | {
32 | "Name": "product_region",
33 | "Type": "STRING"
34 | },
35 | {
36 | "Name": "line_item_product_code",
37 | "Type": "STRING"
38 | },
39 | {
40 | "Name": "unblended_cost",
41 | "Type": "DECIMAL"
42 | },
43 | {
44 | "Name": "amortized_cost",
45 | "Type": "DECIMAL"
46 | },
47 | {
48 | "Name": "account_name",
49 | "Type": "STRING"
50 | },
51 | {
52 | "Name": "payer_account_name",
53 | "Type": "STRING"
54 | },
55 | {
56 | "Name": "region_latitude",
57 | "Type": "DECIMAL"
58 | },
59 | {
60 | "Name": "region_longitude",
61 | "Type": "DECIMAL"
62 | },
63 | {
64 | "Name": "aws_service_category",
65 | "Type": "STRING"
66 | }
67 | ]
68 | }
69 | }
70 | },
71 | "LogicalTableMap": {
72 | "47da7096-571a-48a5-94f4-ba03339c9193": {
73 | "Alias": "monthly_bill_by_account",
74 | "DataTransforms": [
75 | {
76 | "TagColumnOperation": {
77 | "ColumnName": "product_region",
78 | "Tags": [
79 | {
80 | "ColumnGeographicRole": "STATE"
81 | }
82 | ]
83 | }
84 | },
85 | {
86 | "TagColumnOperation": {
87 | "ColumnName": "region_latitude",
88 | "Tags": [
89 | {
90 | "ColumnGeographicRole": "LATITUDE"
91 | }
92 | ]
93 | }
94 | },
95 | {
96 | "TagColumnOperation": {
97 | "ColumnName": "region_longitude",
98 | "Tags": [
99 | {
100 | "ColumnGeographicRole": "LONGITUDE"
101 | }
102 | ]
103 | }
104 | },
105 | {
106 | "ProjectOperation": {
107 | "ProjectedColumns": [
108 | "bill_billing_period_start_date",
109 | "bill_payer_account_id",
110 | "line_item_usage_account_id",
111 | "charge_type",
112 | "product_product_name",
113 | "product_region",
114 | "line_item_product_code",
115 | "unblended_cost",
116 | "amortized_cost",
117 | "account_name",
118 | "payer_account_name",
119 | "region_latitude",
120 | "region_longitude",
121 | "aws_service_category"
122 | ]
123 | }
124 | }
125 | ],
126 | "Source": {
127 | "PhysicalTableId": "47da7096-571a-48a5-94f4-ba03339c9193"
128 | }
129 | }
130 | },
131 | "ImportMode": "SPICE",
132 | "Permissions": []
133 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/permissions/dashboard_permissions.json:
--------------------------------------------------------------------------------
1 | {
2 | "Principal": "${PrincipalArn}",
3 | "Actions": [
4 | "quicksight:DescribeDashboard",
5 | "quicksight:ListDashboardVersions",
6 | "quicksight:UpdateDashboardPermissions",
7 | "quicksight:QueryDashboard",
8 | "quicksight:UpdateDashboard",
9 | "quicksight:DeleteDashboard",
10 | "quicksight:UpdateDashboardPublishedVersion",
11 | "quicksight:DescribeDashboardPermissions"
12 | ]
13 | }
14 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/permissions/dashboard_permissions_namespace.json:
--------------------------------------------------------------------------------
1 | {
2 | "Principal": "${PrincipalArn}",
3 | "Actions": [
4 | "quicksight:DescribeDashboard",
5 | "quicksight:ListDashboardVersions",
6 | "quicksight:QueryDashboard"
7 | ]
8 | }
9 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/permissions/data_set_permissions.json:
--------------------------------------------------------------------------------
1 | {
2 | "Principal": "${PrincipalArn}",
3 | "Actions": [
4 | "quicksight:UpdateDataSetPermissions",
5 | "quicksight:DescribeDataSet",
6 | "quicksight:DescribeDataSetPermissions",
7 | "quicksight:PassDataSet",
8 | "quicksight:DescribeIngestion",
9 | "quicksight:ListIngestions",
10 | "quicksight:UpdateDataSet",
11 | "quicksight:DeleteDataSet",
12 | "quicksight:CreateIngestion",
13 | "quicksight:CancelIngestion"
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/permissions/data_source_permissions.json:
--------------------------------------------------------------------------------
1 | {
2 | "Principal": "${PrincipalArn}",
3 | "Actions": [
4 | "quicksight:UpdateDataSourcePermissions",
5 | "quicksight:DescribeDataSource",
6 | "quicksight:DescribeDataSourcePermissions",
7 | "quicksight:PassDataSource",
8 | "quicksight:UpdateDataSource",
9 | "quicksight:DeleteDataSource"
10 | ]
11 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/permissions/folder_permissions.json:
--------------------------------------------------------------------------------
1 | {
2 | "Permissions": [
3 | {
4 | "Principal": "${PrincipalArn}",
5 | "Actions": [
6 | "quicksight:CreateFolder",
7 | "quicksight:DescribeFolder",
8 | "quicksight:UpdateFolder",
9 | "quicksight:DeleteFolder",
10 | "quicksight:CreateFolderMembership",
11 | "quicksight:DeleteFolderMembership",
12 | "quicksight:DescribeFolderPermissions",
13 | "quicksight:UpdateFolderPermissions"
14 | ]
15 | }
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/cid/compute_savings_plan_eligible_spend.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW "compute_savings_plan_eligible_spend" AS
2 | SELECT DISTINCT
3 | split_part("billing_period", '-', 1) "year"
4 | , split_part("billing_period", '-', 2) "month"
5 | , ${cur_tags_json} tags_json --replace with
6 | , "bill_payer_account_id" "payer_account_id"
7 | , "line_item_usage_account_id" "linked_account_id"
8 | , "bill_billing_period_start_date" "billing_period"
9 | , "date_trunc"('hour', "line_item_usage_start_date") "usage_date"
10 | , "sum"("line_item_unblended_cost") "unblended_cost"
11 | FROM
12 | "${cur2_database}"."${cur2_table_name}"
13 | WHERE (
14 | "bill_billing_period_start_date" >= ("date_trunc"('month', current_timestamp) - INTERVAL '1' MONTH)
15 | AND "line_item_usage_start_date" < ("date_trunc"('day', current_timestamp) - INTERVAL '1' DAY)
16 | AND "line_item_line_item_type" = 'Usage'
17 | AND "product_servicecode" <> 'AWSDataTransfer'
18 | AND "line_item_usage_type" NOT LIKE '%Spot%'
19 | AND "line_item_usage_type" NOT LIKE '%DataXfer%'
20 | AND (
21 | ("line_item_product_code" = 'AmazonEC2' AND "line_item_operation" LIKE '%RunInstances%')
22 | OR ("line_item_product_code" = 'AWSLambda' AND ("line_item_usage_type" LIKE '%Lambda-GB-Second%'
23 | OR "line_item_usage_type" LIKE '%Lambda-Provisioned-GB-Second%'
24 | OR "line_item_usage_type" LIKE '%Lambda-Provisioned-Concurrency%')
25 | )
26 | OR "line_item_usage_type" LIKE '%Fargate%'
27 | )
28 | )
29 | GROUP BY 1, 2, 3, 4, 5, 6, 7
30 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/cid/ec2_running_cost.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW "ec2_running_cost" AS
2 | SELECT DISTINCT
3 | split_part("billing_period", '-', 1) "year"
4 | , split_part("billing_period", '-', 2) "month"
5 | , ${cur_tags_json} tags_json --replace with
6 | , "bill_billing_period_start_date" "billing_period"
7 | , "date_trunc"('hour', "line_item_usage_start_date") "usage_date"
8 | , "bill_payer_account_id" "payer_account_id"
9 | , "line_item_usage_account_id" "linked_account_id"
10 | , (CASE
11 | WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN 'SavingsPlan'
12 | WHEN ("reservation_reservation_a_r_n" <> '') THEN 'Reserved'
13 | WHEN ("line_item_usage_type" LIKE '%Spot%') THEN 'Spot'
14 | ELSE 'OnDemand'
15 | END) "purchase_option"
16 | , "sum"(CASE
17 | WHEN "line_item_line_item_type" = 'SavingsPlanCoveredUsage' THEN "savings_plan_savings_plan_effective_cost"
18 | WHEN "line_item_line_item_type" = 'DiscountedUsage' THEN "reservation_effective_cost"
19 | WHEN "line_item_line_item_type" = 'Usage' THEN "line_item_unblended_cost"
20 | ELSE 0 END) "amortized_cost"
21 | , "round"("sum"("line_item_usage_amount"), 2) "usage_quantity"
22 | FROM
23 | "${cur2_database}"."${cur2_table_name}"
24 | WHERE (((((("bill_billing_period_start_date" >= ("date_trunc"('month', current_timestamp) - INTERVAL '1' MONTH))
25 | AND ("line_item_product_code" = 'AmazonEC2'))
26 | AND ("product_servicecode" <> 'AWSDataTransfer'))
27 | AND ("line_item_operation" LIKE '%RunInstances%'))
28 | AND ("line_item_usage_type" NOT LIKE '%DataXfer%'))
29 | AND ((("line_item_line_item_type" = 'Usage')
30 | OR ("line_item_line_item_type" = 'SavingsPlanCoveredUsage'))
31 | OR ("line_item_line_item_type" = 'DiscountedUsage')))
32 | GROUP BY 1, 2, 3, 4, 5, 6, 7, 8
33 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/cid/ri_sp_mapping.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW "ri_sp_mapping" AS
2 | SELECT DISTINCT
3 | "a"."billing_period_mapping"
4 | , "a"."payer_account_id_mapping"
5 | , "a"."ri_sp_arn_mapping"
6 | , "a"."ri_sp_end_date"
7 | , COALESCE("b"."ri_sp_term", "a"."ri_sp_term") "ri_sp_term"
8 | , COALESCE("b"."ri_sp_offering", "a"."ri_sp_offering") "ri_sp_offering"
9 | , COALESCE("b"."ri_sp_payment", "a"."ri_sp_payment") "ri_sp_payment"
10 | FROM
11 | ((
12 | SELECT DISTINCT
13 | "bill_billing_period_start_date" "billing_period_mapping"
14 | , "bill_payer_account_id" "payer_account_id_mapping"
15 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_savings_plan_a_r_n" WHEN ("reservation_reservation_a_r_n" <> '') THEN "reservation_reservation_a_r_n" ELSE '' END) "ri_sp_arn_mapping"
16 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN CAST(CAST("from_iso8601_timestamp"("savings_plan_end_time") AS date) AS timestamp) WHEN (("reservation_reservation_a_r_n" <> '') AND ("reservation_end_time" <> '')) THEN CAST(CAST("from_iso8601_timestamp"("reservation_end_time") AS date) AS timestamp) ELSE null END) "ri_sp_end_date"
17 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_purchase_term" WHEN ("reservation_reservation_a_r_n" <> '') THEN "pricing_lease_contract_length" ELSE '' END) "ri_sp_term"
18 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_offering_type" WHEN ("reservation_reservation_a_r_n" <> '') THEN "pricing_offering_class" ELSE '' END) "ri_sp_offering"
19 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_payment_option" WHEN ("reservation_reservation_a_r_n" <> '') THEN "pricing_purchase_option" ELSE '' END) "ri_sp_payment"
20 | FROM
21 | "${cur2_database}"."${cur2_table_name}"
22 | WHERE (("line_item_line_item_type" = 'RIFee') OR ("line_item_line_item_type" = 'SavingsPlanRecurringFee'))
23 | ) a
24 | LEFT JOIN (
25 | SELECT DISTINCT
26 | "bill_billing_period_start_date" "billing_period_mapping"
27 | , "bill_payer_account_id" "payer_account_id_mapping"
28 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_savings_plan_a_r_n" WHEN ("reservation_reservation_a_r_n" <> '') THEN "reservation_reservation_a_r_n" ELSE '' END) "ri_sp_arn_mapping"
29 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_purchase_term" WHEN ("reservation_reservation_a_r_n" <> '') THEN "pricing_lease_contract_length" ELSE '' END) "ri_sp_term"
30 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_offering_type" WHEN ("reservation_reservation_a_r_n" <> '') THEN "pricing_offering_class" ELSE '' END) "ri_sp_offering"
31 | , (CASE WHEN ("savings_plan_savings_plan_a_r_n" <> '') THEN "savings_plan_payment_option" WHEN ("reservation_reservation_a_r_n" <> '') THEN "pricing_purchase_option" ELSE '' END) "ri_sp_payment"
32 | FROM
33 | "${cur2_database}"."${cur2_table_name}"
34 | WHERE (("line_item_line_item_type" = 'DiscountedUsage') OR ("line_item_line_item_type" = 'SavingsPlanCoveredUsage'))
35 | ) b ON ((("a"."ri_sp_arn_mapping" = "b"."ri_sp_arn_mapping") AND ("a"."billing_period_mapping" = "b"."billing_period_mapping")) AND ("a"."payer_account_id_mapping" = "b"."payer_account_id_mapping")))
36 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/cid/s3.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW "s3_view" AS
2 | SELECT DISTINCT
3 | split_part("billing_period", '-', 1) "year",
4 | split_part("billing_period", '-', 2) "month",
5 | ${cur_tags_json} tags_json, --replace with
6 | "bill_billing_period_start_date" "billing_period",
7 | "date_trunc"('day', "line_item_usage_start_date") "usage_date",
8 | "bill_payer_account_id" "payer_account_id",
9 | "line_item_usage_account_id" "linked_account_id",
10 | "line_item_resource_id" "resource_id",
11 | "line_item_product_code" "product_code",
12 | "line_item_operation" "operation",
13 | product['region'] "region",
14 | "line_item_line_item_type" "charge_type",
15 | "pricing_unit" "pricing_unit",
16 | "sum"(
17 | CASE
18 | WHEN ("line_item_line_item_type" = 'Usage') THEN "line_item_usage_amount"
19 | ELSE 0
20 | END
21 | ) "usage_quantity",
22 | "sum"("line_item_unblended_cost") "unblended_cost",
23 | "sum"("pricing_public_on_demand_cost") "public_cost"
24 | FROM "${cur2_database}"."${cur2_table_name}"
25 | WHERE (
26 | (
27 | (
28 | (
29 | "bill_billing_period_start_date" >= (
30 | "date_trunc"('month', current_timestamp) - INTERVAL '3' MONTH
31 | )
32 | )
33 | AND (
34 | "line_item_usage_start_date" < (
35 | "date_trunc"('day', current_timestamp) - INTERVAL '1' DAY
36 | )
37 | )
38 | )
39 | AND ("line_item_operation" LIKE '%Storage%')
40 | )
41 | AND (
42 | ("line_item_product_code" LIKE '%AmazonGlacier%')
43 | OR ("line_item_product_code" LIKE '%AmazonS3%')
44 | )
45 | )
46 | GROUP BY 1,
47 | 2,
48 | 3,
49 | 4,
50 | 5,
51 | 6,
52 | 7,
53 | 8,
54 | 9,
55 | 10,
56 | 11,
57 | 12,
58 | 13
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/co/all_options.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW "compute_optimizer_all_options" AS
2 | (
3 | SELECT *
4 | FROM
5 | compute_optimizer_ec2_instance_options
6 | UNION SELECT *
7 | FROM
8 | compute_optimizer_auto_scale_options
9 | UNION SELECT *
10 | FROM
11 | compute_optimizer_ebs_volume_options
12 | UNION SELECT *
13 | FROM
14 | compute_optimizer_lambda_options
15 | UNION SELECT *
16 | FROM
17 | compute_optimizer_rds_instance_options
18 | UNION SELECT *
19 | FROM
20 | compute_optimizer_rds_storage_options
21 | UNION SELECT *
22 | FROM
23 | compute_optimizer_ecs_service_options
24 | UNION SELECT *
25 | FROM
26 | compute_optimizer_license_options
27 | UNION SELECT *
28 | FROM
29 | compute_optimizer_idle_options
30 | )
31 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/co/idle.json:
--------------------------------------------------------------------------------
1 | {
2 | "DatabaseName": "${athena_database_name}",
3 | "TableInput": {
4 | "Name": "${athena_table_name}",
5 | "StorageDescriptor": {
6 | "Location": "${s3FolderPath}",
7 | "Columns": [
8 | {
9 | "Name": "accountid",
10 | "Type": "string"
11 | },
12 | {
13 | "Name": "resourcearn",
14 | "Type": "string"
15 | },
16 | {
17 | "Name": "resourceid",
18 | "Type": "string"
19 | },
20 | {
21 | "Name": "resourcetype",
22 | "Type": "string"
23 | },
24 | {
25 | "Name": "lastrefreshtimestamp_utc",
26 | "Type": "string"
27 | },
28 | {
29 | "Name": "lookbackperiodindays",
30 | "Type": "string"
31 | },
32 | {
33 | "Name": "recommendations_count",
34 | "Type": "string"
35 | },
36 | {
37 | "Name": "savingsopportunitypercentage",
38 | "Type": "string"
39 | },
40 | {
41 | "Name": "estimatedmonthlysavingscurrency",
42 | "Type": "string"
43 | },
44 | {
45 | "Name": "estimatedmonthlysavingsvalue",
46 | "Type": "string"
47 | },
48 | {
49 | "Name": "savingsopportunitypercentageafterdiscounts",
50 | "Type": "string"
51 | },
52 | {
53 | "Name": "estimatedmonthlysavingscurrencyafterdiscounts",
54 | "Type": "string"
55 | },
56 | {
57 | "Name": "estimatedmonthlysavingsvalueafterdiscounts",
58 | "Type": "string"
59 | },
60 | {
61 | "Name": "utilizationmetricscpumaximum",
62 | "Type": "string"
63 | },
64 | {
65 | "Name": "utilizationmetricsmemorymaximum",
66 | "Type": "string"
67 | },
68 | {
69 | "Name": "utilizationmetricsnetworkoutbytespersecondmaximum",
70 | "Type": "string"
71 | },
72 | {
73 | "Name": "utilizationmetricsnetworkinbytespersecondmaximum",
74 | "Type": "string"
75 | },
76 | {
77 | "Name": "utilizationmetricsdatabaseconnectionsmaximum",
78 | "Type": "string"
79 | },
80 | {
81 | "Name": "utilizationmetricsebsvolumereadiopsmaximum",
82 | "Type": "string"
83 | },
84 | {
85 | "Name": "utilizationetricsebsvolumewriteiopsmaximum",
86 | "Type": "string"
87 | },
88 | {
89 | "Name": "utilizationmetricsvolumereadopspersecondmaximum",
90 | "Type": "string"
91 | },
92 | {
93 | "Name": "utilizationmetricsvolumewriteopspersecondmaximum",
94 | "Type": "string"
95 | },
96 | {
97 | "Name": "finding",
98 | "Type": "string"
99 | },
100 | {
101 | "Name": "findingdescription",
102 | "Type": "string"
103 | },
104 | {
105 | "Name": "errorcode",
106 | "Type": "string"
107 | },
108 | {
109 | "Name": "errormessage",
110 | "Type": "string"
111 | },
112 | {
113 | "Name": "tags",
114 | "Type": "string"
115 | }
116 | ],
117 | "InputFormat": "org.apache.hadoop.mapred.TextInputFormat",
118 | "NumberOfBuckets": -1,
119 | "OutputFormat": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat",
120 | "Parameters": {},
121 | "SerdeInfo": {
122 | "Parameters": {
123 | "quoteChar": "\"",
124 | "separatorChar": ",",
125 | "serialization.format": "1"
126 | },
127 | "SerializationLibrary": "org.apache.hadoop.hive.serde2.OpenCSVSerde"
128 | }
129 | },
130 | "Parameters": {
131 | "EXTERNAL": "TRUE",
132 | "skip.header.line.count": "1"
133 | },
134 | "PartitionKeys": [],
135 | "TableType": "EXTERNAL_TABLE"
136 | }
137 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/co/idle_options.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW compute_optimizer_idle_options AS
2 | (
3 | SELECT
4 | TRY("date_parse"(lastrefreshtimestamp_utc, '%Y-%m-%dT%H:%i:%sZ')) lastrefreshtimestamp_utc
5 | , accountid accountid
6 | , resourcearn arn
7 | , TRY("split_part"(resourcearn, ':', 4)) region
8 | , TRY("split_part"(resourcearn, ':', 3)) service
9 | , resourceid name
10 | , 'idle' module
11 | , resourcetype recommendationsourcetype
12 | , finding finding
13 | , findingdescription reason
14 | , lookbackperiodindays lookbackperiodindays
15 | , 'na' currentperformancerisk
16 | , errorcode errorcode
17 | , errormessage errormessage
18 | , '' ressouce_details
19 | , CONCAT(
20 | utilizationmetricscpumaximum, ';',
21 | utilizationmetricsmemorymaximum, ';',
22 | utilizationmetricsnetworkoutbytespersecondmaximum, ';',
23 | utilizationmetricsnetworkinbytespersecondmaximum, ';',
24 | utilizationmetricsdatabaseconnectionsmaximum, ';',
25 | utilizationmetricsebsvolumereadiopsmaximum, ';',
26 | utilizationetricsebsvolumewriteiopsmaximum, ';',
27 | utilizationmetricsvolumereadopspersecondmaximum, ';',
28 | utilizationmetricsvolumewriteopspersecondmaximum
29 | ) utilizationmetrics
30 | , 'Current' option_name
31 | , 'na' option_from
32 | , '' option_to
33 | , estimatedmonthlysavingscurrency currency
34 | , 0E0 monthlyprice
35 | , 0E0 hourlyprice
36 | , TRY(CAST(estimatedmonthlysavingsvalue AS double)) estimatedmonthlysavings_value
37 | , 0E0 estimatedmonthly_ondemand_cost_change
38 | , TRY(CAST(estimatedMonthlySavingsValue AS double)) max_estimatedmonthlysavings_value_very_low
39 | , TRY(CAST(estimatedMonthlySavingsValue AS double)) max_estimatedmonthlysavings_value_low
40 | , TRY(CAST(estimatedMonthlySavingsValue AS double)) max_estimatedmonthlysavings_value_medium
41 | , CONCAT(
42 | COALESCE(recommendations_count, ''), ';',
43 | COALESCE(savingsopportunitypercentage, ''), ';'
44 | ) option_details
45 | , tags tags
46 | FROM
47 | compute_optimizer_idle_lines
48 | WHERE (resourcearn LIKE '%arn:%')
49 | UNION SELECT
50 | TRY("date_parse"(lastrefreshtimestamp_utc, '%Y-%m-%dT%H:%i:%sZ')) lastrefreshtimestamp_utc
51 | , accountid accountid
52 | , resourcearn arn
53 | , TRY("split_part"(resourcearn, ':', 4)) region
54 | , TRY("split_part"(resourcearn, ':', 3)) service
55 | , resourceid name
56 | , 'idle' module
57 | , resourcetype recommendationsourcetype
58 | , finding finding
59 | , findingdescription reason
60 | , lookbackperiodindays lookbackperiodindays
61 | , '' currentperformancerisk
62 | , errorcode errorcode
63 | , errormessage errormessage
64 | , 'na' ressouce_details
65 | , '' utilizationmetrics
66 | , 'Recommendation' option_name
67 | , '' option_from
68 | , 'na' option_to
69 | , estimatedmonthlysavingscurrency currency
70 | , 0E0 monthlyprice
71 | , 0E0 hourlyprice
72 | , TRY(CAST(estimatedmonthlysavingsvalue AS double)) estimatedmonthlysavings_value
73 | , 0E0 estimatedmonthly_ondemand_cost_change
74 | , TRY(CAST(estimatedMonthlySavingsValue AS double)) max_estimatedmonthlysavings_value_very_low
75 | , TRY(CAST(estimatedMonthlySavingsValue AS double)) max_estimatedmonthlysavings_value_low
76 | , TRY(CAST(estimatedMonthlySavingsValue AS double)) max_estimatedmonthlysavings_value_medium
77 | , CONCAT(
78 | COALESCE(estimatedmonthlysavingsvalueafterdiscounts, ''), ';',
79 | COALESCE(savingsopportunitypercentageafterdiscounts, ''), ';'
80 | ) option_details
81 | , tags tags
82 | FROM
83 | compute_optimizer_idle_lines
84 | WHERE (resourcearn LIKE '%arn:%')
85 | )
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/co/license.json:
--------------------------------------------------------------------------------
1 | {
2 | "DatabaseName": "${athena_database_name}",
3 | "TableInput": {
4 | "Name": "${athena_table_name}",
5 | "StorageDescriptor": {
6 | "Location": "${s3FolderPath}",
7 | "Columns": [
8 | {
9 | "Name": "accountid",
10 | "Type": "string"
11 | },
12 | {
13 | "Name": "resourcearn",
14 | "Type": "string"
15 | },
16 | {
17 | "Name": "lookbackperiodindays",
18 | "Type": "string"
19 | },
20 | {
21 | "Name": "lastrefreshtimestamp_utc",
22 | "Type": "string"
23 | },
24 | {
25 | "Name": "currentlicenseconfiguration_numberofcores",
26 | "Type": "string"
27 | },
28 | {
29 | "Name": "currentlicenseconfiguration_instancetype",
30 | "Type": "string"
31 | },
32 | {
33 | "Name": "currentlicenseconfiguration_operatingsystem",
34 | "Type": "string"
35 | },
36 | {
37 | "Name": "currentlicenseconfiguration_licensename",
38 | "Type": "string"
39 | },
40 | {
41 | "Name": "currentlicenseconfiguration_licenseedition",
42 | "Type": "string"
43 | },
44 | {
45 | "Name": "currentlicenseconfiguration_licensemodel",
46 | "Type": "string"
47 | },
48 | {
49 | "Name": "currentlicenseconfiguration_licenseversion",
50 | "Type": "string"
51 | },
52 | {
53 | "Name": "finding",
54 | "Type": "string"
55 | },
56 | {
57 | "Name": "findingreasoncodes_isinvalidcloudwatchapplicationinsightssetup",
58 | "Type": "string"
59 | },
60 | {
61 | "Name": "findingreasoncodes_iscloudwatchapplicationinsightserror",
62 | "Type": "string"
63 | },
64 | {
65 | "Name": "findingreasoncodes_islicenseoverprovisioned",
66 | "Type": "string"
67 | },
68 | {
69 | "Name": "findingreasoncodes_isoptimized",
70 | "Type": "string"
71 | },
72 | {
73 | "Name": "recommendationoptions_1_operatingsystem",
74 | "Type": "string"
75 | },
76 | {
77 | "Name": "recommendationoptions_1_licenseedition",
78 | "Type": "string"
79 | },
80 | {
81 | "Name": "recommendationoptions_1_licensemodel",
82 | "Type": "string"
83 | },
84 | {
85 | "Name": "recommendationoptions_1_savingsopportunitypercentage",
86 | "Type": "string"
87 | },
88 | {
89 | "Name": "recommendationoptions_1_estimatedmonthlysavingscurrency",
90 | "Type": "string"
91 | },
92 | {
93 | "Name": "recommendationoptions_1_estimatedmonthlysavingsvalue",
94 | "Type": "string"
95 | },
96 | {
97 | "Name": "errorcode",
98 | "Type": "string"
99 | },
100 | {
101 | "Name": "errormessage",
102 | "Type": "string"
103 | },
104 | {
105 | "Name": "tags",
106 | "Type": "string"
107 | }
108 | ],
109 | "InputFormat": "org.apache.hadoop.mapred.TextInputFormat",
110 | "NumberOfBuckets": -1,
111 | "OutputFormat": "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat",
112 | "Parameters": {},
113 | "SerdeInfo": {
114 | "Parameters": {
115 | "quoteChar": "\"",
116 | "separatorChar": ",",
117 | "serialization.format": "1"
118 | },
119 | "SerializationLibrary": "org.apache.hadoop.hive.serde2.OpenCSVSerde"
120 | }
121 | },
122 | "Parameters": {
123 | "EXTERNAL": "TRUE",
124 | "skip.header.line.count": "1"
125 | },
126 | "PartitionKeys": [],
127 | "TableType": "EXTERNAL_TABLE"
128 | }
129 | }
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/co/license_options.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW compute_optimizer_license_options AS
2 | (
3 | SELECT
4 | TRY("date_parse"(lastrefreshtimestamp_utc, '%Y-%m-%dT%H:%i:%sZ')) lastrefreshtimestamp_utc
5 | , accountid accountid
6 | , resourcearn arn
7 | , TRY("split_part"(resourcearn, ':', 4)) region
8 | , TRY("split_part"(resourcearn, ':', 3)) service
9 | , TRY("split_part"(resourcearn, '/', 2)) name
10 | , 'license' module
11 | , 'na' recommendationsourcetype
12 | , finding finding
13 | , CONCAT(
14 | (CASE WHEN (findingreasoncodes_isinvalidcloudwatchapplicationinsightssetup = 'true') THEN 'InvalidCloudwatchApplicationInsights ' ELSE '' END),
15 | (CASE WHEN (findingreasoncodes_iscloudwatchapplicationinsightserror = 'true') THEN 'CloudwatchApplicationInsightsError ' ELSE '' END),
16 | (CASE WHEN (findingreasoncodes_islicenseoverprovisioned = 'true') THEN 'LicenseOverprovisioned ' ELSE '' END),
17 | (CASE WHEN (findingreasoncodes_isoptimized = 'true') THEN 'Optimized ' ELSE '' END)
18 | ) reason
19 | , lookbackperiodindays lookbackperiodindays
20 | , 'na' currentperformancerisk
21 | , errorcode errorcode
22 | , errormessage errormessage
23 | , 'na' ressouce_details
24 | , 'na' utilizationmetrics
25 | , 'Current' option_name
26 | , CONCAT(
27 | currentlicenseconfiguration_numberofcores, ';',
28 | currentlicenseconfiguration_instancetype, ';',
29 | currentlicenseconfiguration_licenseversion
30 | ) option_from
31 | , '' option_to
32 | , recommendationoptions_1_estimatedmonthlysavingscurrency currency
33 | , 0E0 monthlyprice
34 | , 0E0 hourlyprice
35 | , COALESCE(TRY_CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double), 0E0) as estimatedmonthlysavings_value
36 | , 0E0 estimatedmonthly_ondemand_cost_change
37 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as max_estimatedmonthlysavings_value_very_low
38 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as max_estimatedmonthlysavings_value_low
39 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as max_estimatedmonthlysavings_value_medium
40 | , CONCAT(
41 | CONCAT(COALESCE(currentlicenseconfiguration_licensename, ''), ';'),
42 | CONCAT(COALESCE(currentlicenseconfiguration_operatingsystem, ''), ';'),
43 | CONCAT(COALESCE(currentlicenseconfiguration_licenseedition, ''), ';'),
44 | CONCAT(COALESCE(currentlicenseconfiguration_licensemodel, ''), ';'),
45 | '', ';'
46 | ) option_details
47 | , tags tags
48 | FROM
49 | compute_optimizer_license_lines
50 | WHERE (resourcearn LIKE '%arn:%')
51 | UNION SELECT
52 | TRY("date_parse"(lastrefreshtimestamp_utc, '%Y-%m-%dT%H:%i:%sZ')) lastrefreshtimestamp_utc
53 | , accountid accountid
54 | , resourcearn arn
55 | , TRY("split_part"(resourcearn, ':', 4)) region
56 | , TRY("split_part"(resourcearn, ':', 3)) service
57 | , TRY("split_part"(resourcearn, '/', 2)) name
58 | , 'license' module
59 | , '' recommendationsourcetype
60 | , finding finding
61 | , '' reason
62 | , lookbackperiodindays lookbackperiodindays
63 | , '' currentperformancerisk
64 | , errorcode errorcode
65 | , errormessage errormessage
66 | , '' ressouce_details
67 | , '' utilizationmetrics
68 | , 'Recommendation' option_name
69 | , '' option_from
70 | , 'na' option_to
71 | , recommendationoptions_1_estimatedmonthlysavingscurrency currency
72 | , 0E0 monthlyprice
73 | , 0E0 hourlyprice
74 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as estimatedmonthlysavings_value
75 | , COALESCE(TRY_CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double), 0E0) as estimatedmonthly_ondemand_cost_change
76 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as max_estimatedmonthlysavings_value_very_low
77 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as max_estimatedmonthlysavings_value_low
78 | , COALESCE(TRY(CAST(recommendationoptions_1_estimatedmonthlysavingsvalue AS double)), 0E0) as max_estimatedmonthlysavings_value_medium
79 | , CONCAT(
80 | CONCAT(COALESCE(currentlicenseconfiguration_licensename, ''), ';'),
81 | CONCAT(COALESCE(recommendationoptions_1_operatingsystem, ''), ';'),
82 | CONCAT(COALESCE(recommendationoptions_1_licenseedition, ''), ';'),
83 | CONCAT(COALESCE(recommendationoptions_1_licensemodel, ''), ';'),
84 | CONCAT(COALESCE(recommendationoptions_1_savingsopportunitypercentage, ''), ';')
85 | ) option_details
86 | , tags tags
87 | FROM
88 | compute_optimizer_license_lines
89 | WHERE (resourcearn LIKE '%arn:%')
90 | )
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/cudos/hourly_view.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW hourly_view AS
2 | SELECT DISTINCT
3 | "line_item_product_code" "product_code"
4 | , "product_servicecode" "service"
5 | , ${cur_tags_json} tags_json --replace with ''
6 | , "line_item_operation" "operation"
7 | , "line_item_line_item_type" "charge_type"
8 | , "line_item_usage_type" "usage_type"
9 | , "line_item_line_item_description" "item_description"
10 | , "pricing_unit" "pricing_unit"
11 | , product['region'] "region"
12 | , "pricing_term" "pricing_term"
13 | , "bill_billing_period_start_date" "billing_period"
14 | , "line_item_usage_start_date" "usage_date"
15 | , "bill_payer_account_id" "payer_account_id"
16 | , "line_item_usage_account_id" "linked_account_id"
17 | , "savings_plan_savings_plan_a_r_n" "savings_plan_a_r_n"
18 | , "reservation_reservation_a_r_n" "reservation_a_r_n"
19 | , "sum"("line_item_unblended_cost") "unblended_cost"
20 | , "sum"("reservation_effective_cost") "reservation_effective_cost"
21 | , "sum"("savings_plan_savings_plan_effective_cost") "savings_plan_effective_cost"
22 | , "sum"("line_item_usage_amount") "usage_quantity"
23 | FROM
24 | "${cur2_database}"."${cur2_table_name}"
25 | WHERE
26 | (((current_date - INTERVAL '30' DAY) <= line_item_usage_start_date)
27 | AND ((("line_item_line_item_type" = 'Usage') OR ("line_item_line_item_type" = 'SavingsPlanCoveredUsage')) OR ("line_item_line_item_type" = 'DiscountedUsage'))
28 | AND coalesce("line_item_operation", '') NOT IN ('EKSPod-EC2','ECSTask-EC2'))
29 | GROUP BY 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
30 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/cudos/resource_view.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW resource_view AS
2 | SELECT DISTINCT
3 | "date_trunc"('day', "line_item_usage_start_date") "usage_date"
4 | , "bill_payer_account_id" "payer_account_id"
5 | , ${cur_tags_json} tags_json --replace with ''
6 | , "line_item_usage_account_id" "linked_account_id"
7 | , "bill_billing_entity" "billing_entity"
8 | , product['product_name'] "product_name"
9 | , "line_item_resource_id" "resource_id"
10 | , "line_item_product_code" "product_code"
11 | , "line_item_operation" "operation"
12 | , "line_item_line_item_type" "charge_type"
13 | , "line_item_usage_type" "usage_type"
14 | , "pricing_unit" "pricing_unit"
15 | , product['region'] "region"
16 | , "line_item_line_item_description" "item_description"
17 | , "line_item_legal_entity" "legal_entity"
18 | , "pricing_term" "pricing_term"
19 | , product['database_engine'] "database_engine"
20 | , product['deployment_option'] "product_deployment_option"
21 | , "product_from_location" "product_from_location"
22 | , product['group'] "product_group"
23 | , "product_instance_type" "instance_type"
24 | , product['instance_type_family'] "instance_type_family"
25 | , product['operating_system'] "platform"
26 | , "product_product_family" "product_family"
27 | , "product_servicecode" "service"
28 | , product['storage'] "product_storage"
29 | , "product_to_location" "product_to_location"
30 | , product['volume_api_name'] "product_volume_api_name"
31 | , "reservation_reservation_a_r_n" "reservation_a_r_n"
32 | , "savings_plan_savings_plan_a_r_n" "savings_plan_a_r_n"
33 | , "sum"("savings_plan_savings_plan_effective_cost") "savings_plan_effective_cost"
34 | , "sum"("reservation_effective_cost") "reservation_effective_cost"
35 | , "sum"("line_item_usage_amount") "usage_quantity"
36 | , "sum"("line_item_unblended_cost") "unblended_cost"
37 | FROM
38 | "${cur2_database}"."${cur2_table_name}"
39 | WHERE
40 | (((current_date - INTERVAL '30' DAY) <= line_item_usage_start_date)
41 | AND (line_item_resource_id <> '')
42 | AND coalesce("line_item_operation", '') NOT IN ('EKSPod-EC2','ECSTask-EC2'))
43 | GROUP BY 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30
44 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/kpi/kpi_ebs_snap_view.sql:
--------------------------------------------------------------------------------
1 | /*Row 20 references the CUR table via ${cur2_database}.${cur2_table_name}; replace it with your CUR table name when running this query manually */
2 | CREATE OR REPLACE VIEW kpi_ebs_snap AS
3 | WITH
4 | -- Step 1: Filter CUR to return all ebs ec2 snapshot usage data
5 | snapshot_usage_all_time AS (
6 | SELECT
7 | split_part(billing_period, '-', 1) year
8 | , split_part(billing_period, '-', 2) month
9 | , bill_billing_period_start_date billing_period
10 | , line_item_usage_start_date usage_start_date
11 | , bill_payer_account_id payer_account_id
12 | , line_item_usage_account_id linked_account_id
13 | , ${cur_tags_json} tags_json
14 | , line_item_resource_id resource_id
15 | , (CASE WHEN (line_item_usage_type LIKE '%EBS:SnapshotArchive%') THEN 'Snapshot_Archive' WHEN (line_item_usage_type LIKE '%EBS:Snapshot%') THEN 'Snapshot' ELSE "line_item_operation" END) snapshot_type
16 | , line_item_usage_amount
17 | , line_item_unblended_cost
18 | , pricing_public_on_demand_cost
19 | FROM
20 | "${cur2_database}"."${cur2_table_name}"
21 | WHERE (((((bill_payer_account_id <> '') AND (line_item_resource_id <> '')) AND (line_item_line_item_type LIKE '%Usage%')) AND (line_item_product_code = 'AmazonEC2')) AND (line_item_usage_type LIKE '%EBS:Snapshot%'))
22 |
23 | ),
24 | -- Step 2: Return most recent billing_period and the first billing_period
25 | request_dates AS (
26 | SELECT DISTINCT
27 | resource_id request_dates_resource_id
28 | , "min"(usage_start_date) start_date
29 | FROM
30 | snapshot_usage_all_time
31 | WHERE (snapshot_type = 'Snapshot')
32 | GROUP BY 1
33 | )
34 | (
35 | -- Step 3: Pivot table so looking at previous month filtered for only snapshots still available in the current month
36 | SELECT DISTINCT
37 | billing_period
38 | , request_dates.start_date
39 | , payer_account_id
40 | , linked_account_id
41 | , tags_json
42 | , snapshot_type
43 | , resource_id
44 | , "sum"(line_item_usage_amount) usage_quantity
45 | , "sum"(line_item_unblended_cost) ebs_snapshot_cost
46 | , "sum"(pricing_public_on_demand_cost) public_cost
47 | , "sum"((CASE WHEN ((request_dates.start_date > (billing_period - INTERVAL '12' MONTH)) AND (snapshot_type = 'Snapshot')) THEN line_item_unblended_cost ELSE 0 END)) "ebs_snapshots_under_1yr_cost" /*No savings estimate since it uses uses 100% of snapshot cost for snapshots over 6mos as savings estimate*/
48 | , "sum"((CASE WHEN ((request_dates.start_date <= (billing_period - INTERVAL '12' MONTH)) AND (snapshot_type = 'Snapshot')) THEN line_item_unblended_cost ELSE 0 END)) "ebs_snapshots_over_1yr_cost"
49 | FROM
50 | (snapshot_usage_all_time snapshot
51 | LEFT JOIN request_dates ON (request_dates.request_dates_resource_id = snapshot.resource_id))
52 | WHERE (CAST("concat"(snapshot.year, '-', snapshot.month, '-01') AS date) >= ("date_trunc"('month', current_date) - INTERVAL '3' MONTH))
53 | GROUP BY 1, 2, 3, 4, 5, 6, 7
54 | )
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/account_map.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW account_map AS
2 | SELECT
3 | ${account_id} AS account_id,
4 | MAX_BY(${account_name}, ${account_id}) AS account_name
5 | FROM
6 | "${metadata_database_name}"."${metadata_table_name}"
7 | WHERE
8 | ${account_name} <> ''
9 | GROUP BY
10 | ${account_id}
11 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/account_map_cur2.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW ${athena_view_name} AS
2 | SELECT DISTINCT
3 | line_item_usage_account_id account_id,
4 | MAX_BY(line_item_usage_account_name, line_item_usage_start_date) account_name,
5 | MAX_BY(bill_payer_account_id, line_item_usage_start_date) parent_account_id,
6 | MAX_BY(bill_payer_account_name, line_item_usage_start_date) parent_account_name
7 | FROM
8 | "${cur_database}"."${cur_table_name}"
9 | GROUP BY
10 | line_item_usage_account_id
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/account_map_dummy.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW ${athena_view_name} AS
2 | SELECT DISTINCT
3 | line_item_usage_account_id account_id,
4 | MAX_BY(bill_payer_account_id, line_item_usage_start_date) parent_account_id,
5 | MAX_BY(line_item_usage_account_id, line_item_usage_start_date) account_name,
6 | MAX_BY(line_item_usage_account_id, line_item_usage_start_date) account_email_id
7 | FROM
8 | "${cur_database}"."${cur_table_name}"
9 | GROUP BY
10 | line_item_usage_account_id
11 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/aws_accounts.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW aws_accounts AS
2 | WITH
3 | m AS (
4 | SELECT
5 | ${account_id} as account_id,
6 | ${account_name} as account_name,
7 | email account_email_id
8 | FROM
9 | "${metadata_database_name}"."${metadata_table_name}"
10 | ),
11 | cur AS (
12 | SELECT DISTINCT
13 | line_item_usage_account_id,
14 | bill_payer_account_id parent_account_id
15 | FROM "${cur_database}"."${cur_table_name}"
16 | )
17 | SELECT
18 | m.account_id,
19 | m.account_name,
20 | cur.parent_account_id,
21 | m.account_email_id,
22 | 'Active' account_status
23 | FROM (
24 | m
25 | LEFT JOIN cur ON m.account_id = cur.line_item_usage_account_id
26 | )
27 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/aws_regions.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW aws_regions AS
2 | SELECT *
3 | FROM
4 | (
5 | VALUES
6 | ROW ('ap-northeast-1', 'Japan', 'Tokyo', '35.64', '139.76', 'Asia/Tokyo')
7 | , ROW ('ap-south-1', 'India', 'Mumbai', '19.08', '72.88', 'Asia/Kolkata')
8 | , ROW ('cn-northwest-1', 'China', 'Ningxia', '38.47', '106.26', 'Asia/Beijing')
9 | , ROW ('eu-central-1', 'Germany', 'Frankfurt', '50.11', '8.68', 'Europe/Berlin')
10 | , ROW ('eu-north-1', 'Sweden', 'Stockholm', '59.33', '18.07', 'Europe/Stockholm')
11 | , ROW ('eu-west-1', 'Ireland', 'Dublin', '53.28', '-7.71', 'Europe/Dublin')
12 | , ROW ('us-east-2', 'USA', 'Ohio', '40.36', '-82.91', 'America/New_York')
13 | , ROW ('us-gov-west-1', 'USA', 'Oregon', '39.53', '-119.88', 'US/Pacific')
14 | , ROW ('us-west-1', 'USA', 'N. California', '36.55', '-119.89', 'America/Los_Angeles')
15 | , ROW ('us-west-2', 'USA', 'Oregon', '43.82', '-120.33', 'America/Los_Angeles')
16 | , ROW ('ap-east-1', 'Hong Kong', 'Hong Kong', '22.28', '114.15', 'Asia/Hong_Kong')
17 | , ROW ('ap-northeast-2', 'South Korea', 'Seoul', '37.72', '126.04', 'Asia/Seoul')
18 | , ROW ('ap-northeast-3', 'Japan', 'Osaka', '34.69', '135.5', 'Asia/Tokyo')
19 | , ROW ('ap-southeast-2', 'Australia', 'Sydney', '-33.88', '151.23', 'Australia/Sydney')
20 | , ROW ('ca-central-1', 'Canada', 'Montreal', '44.08', '-77.42', 'America/Toronto')
21 | , ROW ('cn-north-1', 'China', 'Beijing', '39.91', '116.41', 'Asia/Beijing')
22 | , ROW ('eu-west-3', 'France', 'Paris', '48.85', '2.35', 'Europe/Paris')
23 | , ROW ('me-south-1', 'Bahrain', 'Bahrain', '26.11', '50.50', 'Asia/Riyadh')
24 | , ROW ('sa-east-1', 'Brazil', 'Sao Paulo', '-23.37', '-46.63', 'America/Sao_Paulo')
25 | , ROW ('us-gov-east-1', 'USA', 'Ohio', '40.49', '-81.45', 'US/Eastern')
26 | , ROW ('ap-southeast-1', 'Singapore', 'Singapore', '1.35', '103.86', 'Asia/Singapore')
27 | , ROW ('eu-west-2', 'UK', 'London', '51.53', '-0.12', 'Europe/London')
28 | , ROW ('us-east-1', 'USA', 'Washington DC', '38.80', '-77.11', 'America/New_York')
29 | , ROW ('eu-south-1', 'Italy', 'Milan', '45.46', '9.18', 'Europe/Rome')
30 | , ROW ('af-south-1', 'South Africa', 'Cape Town', '-33.91', '18.42', 'Africa/Blantyre')
31 | , ROW ('ap-southeast-3', 'Indonesia', 'Jakarta', '-6.17', '106.82', 'Asia/Jakarta')
32 | , ROW ('ap-southeast-4', 'Australia', 'Melbourne', '-37.81', '144.96', 'Australia/Melbourne')
33 | , ROW ('eu-south-2', 'Spain', 'Madrid', '40.41', '-3.70', 'Europe/Madrid')
34 | , ROW ('eu-central-2', 'Switzerland', 'Zurich', '47.37', '8.54', 'Europe/Zurich')
35 | , ROW ('me-central-1', 'UAE', 'Dubai', '25.27', '55.29', 'Asia/Dubai')
36 | , ROW ('il-central-1', 'Israel', 'Tel Aviv', '32.08', '34.78', 'Asia/Jerusalem')
37 | ) ignored_table_name (region_name, region_country, region_city, region_latitude, region_longitude, region_timezone)
38 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/business_units_map.sql:
--------------------------------------------------------------------------------
1 | CREATE VIEW business_units_map AS
2 | SELECT *
3 | FROM
4 | (
5 | VALUES
6 | ROW ('111111111', 'account1', 'Business Unit 1')
7 | , ROW ('222222222', 'account2', 'Business Unit 2')
8 | ) ignored_table_name (account_id, account_name, bu)
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/cur.yaml:
--------------------------------------------------------------------------------
1 | DatabaseName: "${athena_database_name}"
2 | TableInput:
3 | Name: "${athena_table_name}"
4 | Owner: owner
5 | Retention: 0
6 | TableType: EXTERNAL_TABLE
7 | Parameters:
8 | compressionType: none
9 | classification: parquet
10 | UPDATED_BY_CRAWLER: CidCurCrawler # Hard coded Crawler Name
11 | StorageDescriptor:
12 | BucketColumns: []
13 | Compressed: false
14 | Location: "${location}"
15 | NumberOfBuckets: -1
16 | InputFormat: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
17 | OutputFormat: org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat
18 | SerdeInfo:
19 | Parameters:
20 | serialization.format: '1'
21 | SerializationLibrary: org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe
22 | StoredAsSubDirectories: false
23 | Columns: # All fields required for CID
24 | - {"Name": "bill_bill_type", "Type": "string" }
25 | - {"Name": "bill_billing_entity", "Type": "string" }
26 | - {"Name": "bill_billing_period_end_date", "Type": "timestamp" }
27 | - {"Name": "bill_billing_period_start_date", "Type": "timestamp" }
28 | - {"Name": "bill_invoice_id", "Type": "string" }
29 | - {"Name": "bill_payer_account_id", "Type": "string" }
30 | - {"Name": "identity_line_item_id", "Type": "string" }
31 | - {"Name": "identity_time_interval", "Type": "string" }
32 | - {"Name": "line_item_availability_zone", "Type": "string" }
33 | - {"Name": "line_item_legal_entity", "Type": "string" }
34 | - {"Name": "line_item_line_item_description", "Type": "string" }
35 | - {"Name": "line_item_line_item_type", "Type": "string" }
36 | - {"Name": "line_item_operation", "Type": "string" }
37 | - {"Name": "line_item_product_code", "Type": "string" }
38 | - {"Name": "line_item_resource_id", "Type": "string" }
39 | - {"Name": "line_item_unblended_cost", "Type": "double" }
40 | - {"Name": "line_item_usage_account_id", "Type": "string" }
41 | - {"Name": "line_item_usage_amount", "Type": "double" }
42 | - {"Name": "line_item_usage_end_date", "Type": "timestamp" }
43 | - {"Name": "line_item_usage_start_date", "Type": "timestamp" }
44 | - {"Name": "line_item_usage_type", "Type": "string" }
45 | - {"Name": "pricing_lease_contract_length", "Type": "string" }
46 | - {"Name": "pricing_offering_class", "Type": "string" }
47 | - {"Name": "pricing_public_on_demand_cost", "Type": "double" }
48 | - {"Name": "pricing_purchase_option", "Type": "string" }
49 | - {"Name": "pricing_term", "Type": "string" }
50 | - {"Name": "pricing_unit", "Type": "string" }
51 | - {"Name": "product_cache_engine", "Type": "string" }
52 | - {"Name": "product_current_generation", "Type": "string" }
53 | - {"Name": "product_database_engine", "Type": "string" }
54 | - {"Name": "product_deployment_option", "Type": "string" }
55 | - {"Name": "product_from_location", "Type": "string" }
56 | - {"Name": "product_group", "Type": "string" }
57 | - {"Name": "product_instance_type", "Type": "string" }
58 | - {"Name": "product_instance_type_family", "Type": "string" }
59 | - {"Name": "product_license_model", "Type": "string" }
60 | - {"Name": "product_operating_system", "Type": "string" }
61 | - {"Name": "product_physical_processor", "Type": "string" }
62 | - {"Name": "product_processor_features", "Type": "string" }
63 | - {"Name": "product_product_family", "Type": "string" }
64 | - {"Name": "product_product_name", "Type": "string" }
65 | - {"Name": "product_region", "Type": "string" }
66 | - {"Name": "product_servicecode", "Type": "string" }
67 | - {"Name": "product_storage", "Type": "string" }
68 | - {"Name": "product_tenancy", "Type": "string" }
69 | - {"Name": "product_to_location", "Type": "string" }
70 | - {"Name": "product_volume_api_name", "Type": "string" }
71 | - {"Name": "product_volume_type", "Type": "string" }
72 | - {"Name": "reservation_amortized_upfront_fee_for_billing_period", "Type": "double" }
73 | - {"Name": "reservation_effective_cost", "Type": "double" }
74 | - {"Name": "reservation_end_time", "Type": "string" }
75 | - {"Name": "reservation_reservation_a_r_n", "Type": "string" }
76 | - {"Name": "reservation_start_time", "Type": "string" }
77 | - {"Name": "reservation_unused_amortized_upfront_fee_for_billing_period", "Type": "double" }
78 | - {"Name": "reservation_unused_recurring_fee", "Type": "double" }
79 | - {"Name": "savings_plan_amortized_upfront_commitment_for_billing_period", "Type": "double" }
80 | - {"Name": "savings_plan_end_time", "Type": "string" }
81 | - {"Name": "savings_plan_offering_type", "Type": "string" }
82 | - {"Name": "savings_plan_payment_option", "Type": "string" }
83 | - {"Name": "savings_plan_purchase_term", "Type": "string" }
84 | - {"Name": "savings_plan_savings_plan_a_r_n", "Type": "string" }
85 | - {"Name": "savings_plan_savings_plan_effective_cost", "Type": "double" }
86 | - {"Name": "savings_plan_start_time", "Type": "string" }
87 | - {"Name": "savings_plan_total_commitment_to_date", "Type": "double" }
88 | - {"Name": "savings_plan_used_commitment", "Type": "double" }
89 | PartitionKeys: ${partitions} # can be a list
90 |
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/shared/payer_account_name_map.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW payer_account_name_map AS
2 | SELECT
3 | "account_id"
4 | , "account_name" "payer_account_name"
5 | FROM
6 | aws_accounts
7 | WHERE ("parent_account_id" = "account_id")
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/trends/daily_anomaly_detection.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW daily_anomaly_detection AS
2 | SELECT
3 | "line_item_usage_start_date"
4 | , "line_item_usage_account_id"
5 | , "account_name"
6 | , (CASE WHEN (product['product_name'] LIKE '') THEN "line_item_product_code" ELSE product['product_name'] END) "product_product_name"
7 | , "round"("sum"("line_item_unblended_cost"), 2) "unblended_cost"
8 | , "round"("sum"("line_item_usage_amount"), 2) "line_item_usage_amount"
9 | FROM
10 | ("${cur2_database}"."${cur2_table_name}"
11 | LEFT JOIN aws_accounts ON ("line_item_usage_account_id" = "account_id"))
12 | WHERE ("date_diff"('day', "date"("line_item_usage_start_date"), "date"("now"())) <= 110)
13 | GROUP BY "line_item_usage_start_date", "line_item_usage_account_id", "account_name", 4
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/trends/monthly_anomaly_detection.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW monthly_anomaly_detection AS
2 | SELECT
3 | "bill_billing_period_start_date"
4 | , "line_item_usage_account_id"
5 | , "account_name"
6 | , (CASE
7 | WHEN (product['product_name'] LIKE '') THEN "line_item_product_code"
8 | ELSE product['product_name']
9 | END) "product_product_name"
10 | , "round"("sum"("line_item_unblended_cost"), 2) "unblended_cost"
11 | , "round"("sum"("line_item_usage_amount"), 2) "line_item_usage_amount"
12 | FROM
13 | ("${cur2_database}"."${cur2_table_name}"
14 | LEFT JOIN aws_accounts ON ("line_item_usage_account_id" = "account_id"))
15 | WHERE ("date_diff"('month', "date"("bill_billing_period_start_date"), "date"("now"())) <= 20)
16 | GROUP BY "bill_billing_period_start_date", "line_item_usage_account_id", "account_name", 4
--------------------------------------------------------------------------------
/cid/builtin/core/data/queries/trends/monthly_bill_by_account.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE VIEW monthly_bill_by_account AS
2 | WITH
3 | t1 AS (
4 | SELECT
5 | "bill_billing_period_start_date"
6 | , "bill_payer_account_id"
7 | , "line_item_usage_account_id"
8 | , "line_item_line_item_type" "charge_type"
9 | , (CASE WHEN (product['product_name'] LIKE '') THEN "line_item_product_code" ELSE product['product_name'] END) "product_product_name"
10 | , product['region'] "product_region"
11 | , "line_item_product_code"
12 | , "round"("sum"("line_item_unblended_cost"), 2) "unblended_cost"
13 | , "round"("sum"((CASE WHEN ("line_item_line_item_type" = 'SavingsPlanCoveredUsage') THEN "savings_plan_savings_plan_effective_cost" WHEN ("line_item_line_item_type" = 'SavingsPlanRecurringFee') THEN ("savings_plan_total_commitment_to_date" - "savings_plan_used_commitment") WHEN ("line_item_line_item_type" = 'SavingsPlanNegation') THEN 0 WHEN ("line_item_line_item_type" = 'SavingsPlanUpfrontFee') THEN 0 WHEN ("line_item_line_item_type" = 'DiscountedUsage') THEN "reservation_effective_cost" WHEN ("line_item_line_item_type" = 'RIFee') THEN ("reservation_unused_amortized_upfront_fee_for_billing_period" + "reservation_unused_recurring_fee") WHEN (("line_item_line_item_type" = 'Fee') AND ("reservation_reservation_a_r_n" <> '')) THEN 0 ELSE "line_item_unblended_cost" END)), 2) "amortized_cost"
14 | FROM
15 | "${cur2_database}"."${cur2_table_name}"
16 | GROUP BY 1, 2, 3, 4, 5, 6, 7
17 | )
18 | , t2 AS (
19 | SELECT
20 | "account_name"
21 | , "account_id"
22 | FROM
23 | aws_accounts
24 | )
25 | , t3 AS (
26 | SELECT
27 | "payer_account_name"
28 | , "account_id"
29 | FROM
30 | payer_account_name_map
31 | )
32 | , t4 AS (
33 | SELECT
34 | "region_latitude"
35 | , "region_longitude"
36 | , "region_name"
37 | FROM
38 | aws_regions
39 | )
40 | , t5 AS (
41 | SELECT
42 | "aws_service_category"
43 | , "line_item_product_code"
44 | FROM
45 | aws_service_category_map
46 | )
47 | SELECT
48 | t1.*
49 | , "t2"."account_name"
50 | , "t3"."payer_account_name"
51 | , TRY_CAST("t4"."region_latitude" AS decimal) "region_latitude"
52 | , TRY_CAST("t4"."region_longitude" AS decimal) "region_longitude"
53 | , (CASE WHEN (("t5"."aws_service_category" IS NULL) AND ("t1"."product_product_name" IS NOT NULL)) THEN "t1"."product_product_name" WHEN (("aws_service_category" IS NULL) AND ("t1"."product_product_name" IS NULL)) THEN 'Other' ELSE "t5"."aws_service_category" END) "aws_service_category"
54 | FROM
55 | ((((t1
56 | LEFT JOIN t2 ON ("t1"."line_item_usage_account_id" = "t2"."account_id"))
57 | LEFT JOIN t3 ON ("t1"."bill_payer_account_id" = "t3"."account_id"))
58 | LEFT JOIN t4 ON ("t1"."product_region" = "t4"."region_name"))
59 | LEFT JOIN t5 ON ("t1"."line_item_product_code" = "t5"."line_item_product_code"))
--------------------------------------------------------------------------------
/cid/commands/__init__.py:
--------------------------------------------------------------------------------
1 | # re-export the command for package-level imports
2 | from cid.commands.init_qs import InitQsCommand  # noqa: F401
3 |
--------------------------------------------------------------------------------
/cid/commands/command_base.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class Command(ABC): # pylint: disable=too-few-public-methods
5 | """Abstract base class for commands"""
6 |
7 | @abstractmethod
8 | def __init__(self, cid, logger=None, **kwargs):
9 | """Initialize the class"""
10 |
11 | @abstractmethod
12 | def execute(self, *args, **kwargs):
13 | """Run the command"""
14 |
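15 | 
16 | # Illustrative sketch only (hypothetical, not shipped with CID): the smallest
17 | # concrete command just implements __init__ and execute.
18 | class _ExampleNoopCommand(Command):
19 |     """Hypothetical no-op command demonstrating the required interface"""
20 | 
21 |     def __init__(self, cid, logger=None, **kwargs):
22 |         self.cid = cid
23 | 
24 |     def execute(self, *args, **kwargs):
25 |         """Run the command (here: do nothing)"""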
--------------------------------------------------------------------------------
/cid/exceptions.py:
--------------------------------------------------------------------------------
1 |
2 | class CidError(Exception):
3 | """Base class for CID Exceptions"""
4 | pass
5 |
6 | class CidCritical(BaseException):
7 | """Critical Exception, not to be catched with standard Exception"""
8 | pass
9 |
--------------------------------------------------------------------------------
/cid/helpers/__init__.py:
--------------------------------------------------------------------------------
1 | from cid.helpers.glue import Glue
2 | from cid.helpers.s3 import S3
3 | from cid.helpers.athena import Athena
4 | from cid.helpers.iam import IAM
5 | from cid.helpers.cur import CUR, ProxyCUR
6 | from cid.helpers.diff import diff
7 | from cid.helpers.quicksight import QuickSight, Dashboard, Dataset, Datasource, Template
8 | from cid.helpers.csv2view import csv2view
9 | from cid.helpers.organizations import Organizations
10 | from cid.helpers.cur_proxy import ProxyView
11 | from cid.helpers.cloudformation import CFN
12 | from cid.helpers.parameter_store import ParametersController
13 |
14 | __all__ = [
15 | "Athena",
16 | "S3",
17 | "IAM",
18 | "CUR",
19 | "ProxyCUR",
20 | "Glue",
21 | "QuickSight",
22 | "Dashboard",
23 | "Dataset",
24 | "Datasource",
25 | "Template",
26 | "diff",
27 | "csv2view",
28 | "Organizations",
29 | "ProxyView",
30 | "CFN",
31 | "ParametersController",
32 | ]
33 |
--------------------------------------------------------------------------------
/cid/helpers/cloudformation.py:
--------------------------------------------------------------------------------
1 | """ CloudFormation Helper
2 | """
3 | import logging
4 | from functools import lru_cache
5 |
6 | from cid.base import CidBase
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
11 | class CFN(CidBase):
12 |
13 | def __init__(self, session):
14 | super().__init__(session)
15 | self.client = self.session.client('cloudformation', region_name=self.region)
16 |
17 | @lru_cache(1000)
18 | def get_read_access_policies(self):
19 | ''' returns a dict of read access policies provided by other stacks
20 |
21 | data collection stack can add CFN export ReadAccessPolicyARN
22 |
23 | return example:
24 | {
25 | "cid-DataCollection-ReadAccessPolicyARN": "arn:aws:iam::xxx:policy/CID-DC-DataCollectionReadAccess",
26 | "cid-DataExports-ReadAccessPolicyARN": "arn:aws:iam::xxx:policy/cidDataExportsReadAccess",
27 | "cid-SecurityHub-ReadAccessPolicyARN": "arn:aws:iam::xxx:policy/cid-SecurityHubReadAccess"
28 | }
29 | '''
30 | try:
31 | res = dict(
32 | self.client
33 | .get_paginator('list_exports')
34 | .paginate()
35 | .search("Exports[? starts_with(Name, 'cid-') && ends_with(Name,'ReadAccessPolicyARN')][Name, Value]"
36 | )
37 | )
38 | except self.client.exceptions.ClientError as exc:
39 | if 'AccessDenied' in str(exc):
40 | logger.warning('AccessDenied on cloudformation:ListExports. Most likely not critical.')
41 | res = {}
42 | else:
43 | raise
44 | return res
45 |
46 | @lru_cache(1000)
47 | def get_read_access_policy(self, key):
48 |         ''' return a single read access policy ARN provided by another stack, or None
49 | key: cfn export name
50 | '''
51 | return self.get_read_access_policies().get(key, None)
52 |
53 | @lru_cache(1000)
54 | def get_read_access_policy_for_module(self, module):
55 |         ''' return the read access policy ARN provided by another stack for a given module, or None
56 |         module: module name (DataCollection, DataExports or SecurityHub)
57 | '''
58 | return self.get_read_access_policy(f'cid-{module}-ReadAccessPolicyARN')
59 |
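60 | # Illustrative usage sketch (assumes AWS credentials; the export name below comes
61 | # from the docstring example above and may not exist in every account):
62 | #
63 | #   import boto3
64 | #   cfn = CFN(boto3.session.Session())
65 | #   cfn.get_read_access_policy_for_module('DataCollection')
66 | #   # -> 'arn:aws:iam::xxx:policy/CID-DC-DataCollectionReadAccess' or None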
--------------------------------------------------------------------------------
/cid/helpers/csv2view.py:
--------------------------------------------------------------------------------
1 | import re
2 | import csv
3 | import logging
4 | from io import StringIO
5 |
6 | from cid.exceptions import CidCritical
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 | def escape_sql(text, character='_'):
11 | """ escape for sql statement """
12 | return re.sub('[^0-9a-zA-Z]+', character, str(text))
13 |
14 | def escape_text(text, character='_'):
15 | """ escape for sql statement """
16 | return str(text).replace("'", "''")
17 |
18 | def read_nonblank_lines(lines):
19 | """ returns non blank lines from file"""
20 | for line in lines:
21 | line_rstrip = line.rstrip()
22 | if line_rstrip:
23 | yield line_rstrip
24 |
25 | def read_csv(input_file_name):
26 | """ Read CSV """
27 | sniffer = csv.Sniffer()
28 | try:
29 | # AWS Organization returns a CSV with a BOM (byte order mark) character = U+FEFF to specify encoding
30 |         with open(input_file_name, errors='ignore') as file_start:
31 |             encoding = 'utf-8-sig' if file_start.read(1) == '\ufeff' else 'utf-8'
32 |
33 | with open(input_file_name, encoding=encoding, errors='ignore') as file_:
34 | text = '\n'.join([line for line in read_nonblank_lines(file_)]) # AWS Organization produces a CSV with empty lines
35 | dialect = sniffer.sniff(text)
36 | data = [row for row in csv.DictReader(StringIO(text), dialect=dialect, skipinitialspace=True)]
37 |
38 | except FileNotFoundError:
39 | raise CidCritical(f'File not found: {repr(input_file_name)}')
40 | except PermissionError:
41 | raise CidCritical(f'Insufficient permission to read {repr(input_file_name)}!')
42 | except IsADirectoryError:
43 | raise CidCritical(f'{repr(input_file_name)} is a directory!')
44 | except Exception as _err:
45 | raise CidCritical(_err)
46 | return data
47 |
48 | def csv2view(input_file_name: str, name: str, output_file_name: str=None) -> None:
49 | """ Make an sql mapping from sql """
50 | logger.debug(f"input {input_file_name}")
51 | data = read_csv(input_file_name)
52 | lines = []
53 | for line in data:
54 | arr = ", ".join([f'\'{escape_text(val, " ")}\'' for val in line.values()])
55 | lines.append(f'ROW({arr})')
56 |
57 | if not lines:
58 |         raise CidCritical('There is no data to write, exiting')
59 |
60 | headers = data[0].keys()
61 |
62 | row_lines = '\n, '.join(lines)
63 | cols = ', '.join([escape_sql(c.lower()) for c in headers ])
64 |
65 | sql = (f'''
66 | CREATE OR REPLACE VIEW {escape_sql(name.lower())} AS
67 | SELECT *
68 | FROM
69 | (
70 | VALUES
71 | {row_lines}
72 | ) ignored_table_name ({cols})
73 | '''.strip())
74 | if len(sql) > 262144:
75 | logger.warning(f'The maximum allowed query string length is 262144 bytes. Current sql size: {len(sql)}')
76 | output_file_name = output_file_name or name + '.sql'
77 | with open(output_file_name, 'w') as file_:
78 | file_.write(sql)
79 | print(f'Output: {output_file_name}')
80 |
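81 | 
82 | def test_csv2view(tmp_path):
83 |     """ Illustrative pytest sketch (tmp_path is the pytest fixture; the data is hypothetical) """
84 |     input_file = tmp_path / 'accounts.csv'
85 |     input_file.write_text('account_id,account_name\n111111111,account1\n')
86 |     output_file = tmp_path / 'my_map.sql'
87 |     csv2view(str(input_file), 'my_map', str(output_file))
88 |     sql = output_file.read_text()
89 |     assert 'CREATE OR REPLACE VIEW my_map AS' in sql
90 |     assert "ROW('111111111', 'account1')" in sql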
--------------------------------------------------------------------------------
/cid/helpers/diff.py:
--------------------------------------------------------------------------------
1 | import difflib
2 |
3 | from cid.utils import cid_print
4 |
5 | def diff(text1, text2):
6 | """ Return SQL diff """
7 | res = {}
8 | ndiff = difflib.ndiff(
9 | text1.splitlines(keepends=True),
10 | text2.splitlines(keepends=True),
11 | )
12 | lines = ''.join(ndiff)
13 | res['lines'] = lines
14 | res['+'] = 0
15 | res['-'] = 0
16 | res['='] = 0
17 | for line in lines.splitlines():
18 | if line.startswith('-'): res['-'] += 1
19 | elif line.startswith('+'): res['+'] += 1
20 | elif line.startswith(' '): res['='] += 1
21 | res['diff'] = res['+'] + res['-']
22 | res['printable'] = diff_2_cid_print(lines)
23 | return res
24 |
25 | def diff_2_cid_print(lines):
26 | """ convert ndiff to printable color format """
27 | new_lines = []
28 | next_line_chars = []
29 | for line in lines.splitlines()[::-1]:
30 | color = 'END'
31 | new_line = ''
32 | if line.startswith('-'):
33 | color = 'RED'
34 | elif line.startswith('+'):
35 | color = 'GREEN'
36 | elif line.startswith(' '):
37 | color = 'GREY'
38 |
39 | if not line.startswith('?'):
40 | new_line = f'<{color}>'
41 | bold = False
42 | last_bold = False
43 | for i, c in enumerate(line):
44 | bold = (i in next_line_chars)
45 |
46 | if bold and not last_bold:
47 | new_line += f'<{color}>'
48 | if last_bold and not bold:
49 | new_line += f'<{color}>'
50 | new_line += c
51 | last_bold = bold
52 | new_line += ''
53 | next_line_chars = []
54 | else:
55 | next_line_chars = []
56 | for i, c in enumerate(line):
57 | if c not in [' ', '?']:
58 | next_line_chars.append(i)
59 | new_lines.append(new_line)
60 | return ('\n'.join(new_lines[::-1]))
61 |
62 | def print_diff(diff_lines):
63 |     """ print ndiff lines to stdout in color """
64 |     return cid_print(diff_2_cid_print(diff_lines))
65 |
66 |
67 | def test_diff_2_cid_print():
68 |     res = diff('SELECT * FROM aaa\n', 'SELECT * FROM "aaa"\n')
69 |     assert res['-'] == 1 and res['+'] == 1
70 |     assert '<RED>' in res['printable'] and '<GREEN>' in res['printable']
--------------------------------------------------------------------------------
/cid/helpers/glue.py:
--------------------------------------------------------------------------------
1 | """ Glue Helper
2 | """
3 | import time
4 | import logging
5 |
6 | import yaml
7 |
8 | from cid.base import CidBase
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
13 | class Glue(CidBase):
14 |
15 | def __init__(self, session):
16 | super().__init__(session)
17 | self.client = self.session.client('glue', region_name=self.region)
18 |
19 | def create_or_update_table(self, view_name: str, definition: dict) -> None:
20 | """create_or_update_table"""
21 | definition = yaml.safe_load(definition)
22 | logger.debug(definition)
23 | try:
24 | self.client.create_table(**definition)
25 |             logger.debug('create_table succeeded')
26 | logger.info(f'Table "{view_name}" created')
27 | except self.client.exceptions.AlreadyExistsException:
28 | logger.info(f'Glue table "{view_name}" exists')
29 | self.client.update_table(**definition)
30 | logger.info(f'Table "{view_name}" updated')
31 | except self.client.exceptions.ClientError:
32 | logger.error(definition)
33 | raise
34 |
35 | def create_database(self, name, description: str='Cloud Intelligence Dashboards Database'):
36 | """Create Database"""
37 | return self.client.create_database(
38 | DatabaseInput={
39 | 'Name': name,
40 | 'Description': description,
41 | },
42 | )
43 |
44 | def get_table(self, name, catalog, database):
45 | """Get table"""
46 | return self.client.get_table(
47 | CatalogId=catalog,
48 | DatabaseName=database,
49 | Name=name,
50 | )['Table']
51 |
52 | def delete_table(self, name, catalog, database):
53 | """ Delete an AWS Glue table """
54 | try:
55 | return self.client.delete_table(
56 | CatalogId=catalog,
57 | DatabaseName=database,
58 | Name=name,
59 | )
60 | except self.client.exceptions.EntityNotFoundException:
61 | return True
62 |
63 | def create_or_update_crawler(self, crawler_definition) -> None:
64 | """Create or update crawler. Also start it if not running."""
65 | logger.debug("updating crawler")
66 | logger.debug(crawler_definition)
67 | for attempt in range(10):
68 | try:
69 | self.client.create_crawler(**crawler_definition)
70 |                 logger.info(f'Created crawler {crawler_definition["Name"]}')
71 | except self.client.exceptions.AlreadyExistsException:
72 |                 logger.info(f'Updating crawler {crawler_definition["Name"]}')
73 | self.client.update_crawler(**crawler_definition)
74 | except self.client.exceptions.ClientError as exc:
75 | if 'Service is unable to assume provided role' in str(exc):
76 |                     logger.info(f'attempt {attempt}: retrying') # sometimes newly created roles cannot be assumed right away
77 | time.sleep(3)
78 | continue
79 | logger.error(crawler_definition)
80 | raise
81 | break
82 |
83 | crawler_name = crawler_definition['Name']
84 | try:
85 | self.client.start_crawler(Name=crawler_name)
86 | logger.critical(f'Started crawler {crawler_name}')
87 | except self.client.exceptions.ClientError as exc:
88 | if 'Cannot update Crawler while running' in str(exc):
89 | logger.info(f"Crawler is already running.")
90 | else:
91 | raise
92 |
93 | def get_crawler(self, name: str):
94 | """ GetCrawler """
95 | return self.client.get_crawler(Name=name)['Crawler']
96 |
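97 | # Illustrative usage sketch (assumes AWS credentials; database and table names are hypothetical):
98 | #
99 | #   import boto3
100 | #   glue = Glue(boto3.session.Session())
101 | #   glue.create_database('cid_cur')
102 | #   glue.get_table('cur', catalog='123456789012', database='cid_cur')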
--------------------------------------------------------------------------------
/cid/helpers/organizations.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from cid.base import CidBase
4 |
5 | logger = logging.getLogger(__name__)
6 |
7 |
8 | class Organizations(CidBase):
9 | """Organizations helper class"""
10 |
11 | def __init__(self, session):
12 | super().__init__(session)
13 | self.client = self.session.client("organizations", region_name=self.region)
14 |
15 | def get_account_email(self):
16 | """Try to extract the account's email address for Organizations"""
17 | try:
18 | result = self.client.describe_account(AccountId=self.account_id).get("Email", "")
19 | except Exception: # pylint: disable=broad-except
20 | result = None
21 |
22 | return result
23 |
24 | def get_account_name(self):
25 | """Try to extract the account name from Organizations"""
26 | try:
27 | result = self.client.describe_account(AccountId=self.account_id).get("Name", "")
28 | except Exception: # pylint: disable=broad-except
29 | result = None
30 |
31 | return result
32 |
--------------------------------------------------------------------------------
/cid/helpers/parameter_store.py:
--------------------------------------------------------------------------------
1 | from cid.exceptions import CidCritical
2 |
3 | class AthenaStore():
4 | def __init__(self, athena, view_name='cid_parameters'):
5 | self.athena = athena
6 | self.view_name = view_name
7 |
8 | def dump(self, data):
9 |         ''' dump data to athena
10 | '''
11 | # FIXME: make it multi view
12 | self.athena.query(self._generate_view_query(data, self.view_name))
13 |
14 | def load(self):
15 | ''' load from athena
16 | '''
17 | try:
18 | res = self.athena.query(f'''select * from {self.view_name}''', include_header=True)
19 | except CidCritical as exc:
20 | if 'TABLE_NOT_FOUND' in str(exc):
21 | res = []
22 | else:
23 | raise
24 | return [{k:v for k, v in zip(res[0], row)} for row in res[1:]]
25 |
26 | def _to_sql_str(self, val):
27 | if val is None:
28 | return "''"
29 | return "'" + str(val).replace("'", "''") + "'"
30 |
31 | def _generate_view_query(self, data, name):
32 | all_keys = {key for dictionary in data for key in dictionary.keys()}
33 | lines = ',\n '.join([f'''ROW({','.join([self._to_sql_str(line.get(k)) for k in all_keys])})''' for line in data])
34 | query = f"""
35 | CREATE OR REPLACE VIEW {name} AS
36 | SELECT *
37 | FROM (
38 | VALUES
39 | {lines}
40 | ) ignored_table_name ({','.join([key for key in all_keys])})
41 | """
42 | return query
43 |
44 | class ParametersController(AthenaStore):
45 | def load_parameters(self, context):
46 | data = self.load()
47 | return { line.get('parameter'):line.get('value') for line in data}
48 |
49 | def dump_parameters(self, params, context=None):
50 | data = [{'parameter': key, 'value': ','.join(val) if isinstance(val, list) else val, 'context': str(context) } for key, val in params.items()]
51 | self.dump(data)
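52 | 
53 | 
54 | def test_generate_view_query():
55 |     """ Illustrative sketch: view query generation needs no Athena connection """
56 |     store = AthenaStore(athena=None)
57 |     query = store._generate_view_query([{'parameter': 'p1', 'value': 'v1'}], 'cid_parameters')
58 |     assert 'CREATE OR REPLACE VIEW cid_parameters AS' in query
59 |     assert "ROW('p1','v1')" in query or "ROW('v1','p1')" in query  # key order comes from a set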
--------------------------------------------------------------------------------
/cid/helpers/quicksight/datasource.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from cid.helpers.quicksight.resource import CidQsResource
4 |
5 | logger = logging.getLogger(__name__)
6 |
7 | class Datasource(CidQsResource):
8 |
9 | @property
10 | def AthenaParameters(self) -> dict:
11 | return self.parameters.get('AthenaParameters', {})
12 |
13 | @property
14 |     def role_name(self) -> str:
15 | role_arn = self.parameters.get('AthenaParameters', {}).get('RoleArn')
16 | if not role_arn:
17 | return None
18 | return role_arn.split('/')[-1]
19 |
20 | @property
21 | def id(self) -> str:
22 | return self.get_property('DataSourceId')
23 |
24 | @property
25 | def parameters(self) -> dict:
26 | return self.get_property('DataSourceParameters') or {}
27 |
28 | @property
29 | def status(self) -> str:
30 | return self.get_property('Status')
31 |
32 | @property
33 | def type(self) -> str:
34 | return self.get_property('Type')
35 |
36 | @property
37 | def is_healthy(self) -> bool:
38 | return self.status not in ['CREATION_IN_PROGRESS', 'CREATION_FAILED']
39 |
40 | @property
41 |     def error_info(self) -> dict:
42 | """ returns a dict {'Type': '...', 'Message': '...'} or empty dict """
43 | return self.get_property('ErrorInfo') or {}
--------------------------------------------------------------------------------
/cid/helpers/quicksight/definition.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | from typing import Dict
4 | from cid.helpers.quicksight.resource import CidQsResource
5 | from cid.helpers.quicksight.version import CidVersion
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 | class Definition:
10 |
11 | def __init__(self, raw: dict) -> None:
12 | self.raw: dict = raw
13 | # Resolve version from definition contents
14 | self._raw_version = self.resolve_version(self.raw)
15 |
16 | @property
17 | def cid_version(self) -> CidVersion:
18 | # Resolve version from "About" sheet contents
19 | try:
20 | return CidVersion(self._raw_version)
21 | except TypeError as e:
22 | logger.debug(f"Could not resolve CID version. Raw version value '{self._raw_version}' does not conform to CID version format vmajor.minor.build e.g. v.1.0.1")
23 |
24 | return CidVersion("v1.0.0")
25 |
26 | def resolve_version(self, raw: dict):
27 | about_content = []
28 |
29 | sheets = raw.get("Sheets", [])
30 |
31 | if sheets:
32 | text_boxes_content = self._extract_sheet_textboxes_content(sheets)
33 | about_content += text_boxes_content
34 |
35 | insight_visuals_content = self._extract_sheet_visuals_content(sheets)
36 | about_content += insight_visuals_content
37 |
38 | if about_content:
39 | all_about_content = " | ".join(about_content)
40 | # find first string that looks like vx.y.z using a regular expression where x, y and z are numbers
41 | version_matches = re.findall(r"(v\d+?\.\d+?\.\d+?)", all_about_content)
42 | if version_matches:
43 | return version_matches[0]
44 | else:
45 | version_matches = re.findall(r"(v\d+?\.\d+?)", all_about_content)
46 | if version_matches:
47 | return f"{version_matches[0]}.0"
48 |
49 | return None
50 |
51 | def _extract_sheet_visuals_content(self, sheets: list):
52 | insight_visuals_content = []
53 | visuals = (visual for sheet in sheets for visual in sheet.get("Visuals", []) if sheet.get("Name", None) == "About")
54 | insight_visuals_content = [
55 | visual["InsightVisual"]["InsightConfiguration"]["CustomNarrative"].get("Narrative", "")
56 | for visual in visuals
57 | if "InsightVisual" in visual
58 | and "InsightConfiguration" in visual["InsightVisual"]
59 | and "CustomNarrative" in visual["InsightVisual"]["InsightConfiguration"]
60 | ]
61 | return insight_visuals_content
62 |
63 | def _extract_sheet_textboxes_content(self, sheets: list):
64 | text_boxes = (text_boxes for sheet in sheets for text_boxes in sheet.get("TextBoxes", []) if sheet.get("Name", None) == "About")
65 | text_boxes_content = [
66 | text_content.get("Content", "") for text_content in text_boxes
67 | ]
68 | return text_boxes_content
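69 | 
70 | 
71 | def test_resolve_version():
72 |     """ Illustrative sketch with a hypothetical 'About' sheet payload """
73 |     raw = {'Sheets': [{'Name': 'About', 'TextBoxes': [{'Content': 'CUDOS Dashboard v4.50.1'}]}]}
74 |     assert str(Definition(raw).cid_version) == 'v4.50.1'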
--------------------------------------------------------------------------------
/cid/helpers/quicksight/resource.py:
--------------------------------------------------------------------------------
1 | class CidQsResource():
2 | def __init__(self, raw: dict) -> None:
3 | self.raw: dict = raw
4 |
5 | @property
6 | def name(self) -> str:
7 | return self.get_property('Name')
8 |
9 | @property
10 | def arn(self) -> str:
11 | return self.get_property('Arn')
12 |
13 | @property
14 | def account_id(self) -> str:
15 | return self.arn.split(':')[4]
16 |
17 | def get_property(self, property: str) -> str:
18 | return self.raw.get(property)
19 |
--------------------------------------------------------------------------------
/cid/helpers/quicksight/template.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | from typing import Dict
4 | from cid.helpers.quicksight.resource import CidQsResource
5 | from cid.helpers.quicksight.version import CidVersion
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 | class Template(CidQsResource):
10 |
11 | @property
12 | def id(self) -> str:
13 | return self.get_property('TemplateId')
14 |
15 | @property
16 | def datasets(self) -> Dict[str, list]:
17 | _datasets = {}
18 | try:
19 | for ds in self.raw.get('Version').get('DataSetConfigurations'):
20 | _datasets.update({ds.get('Placeholder'): ds.get('DataSetSchema').get('ColumnSchemaList')})
21 | except Exception as e:
22 | logger.debug(e, exc_info = True)
23 | return _datasets
24 |
25 | @property
26 | def version(self) -> int:
27 | return self.raw.get('Version', dict()).get('VersionNumber', -1)
28 |
29 | @property
30 | def description(self) -> str:
31 | return self.raw.get('Version', dict()).get('Description')
32 |
33 | @property
34 | def cid_version(self) -> CidVersion:
35 | return CidVersion(self.description)
36 |
37 |
--------------------------------------------------------------------------------
/cid/helpers/quicksight/version.py:
--------------------------------------------------------------------------------
1 | """ Semantic version of dashboards in CID
2 | """
3 |
4 | import re
5 | import logging
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 | class CidVersion:
10 | """ Semantic version of dashboards in CID
11 | """
12 |
13 | def __init__(self, version):
14 |
15 | if isinstance(version, __class__):
16 | self.major, self.minor, self.build = version.major, version.minor, version.build
17 | elif isinstance(version, str):
18 | self.major, self.minor, self.build = self._parse(version)
19 | else:
20 | raise TypeError(f'{version} must be {__class__} or str ')
21 |
22 | def __str__(self):
23 | return f'v{self.major}.{self.minor}.{self.build}'
24 |
25 | def _parse(self, str_version, default=(1, 0, 0)):
26 |         version_pattern = re.compile(r"^[vV](?P<major>[0-9]+)\.(?P<minor>[0-9]+)(\.(?P<build>[0-9]+))?$")
27 | results = version_pattern.match(str_version)
28 |
29 | if not results:
30 | logger.debug(f'Could not find version pattern in provided string: {str_version} will use default ({default})')
31 | major, minor, build = default
32 | else:
33 | major = int(results.group("major"))
34 | minor = int(results.group("minor") or 0)
35 | build = int(results.group("build") or 0)
36 | return major, minor, build
37 |
38 | def compatible_versions(self, _version) -> bool:
39 | """ Return True when both version are on the same major branch """
40 | return CidVersion(_version).major == self.major
41 |
42 | def __lt__(self, _version):
43 | return self.as_tuple() < CidVersion(_version).as_tuple()
44 |
45 | def __le__(self, _version):
46 | return self.as_tuple() <= CidVersion(_version).as_tuple()
47 |
48 | def __eq__(self, _version):
49 | return self.as_tuple() == CidVersion(_version).as_tuple()
50 |
51 | def __ge__(self, _version):
52 | return self.as_tuple() >= CidVersion(_version).as_tuple()
53 |
54 | def __gt__(self, _version):
55 | return self.as_tuple() > CidVersion(_version).as_tuple()
56 |
57 | def __ne__(self, _version):
58 | return self.as_tuple() != CidVersion(_version).as_tuple()
59 |
60 | def as_tuple(self) -> tuple:
61 | """ return version as tuple """
62 | return (self.major, self.minor, self.build)
63 |
64 |
65 | def test_versions():
66 | """ basic tests for versions
67 | """
68 | assert CidVersion('v1.2').as_tuple() == (1, 2, 0)
69 | assert CidVersion('v1.2.3').as_tuple() == (1, 2, 3)
70 | assert CidVersion('v1.3.3') > CidVersion('V1.2.4')
71 | assert CidVersion('v1.3.3') >= CidVersion('v1.3.3')
72 | assert CidVersion(CidVersion('v1.2')).as_tuple() == (1, 2, 0)
73 | assert str(CidVersion('v1.2')) == 'v1.2.0'
74 |
75 |
76 | def test_version_raises():
77 | """ test exception cases
78 | """
79 | import pytest
80 | with pytest.raises(TypeError):
81 | CidVersion(1)
--------------------------------------------------------------------------------
/cid/helpers/randtime.py:
--------------------------------------------------------------------------------
1 | ''' Helper functions for dataset schedules
2 | '''
3 | import hashlib
4 | from datetime import datetime, timedelta
5 |
6 | def pseudo_random_generator(hashable_string: str, maximum: int=100) -> int:
7 | """Gernerate a pseudo random integer number, but the same for any given hashable_string identifier """
8 | hash_hex = hashlib.md5(bytes(hashable_string, "utf-8")).hexdigest()[:16] # nosec B303, B324 - not used for security
9 | bigint_value = int.from_bytes(bytes.fromhex(hash_hex), 'little', signed=True)
10 | return bigint_value % int(maximum)
11 |
12 | def get_random_time_from_range(hashable_string, time_range):
13 | """ Generate a random time from a given range
14 |     If the input time is in the format hh:mm, it is returned unchanged.
15 |     If the input is a time range hh:mm-hh:mm, a deterministic pseudo-random time within that range is returned in the format hh:mm.
16 | """
17 | items = time_range.strip().split('-')
18 |
19 | if len(items) == 1:
20 | try:
21 | return datetime.strptime(time_range.strip(), '%H:%M').strftime('%H:%M')
22 | except Exception as exc:
23 | raise ValueError(f'Invalid time range "{time_range}": {str(exc)}') from exc
24 | elif len(items) == 2:
25 | try:
26 | time_from = datetime.strptime(items[0].strip(), '%H:%M')
27 | time_to = datetime.strptime(items[1].strip(), '%H:%M')
28 | if time_to < time_from:
29 | time_to += timedelta(days=1)
30 | time_diff_sec = (time_to - time_from).total_seconds()
31 | return (time_from + timedelta(seconds=pseudo_random_generator(hashable_string, time_diff_sec))).strftime('%H:%M')
32 | except Exception as exc:
33 | raise ValueError(f'Invalid time range "{time_range}": {str(exc)}') from exc
34 | else:
35 | raise ValueError(f'Invalid time range "{time_range}". Please provide timerange in format hh:mm or hh:mm-hh:mm')
36 |
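37 | 
38 | def test_get_random_time_from_range():
39 |     """ Illustrative sketch: fixed times pass through, ranges are deterministic """
40 |     assert get_random_time_from_range('any-id', '09:00') == '09:00'
41 |     first = get_random_time_from_range('dataset-id', '09:00-11:00')
42 |     assert first == get_random_time_from_range('dataset-id', '09:00-11:00')  # stable for the same id
43 |     assert '09:00' <= first <= '11:00'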
--------------------------------------------------------------------------------
/cid/helpers/s3.py:
--------------------------------------------------------------------------------
1 | import re
2 | import logging
3 | from typing import Optional, List
4 |
5 | from cid.base import CidBase
6 | from cid.exceptions import CidError
7 |
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | class S3(CidBase):
13 | ''' S3 Helper
14 | '''
15 | def __init__(self, session):
16 | super().__init__(session)
17 | self.client = self.session.client('s3', region_name=self.region)
18 |
19 | def ensure_bucket(self, name: str, lifecycle: int=14) -> str:
20 |         ''' ensure the bucket exists and has encryption and a lifecycle configuration
21 | '''
22 | try:
23 | self.client.head_bucket(Bucket=name)
24 | return name
25 | except self.client.exceptions.ClientError as exc:
26 | if int(exc.response['Error']['Code']) != 404:
27 | raise CidError(f"Cannot check bucket {exc}!") from exc
28 |
29 | parameters = {
30 | 'ACL': 'private',
31 | 'Bucket': name
32 | }
33 | if self.region != 'us-east-1':
34 | parameters['CreateBucketConfiguration'] = {'LocationConstraint': self.region}
35 | self.client.create_bucket(**parameters)
36 |
37 | self.client.put_bucket_encryption(
38 | Bucket=name,
39 | ServerSideEncryptionConfiguration={
40 | 'Rules': [{
41 | 'ApplyServerSideEncryptionByDefault': { 'SSEAlgorithm': 'AES256' },
42 | }]
43 | }
44 | )
45 |
46 | if lifecycle is not None:
47 | self.client.put_bucket_lifecycle_configuration(
48 | Bucket=name,
49 | LifecycleConfiguration={
50 | 'Rules': [{
51 |                     'ID': f'ExpireAfter{lifecycle}Days',
52 | 'Status': 'Enabled',
53 | 'Expiration': { 'Days': lifecycle },
54 | 'Filter': { 'Prefix': '/' },
55 | }],
56 | },
57 | )
58 | return name
59 |
60 | def list_buckets(self, region_name: Optional[str] = None) -> List[str]:
61 | ''' List buckets
62 | region_name: optional region filter
63 | '''
64 | bucket_names = [bucket['Name'] for bucket in self.client.list_buckets()['Buckets']]
65 | if region_name:
66 | bucket_names = list(filter(
67 | lambda bucket_name: self.client.get_bucket_location(Bucket=bucket_name).get('LocationConstraint', 'us-east-1') == region_name,
68 | bucket_names,
69 | ))
70 | return bucket_names
71 |
72 | def iterate_objects(self, bucket: str, prefix: str='/', search: str='Contents[].Key') -> List[str]:
73 | ''' iterate objects in bucket
74 | '''
75 | yield from self.client.get_paginator('list_objects_v2').paginate(Bucket=bucket, Prefix=prefix).search(search)
76 |
77 | def list_path_prefixes_with_regexp(self, bucket: str, regexp: str, prefix: str='/') -> List[str]:
78 | ''' list prefixes of bucket object keys before given regexp
79 | bucket: bucket name
80 | regexp: a regexp that should match. ex : 'year*/month*/'
81 | '''
82 | paths = []
83 | regexp = regexp.replace('*', '.+?') + '$'
84 | for key in self.iterate_objects(bucket=bucket):
85 | path = '/'.join(key.split('/')[:-1]) + '/'
86 | if any(path.startswith(existing_path) for existing_path in paths):
87 |                 continue # already covered by an existing prefix
88 | if re.findall(regexp, path):
89 | paths.append(re.sub(regexp,'',path))
90 | return paths
91 |
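92 | # Illustrative example (sketch; bucket and keys hypothetical): with object keys like
93 | # 'cur/year=2024/month=1/data.parquet', list_path_prefixes_with_regexp(bucket, 'year*/month*/')
94 | # returns ['cur/'], i.e. the key prefixes found before the matched pattern.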
--------------------------------------------------------------------------------
/cid/helpers/timezone.py:
--------------------------------------------------------------------------------
1 | ''' Helper functions for dataset schedules
2 | '''
3 | import logging
4 |
5 | import boto3
6 | from tzlocal.windows_tz import win_tz
7 | from tzlocal import get_localzone_name
8 |
9 | from cid.utils import exec_env
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 | MAPPING_REGION_2_TIMEZONE = {
14 | "us-east-1": "America/New_York",
15 | "us-east-2": "America/New_York",
16 | "us-west-1": "America/Los_Angeles",
17 | "us-west-2": "America/Los_Angeles",
18 | "af-south-1": "Africa/Blantyre",
19 | "ap-east-1": "Asia/Hong_Kong",
20 | "ap-south-1": "Asia/Kolkata",
21 | "ap-southeast-3": "Asia/Jakarta",
22 | "ap-southeast-4": "Australia/Melbourne",
23 | "ap-northeast-3": "Asia/Tokyo",
24 | "ap-northeast-2": "Asia/Seoul",
25 | "ap-southeast-1": "Asia/Singapore",
26 | "ap-southeast-2": "Australia/Sydney",
27 | "ap-northeast-1": "Asia/Tokyo",
28 | "ca-central-1": "America/Toronto",
29 | "eu-central-1": "Europe/Berlin",
30 | "eu-west-1": "Europe/Dublin",
31 | "eu-west-2": "Europe/London",
32 | "eu-south-1": "Europe/Rome",
33 | "eu-west-3": "Europe/Paris",
34 | "eu-south-2": "Europe/Madrid",
35 | "eu-north-1": "Europe/Stockholm",
36 | "eu-central-2": "Europe/Zurich",
37 | "me-south-1": "Asia/Riyadh",
38 | "me-central-1": "Asia/Dubai",
39 | "sa-east-1": "America/Sao_Paulo",
40 | "us-gov-east-1": "US/Eastern",
41 | "us-gov-west-1": "US/Pacific",
42 | "il-central-1": "Asia/Jerusalem",
43 | }
44 |
45 |
46 | def get_timezone_from_aws_region(region):
47 | """ Get Timezone from AWS region. """
48 | if region not in MAPPING_REGION_2_TIMEZONE:
49 |         logger.warning(f'Unknown region {region}. Please create a GitHub issue to add it.')
50 | return MAPPING_REGION_2_TIMEZONE.get(region, "America/New_York")
51 |
52 |
53 | def get_default_timezone():
54 | """ Get timzone best guess from Shell or from Region. """
55 |
56 |     # When running in Lambda or CloudShell the local timezone does not make much sense,
57 | # so we take the one from the region
58 | if exec_env()['terminal'] in ('cloudshell', 'lambda'):
59 | region = boto3.session.Session().region_name
60 | return get_timezone_from_aws_region(region)
61 |
62 | # for all other cases use local timezone of the shell
63 | return get_localzone_name()
64 |
65 |
66 | def get_all_timezones():
67 | """Get all zones"""
68 | # zoneinfo is not working with 3.7, 3.8
69 |
70 | return sorted(list(set(
71 | list(win_tz.values()) + list(MAPPING_REGION_2_TIMEZONE.values())
72 | )))
73 |
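74 | 
75 | def test_get_timezone_from_aws_region():
76 |     """ Illustrative sketch: known regions map directly, unknown regions fall back """
77 |     assert get_timezone_from_aws_region('eu-west-1') == 'Europe/Dublin'
78 |     assert get_timezone_from_aws_region('xx-fake-1') == 'America/New_York'  # hypothetical region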
--------------------------------------------------------------------------------
/cid/logger.py:
--------------------------------------------------------------------------------
1 | import os
2 | import logging
3 |
4 | def add_logging_level(name, num):
5 | """
6 |     This method was inspired by the answers to a Stack Overflow post:
7 |     https://stackoverflow.com/a/35804945
8 |
9 | Usage:
10 | add_logging_level('TRACE', logging.DEBUG - 5)
11 | logging.getLogger(__name__).setLevel("TRACE")
12 | logging.getLogger(__name__).trace('that worked')
13 | logging.trace('so did this')
14 | logging.TRACE
15 | """
16 | method = name.lower()
17 |
18 | def log_method(self, message, *args, **kwargs):
19 | if self.isEnabledFor(num):
20 | self._log(num, message, args, **kwargs) #yes, not '*args'
21 | def log_to_root(message, *args, **kwargs):
22 | logging.log(num, message, *args, **kwargs)
23 |
24 | if hasattr(logging, name): return # Already set
25 | logging.addLevelName(num, name)
26 | setattr(logging, name, num)
27 | setattr(logging.getLoggerClass(), method, log_method)
28 | setattr(logging, method, log_to_root)
29 |
30 |
31 | def set_cid_logger(verbosity=2, log_filename=None):
32 |
33 | add_logging_level('TRACE', logging.DEBUG - 5)
34 |
35 | logger = logging.getLogger('cid')
36 |
37 | # File handler logs everything down to DEBUG level
38 | if log_filename and not os.environ.get('AWS_EXECUTION_ENV', '').startswith('AWS_Lambda'):
39 | fh = logging.FileHandler(log_filename)
40 | #fh.setLevel(logging.TRACE)
41 | formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s:%(funcName)s:%(lineno)d - %(message)s')
42 | fh.setFormatter(formatter)
43 | logger.addHandler(fh)
44 |
45 | # Console handler logs everything down to WARNING level
46 | ch = logging.StreamHandler()
47 | #ch.setLevel(logging.WARNING)
48 | formatter = logging.Formatter('%(levelname)s - %(message)s')
49 | ch.setFormatter(formatter)
50 | logger.addHandler(ch)
51 |
52 | if verbosity:
53 |         # Cap verbosity; base level is WARNING, maximum is TRACE
54 | verbosity = min(verbosity, 3)
55 | level_map = {
56 | 1: logging.ERROR,
57 | 2: logging.WARNING,
58 | 3: logging.INFO,
59 | 4: logging.DEBUG,
60 | 5: logging.TRACE,
61 | }
62 | logger.setLevel(level_map.get(2 + verbosity, logging.INFO))
63 |     # Report the effective logging level at startup, now that handlers are configured
64 | print(f'Logging level set to: {logging.getLevelName(logger.getEffectiveLevel())}')
65 |
66 | return logger
67 |
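68 | 
69 | def test_add_logging_level():
70 |     """ Illustrative sketch: the custom TRACE level sits below DEBUG """
71 |     add_logging_level('TRACE', logging.DEBUG - 5)
72 |     assert logging.TRACE == logging.DEBUG - 5
73 |     assert logging.getLevelName(logging.TRACE) == 'TRACE'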
--------------------------------------------------------------------------------
/cid/plugin.py:
--------------------------------------------------------------------------------
1 | # Implements generic Plugin class to load plugins
2 |
3 | import json, yaml
4 | from pkg_resources import (
5 | resource_exists,
6 | resource_string,
7 | resource_listdir,
8 | resource_isdir,
9 | resource_stream,
10 | resource_filename,
11 | )
12 | import logging
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 | class Plugin():
17 |
18 | def __init__(self, name):
19 | logger.debug(f'Initializing plugin {name}')
20 | self.resources = {}
21 | self.name = name
22 | pkg_resources_db_directory = 'data'
23 | for pkg_resource in resource_listdir(self.name,pkg_resources_db_directory):
24 | if not resource_isdir(self.name, f'data/{pkg_resource}'):
25 | logger.debug(f'Located data file: {pkg_resource}')
26 | ext = pkg_resource.rsplit('.', -1)[-1].lower()
27 | content = None
28 | if ext == 'json':
29 | content = json.loads(resource_string(self.name, f'data/{pkg_resource}'))
30 | logger.debug(f'Loaded {pkg_resource} as JSON')
31 | elif ext in ['yaml', 'yml']:
32 | with resource_stream(self.name, f'data/{pkg_resource}') as yaml_stream:
33 | content = yaml.load(yaml_stream, Loader=yaml.SafeLoader)
34 | logger.debug(f'Loaded {pkg_resource} as YAML')
35 | if content is None:
36 | logger.info(f'Unsupported file type: {pkg_resource}')
37 | continue
38 | # If plugin has resources defined in different files,
39 | # they will be merged into one dict
40 | resource_kind = pkg_resource.rsplit('.', -1)[0]
41 | supported_resource_kinds = ['dashboards', 'views', 'datasets']
42 | if resource_kind in supported_resource_kinds:
43 | self.resources.update({
44 | resource_kind: content
45 | })
46 | logger.info(f'Loaded {resource_kind} from {pkg_resource}')
47 | # If plugin has resources defined in one file,
48 | # simply add it to resources dict
49 | else:
50 | self.resources.update(content)
51 | # Add plugin name to every resource
52 | for v in self.resources.values():
53 | for item in v.values():
54 | if item is not None:
55 | item.update({'providedBy': self.name})
56 | item.update({'source': resource_filename(self.name, f'data/{pkg_resource}')})
57 | logger.debug(f'Plugin {self.name} initialized')
58 |
59 | def provides(self) -> dict:
60 | logger.debug(f'Provides: {self.resources}')
61 | return self.resources
62 |
63 | def get_resource(self, resource_name) -> str:
64 | _resource = f'data/{resource_name}'
65 | if resource_exists(self.name, _resource):
66 | logger.info(f'Resource {resource_name} found')
67 | _content = resource_string(self.name, _resource).decode("utf-8")
68 | logger.debug(f'Resource {resource_name} content: {_content}')
69 | return _content
70 | return None
71 |
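# A minimal usage sketch (illustrative; not part of the original module).
# 'cid.builtin.core' is the built-in Core plugin registered in setup.cfg.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    core = Plugin('cid.builtin.core')
    # provides() returns the merged resources dict, keyed by resource kind
    print(list(core.provides().keys()))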
--------------------------------------------------------------------------------
/cid/test/bats/10-deploy-update-delete/compute-optimizer-dashboard.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
4 |
5 | @test "Install" {
6 | run cid-cmd -vv deploy \
7 | --dashboard-id compute-optimizer-dashboard \
8 | --share-with-account \
9 | --athena-database 'optimization_data' \
10 | --view-compute-optimizer-lambda-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ec2_lambda' \
11 | --view-compute-optimizer-ebs-volume-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ebs_volume' \
12 | --view-compute-optimizer-auto-scale-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_auto_scale' \
13 | --view-compute-optimizer-ec2-instance-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ec2_instance'
14 |
15 | [ "$status" -eq 0 ]
16 | }
17 |
18 | @test "Views created" {
19 | run aws athena get-table-metadata \
20 |     --catalog-name 'AwsDataCatalog' \
21 | --database-name 'optimization_data' \
22 | --table-name 'compute_optimizer_all_options'
23 |
24 | [ "$status" -eq 0 ]
25 | }
26 |
27 | @test "Dataset created" {
28 | run aws quicksight describe-data-set \
29 | --aws-account-id $account_id \
30 | --data-set-id compute_optimizer_all_options
31 |
32 | [ "$status" -eq 0 ]
33 | }
34 |
35 | @test "Dashboard created" {
36 | run aws quicksight describe-dashboard \
37 | --aws-account-id $account_id \
38 | --dashboard-id compute-optimizer-dashboard
39 |
40 | [ "$status" -eq 0 ]
41 | }
42 |
43 | @test "Update works" {
44 | run cid-cmd -vv --yes update --force --recursive \
45 | --dashboard-id compute-optimizer-dashboard \
46 | --view-compute-optimizer-lambda-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ec2_lambda' \
47 | --view-compute-optimizer-ebs-volume-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ebs_volume' \
48 | --view-compute-optimizer-auto-scale-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_auto_scale' \
49 | --view-compute-optimizer-ec2-instance-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ec2_instance'
50 |
51 | [ "$status" -eq 0 ]
52 | echo "$output" | grep 'Update completed'
53 | }
54 |
55 | @test "Delete runs" {
56 | run cid-cmd -vv --yes delete \
57 | --dashboard-id compute-optimizer-dashboard
58 |
59 | [ "$status" -eq 0 ]
60 | }
61 |
62 | @test "Dashboard is deleted" {
63 | run aws quicksight describe-dashboard \
64 | --aws-account-id $account_id \
65 | --dashboard-id compute-optimizer-dashboard
66 |
67 | [ "$status" -ne 0 ]
68 | }
69 |
70 | @test "Dataset is deleted" {
71 | run aws quicksight describe-data-set \
72 | --aws-account-id $account_id \
73 | --data-set-id compute_optimizer_all_options
74 |
75 | [ "$status" -ne 0 ]
76 | }
77 |
78 | @test "View is deleted" {
79 | run aws athena get-table-metadata \
80 |     --catalog-name 'AwsDataCatalog' \
81 | --database-name 'optimization_data' \
82 | --table-name 'compute_optimizer_all_options'
83 |
84 | [ "$status" -ne 0 ]
85 | }
--------------------------------------------------------------------------------
/cid/test/bats/10-deploy-update-delete/cost_intelligence_dashboard.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 |
4 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
5 | database_name="${database_name:-athenacurcfn_cur1}" # If variable not set or null, use default
6 |
7 | @test "Install" {
8 | run cid-cmd -vv deploy \
9 | --dashboard-id cost_intelligence_dashboard \
10 |     --athena-database $database_name \
11 |     --account-map-source dummy \
12 |     --share-with-account
13 |
14 | [ "$status" -eq 0 ]
15 | }
16 |
17 | @test "Views created" {
18 | run aws athena get-table-metadata \
19 |     --catalog-name 'AwsDataCatalog' \
20 |     --database-name $database_name \
21 |     --table-name 'summary_view'
22 |
23 | # FIXME: add
24 | # compute_savings_plan_eligible_spend
25 | # summary_view
26 | # s3_view
27 | # customer_all
28 | # ec2_running_cost
29 |
30 | [ "$status" -eq 0 ]
31 | }
32 |
33 | @test "Dataset created" {
34 | run aws quicksight describe-data-set \
35 | --aws-account-id $account_id \
36 | --data-set-id d01a936f-2b8f-49dd-8f95-d9c7130c5e46
37 |
38 | [ "$status" -eq 0 ]
39 | }
40 |
41 | @test "Dashboard created" {
42 | run aws quicksight describe-dashboard \
43 | --aws-account-id $account_id \
44 | --dashboard-id cost_intelligence_dashboard
45 |
46 | [ "$status" -eq 0 ]
47 | }
48 |
49 | @test "Update works" {
50 | run cid-cmd -vv --yes update --force --recursive \
51 |     --dashboard-id cost_intelligence_dashboard
52 |
53 | [ "$status" -eq 0 ]
54 | echo "$output" | grep 'Update completed'
55 | }
56 |
57 |
58 | @test "Delete runs" {
59 | run cid-cmd -vv --yes delete \
60 | --dashboard-id cost_intelligence_dashboard
61 |
62 | [ "$status" -eq 0 ]
63 | }
64 |
65 | @test "Dashboard is deleted" {
66 | run aws quicksight describe-dashboard \
67 | --aws-account-id $account_id \
68 | --dashboard-id cost_intelligence_dashboard
69 |
70 | [ "$status" -ne 0 ]
71 | }
72 |
73 | @test "Dataset is deleted" {
74 | run aws quicksight describe-data-set \
75 | --aws-account-id $account_id \
76 | --data-set-id d01a936f-2b8f-49dd-8f95-d9c7130c5e46
77 |
78 | [ "$status" -ne 0 ]
79 | }
80 |
81 | @test "View is deleted" {
82 | skip
83 | run aws athena get-table-metadata \
84 |     --catalog-name 'AwsDataCatalog' \
85 | --database-name $database_name \
86 | --table-name 'summary_view'
87 |
88 | [ "$status" -ne 0 ]
89 | }
--------------------------------------------------------------------------------
/cid/test/bats/10-deploy-update-delete/cudos.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 |
4 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
5 | database_name="${database_name:-athenacurcfn_cur1}" # If variable not set or null, use default
6 | quicksight_user="${quicksight_user:-cicd-staging}" # If variable not set or null, use default
7 | quicksight_datasource_id="${quicksight_datasource_id:-CID-CMD-Athena}" # If variable not set or null, use default
8 | cur_table="${cur_table:-cur1}" # If variable not set or null, use default. FIXME can be autodetected!
9 |
10 |
11 | @test "Install" {
12 | run cid-cmd -vv deploy \
13 | --dashboard-id cudos-v5 \
14 |     --athena-database $database_name \
15 |     --account-map-source dummy \
16 |     --cur-table-name $cur_table \
17 |     --athena-workgroup primary \
18 | --quicksight-user $quicksight_user \
19 | --share-with-account \
20 | --timezone 'Europe/Paris' \
21 | --quicksight-datasource-id $quicksight_datasource_id \
22 | --resource-tags '' \
23 |     --taxonomy 'payer_account_id,account_id,account_name'
24 |
25 | [ "$status" -eq 0 ]
26 | }
27 |
28 | @test "Views created" {
29 | run aws athena get-table-metadata \
30 |     --catalog-name 'AwsDataCatalog' \
31 |     --database-name $database_name \
32 |     --table-name 'summary_view'
33 |
34 | # FIXME: add
35 | # compute_savings_plan_eligible_spend
36 | # summary_view
37 | # s3_view
38 | # customer_all
39 | # ec2_running_cost
40 |
41 | [ "$status" -eq 0 ]
42 | }
43 |
44 | @test "Dataset created" {
45 | run aws quicksight describe-data-set \
46 | --aws-account-id $account_id \
47 | --data-set-id d01a936f-2b8f-49dd-8f95-d9c7130c5e46
48 |
49 | [ "$status" -eq 0 ]
50 | }
51 |
52 | @test "Dashboard created" {
53 | run aws quicksight describe-dashboard \
54 | --aws-account-id $account_id \
55 | --dashboard-id cudos-v5
56 |
57 | [ "$status" -eq 0 ]
58 | }
59 |
60 | @test "Update works" {
61 | run cid-cmd -vv --yes update --force --recursive \
62 | --dashboard-id cudos-v5 \
63 | --cur-table-name $cur_table \
64 |     --athena-database $database_name \
65 |     --athena-workgroup primary \
66 | --timezone 'Europe/Paris' \
67 | --quicksight-user $quicksight_user \
68 | --quicksight-datasource-id $quicksight_datasource_id \
69 | --resource-tags '' \
70 |     --taxonomy 'payer_account_id,account_id,account_name'
71 |
72 | [ "$status" -eq 0 ]
73 | }
74 |
75 |
76 | @test "Delete runs" {
77 | run cid-cmd -vv --yes delete \
78 |     --athena-database $database_name \
79 |     --athena-workgroup primary \
80 | --dashboard-id cudos-v5
81 |
82 | [ "$status" -eq 0 ]
83 | }
84 |
85 | @test "Dashboard is deleted" {
86 | run aws quicksight describe-dashboard \
87 | --aws-account-id $account_id \
88 | --dashboard-id cudos-v5
89 |
90 | [ "$status" -ne 0 ]
91 | }
92 |
93 | @test "Dataset is deleted" {
94 | run aws quicksight describe-data-set \
95 | --aws-account-id $account_id \
96 | --data-set-id d01a936f-2b8f-49dd-8f95-d9c7130c5e46
97 |
98 | [ "$status" -ne 0 ]
99 | }
100 |
101 | @test "View is deleted" {
102 | run aws athena get-table-metadata \
103 |     --catalog-name 'AwsDataCatalog' \
104 | --database-name $database_name \
105 | --table-name 'summary_view'
106 |
107 | [ "$status" -ne 0 ]
108 | }
--------------------------------------------------------------------------------
/cid/test/bats/10-deploy-update-delete/kpi_dashboard.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 |
4 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
5 | database_name="${database_name:-athenacurcfn_cur1}" # If variable not set or null, use default
6 |
7 | @test "Install" {
8 | run cid-cmd -vv deploy \
9 | --dashboard-id kpi_dashboard \
10 |     --athena-database $database_name \
11 |     --account-map-source dummy \
12 |     --share-with-account
13 |
14 | [ "$status" -eq 0 ]
15 | }
16 |
17 | @test "Views created" {
18 | run aws athena get-table-metadata \
19 | --catalog-name 'AwsDataCatalog'\
20 | --database-name $database_name \
21 | --table-name 'summary_view' \
22 |
23 | # FIXME: add
24 | # - kpi_ebs_storage_all
25 | # - kpi_ebs_snap
26 | # - kpi_instance_all
27 | # - kpi_s3_storage_all
28 | # - kpi_tracker
29 | # - summary_view
30 |
31 | [ "$status" -eq 0 ]
32 | }
33 |
34 | @test "Dataset created" {
35 | run aws quicksight describe-data-set \
36 | --aws-account-id $account_id \
37 | --data-set-id d01a936f-2b8f-49dd-8f95-d9c7130c5e46
38 |
39 | [ "$status" -eq 0 ]
40 | }
41 |
42 | @test "Dashboard created" {
43 | run aws quicksight describe-dashboard \
44 | --aws-account-id $account_id \
45 | --dashboard-id kpi_dashboard
46 |
47 | [ "$status" -eq 0 ]
48 | }
49 |
50 | @test "Update works" {
51 | run cid-cmd -vv --yes update --force --recursive \
52 |     --dashboard-id kpi_dashboard
53 |
54 | [ "$status" -eq 0 ]
55 | echo "$output" | grep 'Update completed'
56 | }
57 |
58 |
59 | @test "Delete runs" {
60 | run cid-cmd -vv --yes delete \
61 | --dashboard-id kpi_dashboard
62 |
63 | [ "$status" -eq 0 ]
64 | }
65 |
66 | @test "Dashboard is deleted" {
67 | run aws quicksight describe-dashboard \
68 | --aws-account-id $account_id \
69 | --dashboard-id kpi_dashboard
70 |
71 | [ "$status" -ne 0 ]
72 | }
73 |
74 | @test "Dataset is deleted" {
75 | skip "summary_view can be used by others"
76 | run aws quicksight describe-data-set \
77 | --aws-account-id $account_id \
78 | --data-set-id d01a936f-2b8f-49dd-8f95-d9c7130c5e46
79 |
80 | [ "$status" -ne 0 ]
81 | }
82 |
83 | @test "View is deleted" {
84 | skip "summary_view can be used by others"
85 | run aws athena get-table-metadata \
86 |     --catalog-name 'AwsDataCatalog' \
87 | --database-name $database_name \
88 | --table-name 'summary_view'
89 |
90 | [ "$status" -ne 0 ]
91 | }
--------------------------------------------------------------------------------
/cid/test/bats/10-deploy-update-delete/ta-organizational-view.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
4 |
5 | @test "Install" {
6 | run cid-cmd -vv deploy \
7 | --dashboard-id ta-organizational-view \
8 | --athena-database 'optimization_data' \
9 |     --share-with-account \
10 |     --view-ta-organizational-view-reports-s3FolderPath "s3://cid-data-$account_id/optics-data-collector/ta-data"
12 |
13 | [ "$status" -eq 0 ]
14 | }
15 |
16 | @test "Views created" {
17 | run aws athena get-table-metadata \
18 |     --catalog-name 'AwsDataCatalog' \
19 | --database-name 'optimization_data' \
20 | --table-name 'ta_organizational_view_reports'
21 |
22 | [ "$status" -eq 0 ]
23 | }
24 |
25 | @test "Dataset created" {
26 | run aws quicksight describe-data-set \
27 | --aws-account-id $account_id \
28 | --data-set-id ta-organizational-view
29 |
30 | [ "$status" -eq 0 ]
31 | }
32 |
33 | @test "Dashboard created" {
34 | run aws quicksight describe-dashboard \
35 | --aws-account-id $account_id \
36 | --dashboard-id ta-organizational-view
37 |
38 | [ "$status" -eq 0 ]
39 | }
40 |
41 | @test "Update works" {
42 | run cid-cmd -vv --yes update --force --recursive \
43 | --dashboard-id ta-organizational-view \
44 |     --view-ta-organizational-view-reports-s3FolderPath "s3://cid-data-$account_id/optics-data-collector/ta-data"
45 |
46 | [ "$status" -eq 0 ]
47 | echo "$output" | grep 'Update completed'
48 | }
49 |
50 | @test "Delete runs" {
51 | run cid-cmd -vv --yes delete \
52 | --dashboard-id ta-organizational-view
53 |
54 | [ "$status" -eq 0 ]
55 | }
56 |
57 | @test "Dashboard is deleted" {
58 | run aws quicksight describe-dashboard \
59 | --aws-account-id $account_id \
60 | --dashboard-id ta-organizational-view
61 |
62 | [ "$status" -ne 0 ]
63 | }
64 |
65 | @test "Dataset is deleted" {
66 | run aws quicksight describe-data-set \
67 | --aws-account-id $account_id \
68 | --data-set-id ta-organizational-view
69 |
70 | [ "$status" -ne 0 ]
71 | }
72 |
73 | @test "View is deleted" {
74 |   run aws athena get-table-metadata \
75 |     --catalog-name 'AwsDataCatalog' \
76 |     --database-name 'optimization_data' \
77 |     --table-name 'ta_organizational_view_reports'
77 |
78 | [ "$status" -ne 0 ]
79 | }
--------------------------------------------------------------------------------
/cid/test/bats/10-deploy-update-delete/trends-dashboard.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 |
4 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
5 | database_name="${database_name:-athenacurcfn_cur1}" # If variable not set or null, use default
6 |
7 | @test "Install" {
8 | run cid-cmd -vv deploy \
9 | --dashboard-id trends-dashboard \
10 |     --athena-database $database_name \
11 |     --account-map-source dummy \
12 |     --share-with-account
13 |
14 | [ "$status" -eq 0 ]
15 | }
16 |
17 | @test "Views created" {
18 | run aws athena get-table-metadata \
19 | --catalog-name 'AwsDataCatalog'\
20 | --database-name $database_name \
21 | --table-name 'monthly_anomaly_detection' \
22 |
23 | # FIXME: add
24 | # - daily-anomaly-detection
25 | # - monthly-bill-by-account
26 | # - monthly-anomaly-detection
27 |
28 | [ "$status" -eq 0 ]
29 | }
30 |
31 | @test "Dataset created" {
32 | run aws quicksight describe-data-set \
33 | --aws-account-id $account_id \
34 | --data-set-id 0f11c81d-536a-405f-8de0-d0dc247627ad
35 |
36 | [ "$status" -eq 0 ]
37 | }
38 |
39 | @test "Dashboard created" {
40 | run aws quicksight describe-dashboard \
41 | --aws-account-id $account_id \
42 | --dashboard-id trends-dashboard
43 |
44 | [ "$status" -eq 0 ]
45 | }
46 |
47 | @test "Update works" {
48 | run cid-cmd -vv --yes update --force --recursive \
49 |     --dashboard-id trends-dashboard
50 |
51 | [ "$status" -eq 0 ]
52 | echo "$output" | grep 'Update completed'
53 | }
54 |
55 |
56 | @test "Delete runs" {
57 | run cid-cmd -vv --yes delete \
58 | --dashboard-id trends-dashboard
59 |
60 | [ "$status" -eq 0 ]
61 | }
62 |
63 | @test "Dashboard is deleted" {
64 | run aws quicksight describe-dashboard \
65 | --aws-account-id $account_id \
66 | --dashboard-id trends-dashboard
67 |
68 | [ "$status" -ne 0 ]
69 | }
70 |
71 | @test "Dataset is deleted" {
72 | run aws quicksight describe-data-set \
73 | --aws-account-id $account_id \
74 | --data-set-id 0f11c81d-536a-405f-8de0-d0dc247627ad
75 |
76 | [ "$status" -ne 0 ]
77 | }
78 |
79 | @test "View is deleted" {
80 | run aws athena get-table-metadata \
81 |     --catalog-name 'AwsDataCatalog' \
82 | --database-name $database_name \
83 | --table-name 'monthly_anomaly_detection'
84 |
85 | [ "$status" -ne 0 ]
86 | }
--------------------------------------------------------------------------------
/cid/test/bats/20-init-quicksight/create-qs-subscription.bats:
--------------------------------------------------------------------------------
1 | #!/bin/bats
2 |
3 | account_id=$(aws sts get-caller-identity --query "Account" --output text )
4 | BATS_TEST_TIMEOUT=300
5 |
6 | # Helper function that waits for the required SubscriptionStatus.
7 | # TODO: add timeout
8 | function wait_subscription {
9 | status=$1
10 | until (aws quicksight describe-account-subscription \
11 | --aws-account-id $account_id \
12 | --query AccountInfo.AccountSubscriptionStatus | grep -m 1 $status);
13 | do :
14 | sleep 5;
15 | done
16 | }
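# A hedged sketch of the timeout the TODO above asks for (illustrative only):
# capping the polling at roughly 5 minutes could look like this inside wait_subscription:
#   for _ in $(seq 1 60); do
#     aws quicksight describe-account-subscription \
#       --aws-account-id $account_id \
#       --query AccountInfo.AccountSubscriptionStatus | grep -m 1 "$status" && return 0
#     sleep 5
#   done
#   return 1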
17 |
18 | @test "Delete Account Subscription" {
19 | aws quicksight update-account-settings \
20 | --aws-account-id $account_id \
21 | --default-namespace default \
22 | --no-termination-protection-enabled
23 | aws quicksight delete-account-subscription --aws-account-id $account_id
24 | }
25 |
26 | @test "Waiting for SubscriptionStatus = UNSUBSCRIBED (can take 2 minutes)" {
27 | wait_subscription "UNSUBSCRIBED"
28 | }
29 |
30 | @test "Run cid-cmd initqs (can take 1 minute)" {
31 | run timeout 300 cid-cmd -vv initqs \
32 | --enable-quicksight-enterprise yes \
33 | --account-name $account_id \
34 | --notification-email 'aaa@bb.com'
35 |
36 | [ "$status" -eq 0 ]
37 | }
38 |
39 | @test "SubscriptionStatus is ACCOUNT_CREATED" {
40 | wait_subscription "ACCOUNT_CREATED"
41 | }
42 |
43 | @test "Edition is ENTERPRISE" {
44 | aws quicksight describe-account-subscription \
45 | --aws-account-id $account_id \
46 | --query AccountInfo.Edition | grep "ENTERPRISE"
47 | }
48 |
--------------------------------------------------------------------------------
/cid/test/bats/README.md:
--------------------------------------------------------------------------------
1 | # CID Bash tests
2 |
3 | ## Prerequisites
4 | * An account with data from the Cost Optimization Data Collection Lab, a CUR, and configured Athena and QuickSight
5 |
6 |
7 | ## Install
8 | On macOS:
9 |
10 | brew install bats-core
11 | brew install parallel
12 |
13 |
14 | For other platforms, see https://bats-core.readthedocs.io/en/latest/installation.html
15 |
16 | ## Run
17 |
18 | Non-parallel run (each test takes 1-3 min):
19 |
20 | bats cid/test/bats/ \
21 | --print-output-on-failure \
22 | --recursive \
23 | --timing \
24 | --trace
25 |
26 |
27 | Experimental parallel run (all tests complete in about 3 min, but this mode is flaky):
28 |
29 | bats cid/test/bats/ \
30 | --jobs 5 \
31 | --no-parallelize-within-files \
32 | --print-output-on-failure \
33 | --recursive \
34 | --timing \
35 | --trace
36 |
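A single test file can also be run directly with the same flags (illustrative, using one of the files in this folder):

    bats cid/test/bats/10-deploy-update-delete/cudos.bats \
        --print-output-on-failure \
        --trace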
--------------------------------------------------------------------------------
/cid/test/python/test_csv2view.py:
--------------------------------------------------------------------------------
1 | import os
2 | from cid.utils import set_parameters
3 | from cid.common import Cid
4 |
5 | import inspect
6 |
7 | def test_basic_csv2view():
8 | with open('test.csv', 'w') as file_:
9 | file_.write('''a,b\nc,d'e''')
10 |
11 | Cid().csv2view(input='test.csv', name='res', athena_database='athenacurcfn_cur1')
12 |
13 | with open('res.sql') as file_:
14 | sql = file_.read()
15 |
16 | assert "CREATE OR REPLACE VIEW res AS" in sql
17 | assert "ROW('c', 'd e')" in sql
18 | assert "(a, b)" in sql
19 |
20 |
--------------------------------------------------------------------------------
/cid/test/python/test_isolated_parameters.py:
--------------------------------------------------------------------------------
1 | from cid.utils import IsolatedParameters, get_parameters, set_parameters
2 |
3 | def test_isolated_parameter_context():
4 | """ make sure the isolated_parameter works
5 | """
6 | set_parameters({'param': 'a'})
7 |
8 | with IsolatedParameters():
9 | set_parameters({'param': 'b'})
10 | assert get_parameters().get('param') == 'b', 'parameters within context must be B'
11 |
12 |     assert get_parameters().get('param') == 'a', 'parameters outside the context must be restored to A'
13 |
--------------------------------------------------------------------------------
/cid/test/python/test_merge.py:
--------------------------------------------------------------------------------
1 | from cid.utils import merge_objects
2 |
3 |
4 | def test_merge_objects():
5 | """ make sure the merge works with depth
6 | """
7 |
8 | obj1 = {'a': {'b': {'c1': 1}, 'c': 3}}
9 | obj2 = {'a': {'b': {'c2': 1}, 'd': {'e': 2}}}
10 |
11 | assert merge_objects(obj1, obj2, depth=0) == obj2
12 | assert merge_objects(obj1, obj2, depth=1) == {
13 | 'a': {
14 | 'b': { 'c2': 1},
15 | 'c': 3,
16 | 'd': {'e': 2}
17 | }
18 | }
19 | assert merge_objects(obj1, obj2, depth=2) == {
20 | 'a': {
21 | 'b': { 'c1': 1, 'c2': 1},
22 | 'c': 3,
23 | 'd': {'e': 2}
24 | }
25 | }
--------------------------------------------------------------------------------
/dashboards/catalog.yaml:
--------------------------------------------------------------------------------
1 | Resources:
2 | - Url: cost-anomalies/cost-anomalies.yaml
3 | - Url: sustainability-proxy-metrics/sustainability-proxy-metrics.yaml
4 | - Url: data-transfer/DataTransfer-Cost-Analysis-Dashboard.yaml
5 | - Url: aws-marketplace/aws-marketplace-spg.yaml
6 | - Url: extended-support-cost-projection/extended-support-cost-projection.yaml
7 | - Url: graviton-savings-dashboard/graviton_savings_dashboard.yaml
8 | - Url: graviton-savings-dashboard/graviton_legacy.yaml # deprecated
9 | - Url: amazon-connect/amazon-connect.yaml
10 | - Url: support-cases-radar/support-cases-radar.yaml
11 | - Url: health-events/health-events.yaml
12 | - Url: scad-containers-cost-allocation/scad-containers-cost-allocation.yaml
13 | - Url: aws-feeds/aws-feeds.yaml
14 | - Url: focus/focus.yaml
15 | - Url: cora/cora.yaml
16 | - Url: aws-budgets/aws-budgets.yaml
17 | - Url: cudos/CUDOS-v5.yaml
18 | - Url: kpi_dashboard/kpi_dashboard.yaml
19 | - Url: cost-intelligence/cost-intelligence.yaml
20 |
--------------------------------------------------------------------------------
/dashboards/cost-intelligence/cost-intelligence.yaml:
--------------------------------------------------------------------------------
1 | dashboards:
2 | CID:
3 | dependsOn:
4 | datasets:
5 | - compute_savings_plan_eligible_spend
6 | - ec2_running_cost
7 | - s3_view
8 | - summary_view
9 | name: Cost Intelligence
10 | dashboardId: cost_intelligence_dashboard
11 | category: Custom
12 | theme: MIDNIGHT
13 | version: v3.6.0
14 | file: ./cost-intelligence-definition.yaml
15 | nonTaxonomyColumns:
16 | - product_code
17 | - service
18 | - operation
19 | - charge_type
20 | - usage_type
21 | - reservation_a_r_n
22 | - item_description
23 | - pricing_unit
24 | - region
25 | - pricing_term
26 | - linked_account_id
27 | - savings_plan_a_r_n
28 | - tags_json
29 | datasets: {}
30 | views: {}
31 | crawlers: {}
32 |
--------------------------------------------------------------------------------
/dashboards/cudos/CUDOS-v5.yaml:
--------------------------------------------------------------------------------
1 | dashboards:
2 | CUDOSv5:
3 | dependsOn:
4 | datasets:
5 | - hourly_view
6 | - resource_view
7 | - summary_view
8 | name: CUDOS Dashboard v5
9 | dashboardId: cudos-v5
10 | category: Foundational
11 | theme: MIDNIGHT
12 | nonTaxonomyColumns:
13 | - product_code
14 | - service
15 | - operation
16 | - charge_type
17 | - usage_type
18 | - reservation_a_r_n
19 | - item_description
20 | - pricing_unit
21 | - region
22 | - pricing_term
23 | - linked_account_id
24 | - savings_plan_a_r_n
25 | - tags_json
26 | file: ./CUDOS-v5-definition.yaml
27 | version: v5.6.0
28 | datasets: {}
29 | crawlers: {}
30 | views: {}
31 |
--------------------------------------------------------------------------------
/dashboards/graviton-savings-dashboard/graviton_legacy.yaml:
--------------------------------------------------------------------------------
1 | dashboards:
2 | GRAVITON_DASHBOARD:
3 | dependsOn:
4 | datasets:
5 | - graviton_ec2_view
6 | - graviton_elasticache_view
7 | - graviton_opensearch_view
8 | - graviton_rds_view
9 | name: Graviton Opportunities Dashboard
10 |     deprecationNotice: "Graviton Opportunities Dashboard has changed its name to Graviton Savings Dashboard. Please deploy the Graviton Savings Dashboard by running 'cid-cmd deploy --dashboard-id graviton-savings'. After that, you can delete the existing one with 'cid-cmd delete --dashboard-id graviton-opportunities'"
11 | dashboardId: graviton-opportunities
12 | category: Advanced
13 | theme: MIDNIGHT
14 | data: '{}'
15 |
--------------------------------------------------------------------------------
/docs/cid-cmd.md:
--------------------------------------------------------------------------------
1 | # CID-CMD - Cloud Intelligence Dashboards - CoMmanD line tool
2 | CID-CMD is a tool for managing QuickSight Dashboards. It also manages dashboard dependencies such as Datasets, DataSources and Athena Views.
3 |
4 | Syntax:
5 | ```bash
6 | cid-cmd [tool parameters] [command] [command parameters]
7 | ```
8 |
9 |
10 |
11 | ## Commands
12 |
13 | ### deploy
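Deploys a dashboard together with its dependencies. A minimal invocation might look like this (illustrative values; the exact parameters depend on the dashboard):

```bash
cid-cmd deploy \
  --dashboard-id cudos-v5 \
  --athena-database 'athenacurcfn_cur1' \
  --share-with-account
```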
14 |
15 | ### update
16 |
17 | ### delete
18 |
19 | ### map
20 |
21 | ### share
22 |
23 | ### export
24 |
25 |
26 | ## Tool Parameters
27 | #### verbose
28 | Generate a verbose log.
29 | Example:
30 | ```bash
31 | cid-cmd -vv deploy
32 | ```
33 | See `cid.log` in the current folder.
34 |
35 | #### yes
36 | Always answer yes to yes/no questions.
37 |
38 | ## Command Parameters
39 |
40 | #### dashboard-id
41 | QuickSight Dashboard ID (cudos, cost_intelligence_dashboard, kpi_dashboard, ta-organizational-view, trends-dashboard etc)
42 |
43 | #### athena-database
44 | Athena database
45 |
46 | #### athena-workgroup
47 | Athena workgroup
48 |
49 | #### glue-data-catalog
50 | Glue data catalog. Default = AwsDataCatalog
51 |
52 | #### cur-table-name
53 | CUR table name: the name of the Athena table that contains all typical fields of the Cost & Usage Report.
54 |
55 | #### quicksight-datasource-id
56 | QuickSight DataSource ID
57 |
58 | The CID-CMD tool needs a datasource ARN for provisioning and updating DataSets. Only Glue/Athena DataSources in a healthy state (CREATION_SUCCESSFUL|UPDATE_*) can be used.
59 |
60 |
61 | If the datasource parameter is omitted:
62 | - for update operations, CID-CMD will determine the datasource from the existing dataset
63 | - if no datasource is found, CID-CMD will try to create one
64 | - if exactly one datasource exists, CID-CMD will use it
65 | - if multiple datasources are found, CID-CMD will ask you to choose one explicitly
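
For example, the datasource can be pinned explicitly (the ID below is illustrative):

```bash
cid-cmd deploy \
  --dashboard-id cudos-v5 \
  --quicksight-datasource-id CID-CMD-Athena
```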
66 |
67 |
68 | #### quicksight-user
69 | QuickSight user.
70 |
71 | #### dataset-{dataset_name}-id
72 | QuickSight dataset ID for a specific dataset. Can be useful if the tool is not able to list datasets due to a permissions issue.
73 |
74 | #### view-{view_name}-{parameter}
75 | A custom parameter for view creation; the value can use the variable `{account_id}`.
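
For example (taken from the test suite; `{account_id}` is substituted by the tool):

```bash
cid-cmd deploy \
  --dashboard-id compute-optimizer-dashboard \
  --view-compute-optimizer-lambda-lines-s3FolderPath 's3://cid-data-{account_id}/compute_optimizer/compute_optimizer_ec2_lambda'
```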
76 |
77 | #### account-map-source
78 | Values: `csv`, `dummy`, `organization` (used if autodiscovery is impossible)
79 | `csv` - load the account map from a CSV file (see `account-map-file` below)
80 | `dummy` - fill the table with account ids instead of names
81 | `organization` - a one-time read of the Organizations API
82 |
83 | If you do not know what to choose, choose `dummy`, and modify `account_map` later.
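
For example:

```bash
cid-cmd deploy \
  --dashboard-id kpi_dashboard \
  --account-map-source dummy
```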
84 |
85 | #### account-map-file
86 | CSV file path relative to the current directory (used when autodiscovery is impossible and `csv` is selected as the source)
87 |
88 | #### resources
89 | CID resources file (yaml)
90 |
91 | #### share-with-account
92 | Share the dashboard with all users in the current account.
93 | Values: yes/no
94 |
95 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=42"]
3 | build-backend = "setuptools.build_meta"
4 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3>=1.35.86
2 | Click>=8.0
3 | PyYAML
4 | requests
5 | six>=1.15
6 | tqdm
7 | tzlocal>=4.0
8 | InquirerPy
9 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [wheel]
2 | universal = 1
3 |
4 | [metadata]
5 | name = cid-cmd
6 | version = attr: cid._version.__version__
7 | keywords = aws, cmd, cli, cost intelligence dashboards
8 | description = Cloud Intelligence Dashboards deployment helper tool
9 | long_description = file: README.md
10 | long_description_content_type = text/markdown
11 | url = https://github.com/aws-samples/aws-cudos-framework-deployment
12 | author = AWS CUDOS Team
13 | license = MIT
14 | classifiers =
15 | Development Status :: 4 - Beta
16 | License :: OSI Approved :: MIT License
17 | Programming Language :: Python :: 3
18 | Programming Language :: Python :: 3.9
19 | Programming Language :: Python :: 3.10
20 | Programming Language :: Python :: 3.11
21 | Programming Language :: Python :: 3.12
22 |
23 | [options]
24 | include_package_data = True
25 | packages = find_namespace:
26 | install_requires =
27 | setuptools
28 | boto3>=1.35.86
29 | Click>=8.0
30 | PyYAML
31 | requests
32 | tzlocal>=4.0
33 | six>=1.15
34 | InquirerPy
35 | tqdm
36 |
37 | [options.entry_points]
38 | console_scripts =
39 | cid-cmd=cid.cli:main
40 |
41 | cid.plugins =
42 | Core = cid.builtin.core
43 |
44 | [options.package_data]
45 | cid = *.json, *.sql, *.yaml, *.yml
46 |
--------------------------------------------------------------------------------
/terraform-modules/cid-dashboards/.terraform-docs.yml:
--------------------------------------------------------------------------------
1 | output:
2 | file: README.md
3 | mode: inject
4 | formatter: "markdown document"
5 | sections:
6 | hide:
7 | - providers
8 | - modules
9 | sort:
10 | by: required
11 | settings:
12 | anchor: false
--------------------------------------------------------------------------------
/terraform-modules/cid-dashboards/main.tf:
--------------------------------------------------------------------------------
1 | data "aws_s3_bucket" "template_bucket" {
2 | bucket = var.template_bucket
3 | }
4 |
5 | resource "aws_s3_object" "template" {
6 | bucket = data.aws_s3_bucket.template_bucket.bucket
7 | key = var.template_key
8 | source = "${path.module}/../../cfn-templates/cid-cfn.yml"
9 | source_hash = filemd5("${path.module}/../../cfn-templates/cid-cfn.yml")
10 | tags = var.stack_tags
11 | }
12 |
13 | resource "aws_cloudformation_stack" "cid" {
14 | name = var.stack_name
15 | template_url = "https://${data.aws_s3_bucket.template_bucket.bucket_regional_domain_name}/${aws_s3_object.template.key}?hash=${aws_s3_object.template.source_hash}"
16 | capabilities = ["CAPABILITY_NAMED_IAM"]
17 | parameters = var.stack_parameters
18 | iam_role_arn = var.stack_iam_role
19 | policy_body = var.stack_policy_body
20 | policy_url = var.stack_policy_url
21 | # checkov:skip=CKV_AWS_124:Stack event notifications are configurable by the user
22 | notification_arns = var.stack_notification_arns
23 | tags = var.stack_tags
24 | }
25 |
26 |
--------------------------------------------------------------------------------
/terraform-modules/cid-dashboards/outputs.tf:
--------------------------------------------------------------------------------
1 | output "stack_outputs" {
2 | description = "CloudFormation stack outputs (map of strings)"
3 | value = aws_cloudformation_stack.cid.outputs
4 | }
--------------------------------------------------------------------------------
/terraform-modules/cid-dashboards/variables.tf:
--------------------------------------------------------------------------------
1 | variable "stack_name" {
2 | type = string
3 | description = "CloudFormation stack name for Cloud Intelligence Dashboards deployment"
4 | }
5 |
6 | variable "template_bucket" {
7 | type = string
8 | description = "S3 bucket where the Cloudformation template will be uploaded. Must already exist and be in the same region as the stack."
9 | }
10 |
11 | variable "template_key" {
12 | type = string
13 | description = "Name of the S3 path/key where the Cloudformation template will be created. Defaults to cid-cfn.yml"
14 | default = "cid-cfn.yml"
15 | }
16 |
17 | variable "stack_parameters" {
18 | type = map(string)
19 | description = <<-EOF
20 | CloudFormation stack parameters. For the full list of available parameters, refer to
21 | https://github.com/aws-samples/aws-cudos-framework-deployment/blob/main/cfn-templates/cid-cfn.yml.
22 | For most setups, you will want to set the following parameters:
23 | - PrerequisitesQuickSight: yes/no
24 | - PrerequisitesQuickSightPermissions: yes/no
25 | - QuickSightUser: Existing quicksight user
26 | - QuickSightDataSetRefreshSchedule: Cron expression to refresh spice datasets daily outside of business hours. Default is 4 AM UTC, which should work for most customers in US and EU time zones
27 | - CURBucketPath: Leave as default if the CUR was created with CloudFormation (cur-aggregation.yaml). For a manually created CUR, the path must point to the directory that contains the years partition (s3://curbucketname/prefix/curname/curname/).
28 | - OptimizationDataCollectionBucketPath: The S3 path to the bucket created by the Cost Optimization Data Collection Lab. The path needs to point to a folder containing the /optics-data-collector folder. Required for TAO and Compute Optimizer dashboards.
29 | - DataBuketsKmsKeyArns: Comma-delimited list of KMS key ARNs ("*" is also valid). Include any KMS keys used to encrypt your CUR or Cost Optimization Data S3 data
30 | - DeployCUDOSDashboard: (yes/no, default no)
31 | - DeployCostIntelligenceDashboard: (yes/no, default no)
32 | - DeployKPIDashboard: (yes/no, default no)
33 | - DeployTAODashboard: (yes/no, default no)
34 | - DeployComputeOptimizerDashboard: (yes/no, default no)
35 | - PermissionsBoundary: Leave blank if you don't need to set a boundary for roles
36 | - RolePath: Path for roles where PermissionBoundaries can limit location
37 | EOF
38 | }
39 |
40 | variable "stack_tags" {
41 | type = map(string)
42 | description = "Tag key-value pairs to apply to the stack"
43 | default = null
44 | }
45 |
46 | variable "stack_policy_body" {
47 | type = string
48 | description = "String containing the stack policy body. Conflicts with stack_policy_url."
49 | default = null
50 | }
51 |
52 | variable "stack_policy_url" {
53 | type = string
54 | description = "Location of a file containing the stack policy body. Conflicts with stack_policy_body."
55 | default = null
56 | }
57 |
58 | variable "stack_notification_arns" {
59 | type = list(string)
60 | description = "A list of SNS topic ARNs to publish stack related events."
61 | default = []
62 | }
63 |
64 | variable "stack_iam_role" {
65 | type = string
66 | description = "The ARN of an IAM role that AWS CloudFormation assumes to create the stack (default behavior is to use the previous role if available, or current user permissions otherwise)."
67 | default = null
68 | }
69 |
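# A hedged usage sketch (illustrative; the bucket name and parameter values
# below are assumptions, see the stack_parameters description above):
#
# module "cid_dashboards" {
#   source          = "./terraform-modules/cid-dashboards"
#   stack_name      = "Cloud-Intelligence-Dashboards"
#   template_bucket = "my-existing-template-bucket" # must exist in the stack's region
#   stack_parameters = {
#     "PrerequisitesQuickSight"            = "yes"
#     "PrerequisitesQuickSightPermissions" = "yes"
#     "QuickSightUser"                     = "existing-quicksight-user"
#     "DeployCUDOSDashboard"               = "yes"
#   }
# }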
--------------------------------------------------------------------------------
/terraform-modules/cid-dashboards/versions.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.0"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 3.0"
7 | }
8 | }
9 | }
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-destination/.terraform-docs.yml:
--------------------------------------------------------------------------------
1 | output:
2 | file: README.md
3 | mode: inject
4 | formatter: "markdown document"
5 | sections:
6 | hide:
7 | - modules
8 | sort:
9 | by: required
10 | settings:
11 | anchor: false
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-destination/outputs.tf:
--------------------------------------------------------------------------------
1 | output "cur_report_arn" {
2 | description = "ARN of the Cost and Usage Report"
3 | value = var.create_cur ? aws_cur_report_definition.this[0].arn : null
4 | }
5 |
6 | output "cur_bucket_arn" {
7 | description = "ARN of the S3 Bucket where the Cost and Usage Report is delivered"
8 | value = aws_s3_bucket.this.arn
9 | }
10 |
11 | output "cur_bucket_name" {
12 | description = "Name of the S3 Bucket where the Cost and Usage Report is delivered"
13 | value = aws_s3_bucket.this.bucket
14 | }
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-destination/variables.tf:
--------------------------------------------------------------------------------
1 | variable "source_account_ids" {
2 | type = list(string)
3 | description = "List of all source accounts that will replicate CUR Data. Ex: [12345678912,98745612312,...] (fill only on Destination Account)"
4 | }
5 |
6 | variable "resource_prefix" {
7 | type = string
8 | description = "Prefix used for all named resources, including S3 Bucket"
9 | default = "cid"
10 | }
11 |
12 | variable "create_cur" {
13 | type = bool
14 | description = "Whether to create a local CUR in the destination account or not. Set this to true if the destination account is NOT covered in the CUR of the source accounts"
15 | }
16 |
17 | variable "cur_name_suffix" {
18 | type = string
19 | description = "Suffix used to name the local CUR report if create_cur is `true`"
20 | default = "cur"
21 | }
22 |
23 | variable "s3_access_logging" {
24 | type = object({
25 | enabled = bool
26 | bucket = string
27 | prefix = string
28 | })
29 | description = "S3 Access Logging configuration for the CUR bucket"
30 | default = {
31 | enabled = false
32 | bucket = null
33 | prefix = null
34 | }
35 | }
36 |
37 | variable "kms_key_id" {
38 | type = string
39 | default = null
40 | description = <<-EOF
41 | !!!WARNING!!! EXPERIMENTAL - Do not use unless you know what you are doing. The correct key policies and IAM permissions
42 | on the S3 replication role must be configured external to this module.
43 | - If create_cur is true, the "billingreports.amazonaws.com" service must have access to encrypt S3 objects with the key ID provided
44 | - See https://docs.aws.amazon.com/AmazonS3/latest/userguide/replication-config-for-kms-objects.html for information
45 | on permissions required for replicating KMS-encrypted objects
46 | EOF
47 | }
48 |
49 | variable "enable_split_cost_allocation_data" {
50 | type = bool
51 | description = "Enable split cost allocation data for ECS and EKS for this CUR report"
52 | default = false
53 | }
54 |
55 | variable "tags" {
56 | type = map(string)
57 | description = "Map of tags to apply to module resources"
58 | default = {}
59 | }
60 |
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-destination/versions.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.0"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 3.0"
7 | configuration_aliases = [
8 | aws.useast1,
9 | ]
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-source/.terraform-docs.yml:
--------------------------------------------------------------------------------
1 | output:
2 | file: README.md
3 | mode: inject
4 | formatter: "markdown document"
5 | sections:
6 | hide:
7 | - modules
8 | sort:
9 | by: required
10 | settings:
11 | anchor: false
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-source/outputs.tf:
--------------------------------------------------------------------------------
1 | output "cur_report_arn" {
2 | description = "ARN of the Cost and Usage Report"
3 | value = aws_cur_report_definition.this.arn
4 | }
5 |
6 | output "cur_bucket_arn" {
7 | description = "ARN of the S3 Bucket where the Cost and Usage Report is delivered"
8 | value = aws_s3_bucket.this.arn
9 | }
10 |
11 | output "cur_bucket_name" {
12 | description = "Name of the S3 Bucket where the Cost and Usage Report is delivered"
13 | value = aws_s3_bucket.this.bucket
14 | }
15 |
16 | output "replication_role_arn" {
17 | description = "ARN of the IAM role created for S3 replication"
18 | value = aws_iam_role.replication.arn
19 | }
20 |
21 | output "replication_role_name" {
22 |   description = "Name of the IAM role created for S3 replication"
23 | value = aws_iam_role.replication.name
24 | }
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-source/variables.tf:
--------------------------------------------------------------------------------
1 | variable "destination_bucket_arn" {
2 | type = string
3 | description = "Destination Bucket ARN"
4 | }
5 |
6 | variable "resource_prefix" {
7 | type = string
8 | default = "cid"
9 | description = "Prefix used for all named resources, including the S3 Bucket"
10 | }
11 |
12 | variable "cur_name_suffix" {
13 | type = string
14 | default = "cur"
15 | description = "Suffix used to name the CUR report"
16 | }
17 |
18 | variable "s3_access_logging" {
19 | type = object({
20 | enabled = bool
21 | bucket = string
22 | prefix = string
23 | })
24 | description = "S3 Access Logging configuration for the CUR bucket"
25 | default = {
26 | enabled = false
27 | bucket = null
28 | prefix = null
29 | }
30 | }
31 |
32 | variable "kms_key_id" {
33 | type = string
34 | default = null
35 | description = <<-EOF
36 | !!!WARNING!!! EXPERIMENTAL - Do not use unless you know what you are doing. The correct key policies and IAM permissions
37 | on the S3 replication role must be configured external to this module.
38 | - The "billingreports.amazonaws.com" service must have access to encrypt objects with the key ID provided
39 | - See https://docs.aws.amazon.com/AmazonS3/latest/userguide/replication-config-for-kms-objects.html for information
40 | on permissions required for replicating KMS-encrypted objects
41 | EOF
42 | }
43 |
44 | variable "enable_split_cost_allocation_data" {
45 | type = bool
46 | description = "Enable split cost allocation data for ECS and EKS for this CUR report"
47 | default = false
48 | }
49 |
50 | variable "tags" {
51 | type = map(string)
52 | description = "Map of tags to apply to module resources"
53 | default = {}
54 | }
55 |
--------------------------------------------------------------------------------
/terraform-modules/cur-setup-source/versions.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.0"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 3.0"
7 | configuration_aliases = [
8 | aws.useast1,
9 | ]
10 | }
11 | }
12 | }
13 |
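# A hedged usage sketch (illustrative): because this module declares the provider
# alias aws.useast1 (CUR report definitions must be created in us-east-1), a
# caller has to pass that provider explicitly. The destination module reference
# below is an assumption.
#
# provider "aws" {
#   alias  = "useast1"
#   region = "us-east-1"
# }
#
# module "cur_source" {
#   source                 = "./terraform-modules/cur-setup-source"
#   destination_bucket_arn = module.cur_destination.cur_bucket_arn
#   providers = {
#     aws.useast1 = aws.useast1
#   }
# }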
--------------------------------------------------------------------------------