├── .github
│   ├── CODEOWNERS
│   ├── CODE_OF_CONDUCT.md
│   ├── dependabot.yml
│   └── workflows
│       ├── release.yml
│       └── test.yml
├── examples
│   ├── data-sources
│   │   ├── superset_roles
│   │   │   └── data-source.tf
│   │   ├── superset_users
│   │   │   └── data-source.tf
│   │   ├── superset_databases
│   │   │   └── data-source.tf
│   │   ├── superset_datasets
│   │   │   └── data-source.tf
│   │   └── superset_role_permissions
│   │       └── data-source.tf
│   ├── resources
│   │   ├── superset_role
│   │   │   ├── resource.tf
│   │   │   └── import.sh
│   │   ├── superset_user
│   │   │   ├── import.sh
│   │   │   └── resource.tf
│   │   ├── superset_dataset
│   │   │   ├── import.sh
│   │   │   └── resource.tf
│   │   ├── superset_database
│   │   │   ├── import.sh
│   │   │   └── resource.tf
│   │   ├── superset_meta_database
│   │   │   ├── import.sh
│   │   │   └── resource.tf
│   │   └── superset_role_permissions
│   │       ├── import.sh
│   │       └── resource.tf
│   ├── provider
│   │   └── provider.tf
│   └── README.md
├── docker-compose
│   └── link.md
├── terraform-registry-manifest.json
├── GNUmakefile
├── tools
│   └── tools.go
├── .vscode
│   └── launch.json
├── catalog-info.yaml
├── .gitignore
├── .copywrite.hcl
├── .golangci.yml
├── docs
│   ├── data-sources
│   │   ├── roles.md
│   │   ├── databases.md
│   │   ├── role_permissions.md
│   │   ├── users.md
│   │   └── datasets.md
│   ├── resources
│   │   ├── role.md
│   │   ├── user.md
│   │   ├── role_permissions.md
│   │   ├── database.md
│   │   ├── meta_database.md
│   │   └── dataset.md
│   └── index.md
├── CHANGELOG.md
├── internal
│   └── provider
│       ├── provider_test.go
│       ├── databases_data_source_test.go
│       ├── role_resource_test.go
│       ├── role_permissions_data_source_test.go
│       ├── users_data_source_test.go
│       ├── roles_data_source_test.go
│       ├── roles_data_source.go
│       ├── datasets_data_source_test.go
│       ├── role_permissions_data_source.go
│       ├── user_resource_test.go
│       ├── users_data_source.go
│       ├── databases_data_source.go
│       ├── databases_resource_test.go
│       ├── role_permissions_resource_test.go
│       ├── provider.go
│       ├── role_resource.go
│       ├── dataset_resource_test.go
│       ├── datasets_data_source.go
│       ├── dataset_resource.go
│       ├── user_resource.go
│       ├── databases_resource.go
│       └── role_permissions_resource.go
├── main.go
├── README.md
├── .goreleaser.yml
├── go.mod
└── LICENSE
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @hashicorp/terraform-devex
2 |
--------------------------------------------------------------------------------
/examples/data-sources/superset_roles/data-source.tf:
--------------------------------------------------------------------------------
1 | data "superset_roles" "all" {}
--------------------------------------------------------------------------------
/examples/data-sources/superset_users/data-source.tf:
--------------------------------------------------------------------------------
1 | data "superset_users" "example" {}
--------------------------------------------------------------------------------
/docker-compose/link.md:
--------------------------------------------------------------------------------
1 | [docker-compose files page](https://github.com/apache/superset.git)
--------------------------------------------------------------------------------
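For local testing, one way to get a Superset instance is the docker-compose setup from the upstream apache/superset repository linked above. A minimal sketch follows; the compose file name and default port/credentials are assumptions that may differ between Superset releases.

```shell
# Clone upstream Superset and start it with Docker Compose.
# The compose file name below is an assumption; check the apache/superset
# repository for the compose files it currently ships.
git clone https://github.com/apache/superset.git
cd superset
docker compose -f docker-compose-non-dev.yml up -d
# Superset is then typically reachable at http://localhost:8088 (admin/admin by default).
```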
/examples/data-sources/superset_databases/data-source.tf:
--------------------------------------------------------------------------------
1 | data "superset_databases" "example" {}
--------------------------------------------------------------------------------
/examples/data-sources/superset_datasets/data-source.tf:
--------------------------------------------------------------------------------
1 | data "superset_datasets" "example" {}
--------------------------------------------------------------------------------
/examples/resources/superset_role/resource.tf:
--------------------------------------------------------------------------------
1 | resource "superset_role" "example" {
2 | name = "Example-Role-Name"
3 | }
--------------------------------------------------------------------------------
/terraform-registry-manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": 1,
3 | "metadata": {
4 | "protocol_versions": ["6.0"]
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/examples/data-sources/superset_role_permissions/data-source.tf:
--------------------------------------------------------------------------------
1 | data "superset_role_permissions" "all" {
2 | role_name = "Example-DB-Connect"
3 | }
--------------------------------------------------------------------------------
/GNUmakefile:
--------------------------------------------------------------------------------
1 | default: testacc
2 |
3 | # Run acceptance tests
4 | .PHONY: testacc
5 | testacc:
6 | TF_ACC=1 go test ./... -v $(TESTARGS) -timeout 120m
7 |
--------------------------------------------------------------------------------
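Because the `testacc` target passes `$(TESTARGS)` straight through to `go test`, a single test can be run by narrowing the selection. A sketch, using the environment variables `internal/provider/provider_test.go` expects (the values here are placeholders; the test suite mocks the Superset API, so they only need to be set):

```shell
# The acceptance tests require these variables to be present even though
# the HTTP calls are mocked (see the httpmock usage in the *_test.go files).
export SUPERSET_HOST="http://superset-host"
export SUPERSET_USERNAME="fake-username"
export SUPERSET_PASSWORD="fake-password"

# Run only the role resource test via TESTARGS.
make testacc TESTARGS='-run TestAccRoleResource'
```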
/examples/resources/superset_role/import.sh:
--------------------------------------------------------------------------------
1 | # A role can be imported by specifying its numeric ID.
2 | terraform import superset_role.example 632
--------------------------------------------------------------------------------
/examples/resources/superset_user/import.sh:
--------------------------------------------------------------------------------
1 | # A user can be imported by specifying the user's numeric ID.
2 | terraform import superset_user.example_user 213
--------------------------------------------------------------------------------
/examples/resources/superset_dataset/import.sh:
--------------------------------------------------------------------------------
1 | # A dataset can be imported by specifying its numeric ID.
2 | terraform import superset_dataset.example 123
--------------------------------------------------------------------------------
/examples/resources/superset_database/import.sh:
--------------------------------------------------------------------------------
1 | # A database connection can be imported by specifying its numeric ID.
2 | terraform import superset_database.example 337
--------------------------------------------------------------------------------
/examples/resources/superset_meta_database/import.sh:
--------------------------------------------------------------------------------
1 | # A meta database connection can be imported by specifying its numeric database ID.
2 | terraform import superset_meta_database.example 1949
--------------------------------------------------------------------------------
/examples/resources/superset_role_permissions/import.sh:
--------------------------------------------------------------------------------
1 | # Role permissions can be imported by specifying the numeric ID of the role.
2 | terraform import superset_role_permissions.example 129
--------------------------------------------------------------------------------
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Code of Conduct
2 |
3 | HashiCorp Community Guidelines apply to you when interacting with the community here on GitHub and contributing code.
4 |
5 | Please read the full text at https://www.hashicorp.com/community-guidelines
6 |
--------------------------------------------------------------------------------
/tools/tools.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | //go:build tools
5 |
6 | package tools
7 |
8 | import (
9 | // Documentation generation
10 | _ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs"
11 | )
12 |
--------------------------------------------------------------------------------
/examples/provider/provider.tf:
--------------------------------------------------------------------------------
1 | provider "superset" {
2 | host = "https://domain.com" # Replace with your Superset instance URL
3 | username = "username" # Replace with your Superset username
4 | password = "password" # Replace with your Superset password
5 | }
6 |
--------------------------------------------------------------------------------
/examples/resources/superset_user/resource.tf:
--------------------------------------------------------------------------------
1 | resource "superset_user" "example_user" {
2 | username = "example.user"
3 | first_name = "Example"
4 | last_name = "Sample"
5 | email = "example.sample@example.com"
6 | password = "ExampleSamplePass123!"
7 | active = true
8 | roles = [3, 4]
9 | }
--------------------------------------------------------------------------------
/examples/resources/superset_role_permissions/resource.tf:
--------------------------------------------------------------------------------
1 | resource "superset_role_permissions" "example" {
2 | role_name = "Example-DB-Connect"
3 | resource_permissions = [
4 | { permission = "database_access", view_menu = "[Trino].(id:34)" },
5 | { permission = "schema_access", view_menu = "[Trino].[devstorage]" },
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Debug Terraform Provider",
6 | "type": "go",
7 | "request": "launch",
8 | "mode": "debug",
9 | "program": "${workspaceFolder}/main.go",
10 | "env": {},
11 | "args": ["--debug"]
12 | }
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/catalog-info.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: backstage.io/v1alpha1
2 | kind: Component
3 | metadata:
4 | namespace: default
5 | name: terraform-provider-superset
6 | title: Terraform Provider Superset
7 | description: Terraform provider to manage Apache Superset Server resources
8 | links:
9 | - title: Terraform Registry
10 | url: https://registry.terraform.io/providers/platacard/superset/latest
11 | icon: terraform
12 | spec:
13 | type: library
14 | owner: group:default/devops
15 | lifecycle: production
16 |
--------------------------------------------------------------------------------
/examples/resources/superset_database/resource.tf:
--------------------------------------------------------------------------------
1 | resource "superset_database" "example" {
2 | connection_name = "SuperSetDBConnection"
3 | db_engine = "postgresql"
4 | db_user = "supersetuser"
5 | db_pass = "dbpassword"
6 | db_host = "pg.db.ro.domain.com"
7 | db_port = 5432
8 | db_name = "supersetdb"
9 | allow_ctas = false
10 | allow_cvas = false
11 | allow_dml = false
12 | allow_run_async = true
13 | expose_in_sqllab = false
14 | }
--------------------------------------------------------------------------------
/examples/resources/superset_meta_database/resource.tf:
--------------------------------------------------------------------------------
1 | resource "superset_meta_database" "example" {
2 | database_name = "SuperSetDBConnection"
3 | sqlalchemy_uri = "superset://" # optional
4 | allowed_databases = [
5 | "[Team]-Service1-Dev-RO[d_team_service1_db]",
6 | "[Team]-Service2-Prod-RO[d_team_market_service2_db]"
7 | ]
8 | expose_in_sqllab = true
9 | allow_ctas = false
10 | allow_cvas = false
11 | allow_dml = false
12 | allow_run_async = true
13 | is_managed_externally = false
14 | }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.dll
2 | *.exe
3 | .DS_Store
4 | example.tf
5 | terraform.tfplan
6 | terraform.tfstate
7 | bin/
8 | dist/
9 | modules-dev/
10 | /pkg/
11 | website/.vagrant
12 | website/.bundle
13 | website/build
14 | website/node_modules
15 | .vagrant/
16 | *.backup
17 | ./*.tfstate
18 | .terraform/
19 | *.log
20 | *.bak
21 | *~
22 | .*.swp
23 | .idea
24 | *.iml
25 | *.test
26 | *.iml
27 |
28 | website/vendor
29 |
30 | # Test exclusions
31 | !command/test-fixtures/**/*.tfstate
32 | !command/test-fixtures/**/.terraform/
33 |
34 | # Keep windows files with windows line endings
35 | *.winfile eol=crlf
36 |
--------------------------------------------------------------------------------
/.copywrite.hcl:
--------------------------------------------------------------------------------
1 | # NOTE: This file is for HashiCorp specific licensing automation and can be deleted after creating a new repo with this template.
2 | schema_version = 1
3 |
4 | project {
5 | license = "MPL-2.0"
6 | copyright_year = 2021
7 |
8 | header_ignore = [
9 | # examples used within documentation (prose)
10 | "examples/**",
11 |
12 | # GitHub issue template configuration
13 | ".github/ISSUE_TEMPLATE/*.yml",
14 |
15 | # golangci-lint tooling configuration
16 | ".golangci.yml",
17 |
18 | # GoReleaser tooling configuration
19 | ".goreleaser.yml",
20 | ]
21 | }
22 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # See GitHub's documentation for more information on this file:
2 | # https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
3 | version: 2
4 | updates:
5 | - package-ecosystem: "gomod"
6 | directory: "/"
7 | schedule:
8 | interval: "daily"
9 | - package-ecosystem: "github-actions"
10 | directory: "/"
11 | schedule:
12 | interval: "daily"
13 | # TODO: Dependabot only updates hashicorp GHAs in the template repository, the following lines can be removed for consumers of this template
14 | allow:
15 | - dependency-name: "hashicorp/*"
16 |
--------------------------------------------------------------------------------
/.golangci.yml:
--------------------------------------------------------------------------------
1 | # Visit https://golangci-lint.run/ for usage documentation
2 | # and information on other useful linters
3 | issues:
4 | max-same-issues: 0
5 |
6 | linters:
7 | disable-all: true
8 | enable:
9 | - durationcheck
10 | - errcheck
11 | - forcetypeassert
12 | - godot
13 | - gofmt
14 | - gosimple
15 | - ineffassign
16 | - makezero
17 | - misspell
18 | - nilerr
19 | - predeclared
20 | - staticcheck
21 | - unconvert
22 | - unparam
23 | - unused
24 | - govet
25 | - copyloopvar
26 | - usetesting
27 |
28 | linters-settings:
29 | usetesting:
30 | os-setenv: true
31 | copyloopvar:
32 | check-alias: true
33 |
--------------------------------------------------------------------------------
/examples/README.md:
--------------------------------------------------------------------------------
1 | # Examples
2 |
3 | This directory contains examples that are mostly used for documentation, but can also be run/tested manually via the Terraform CLI.
4 |
5 | The document generation tool looks for files in the following locations by default. All other *.tf files besides the ones mentioned below are ignored by the documentation tool. This is useful for creating examples that can run and/or are testable even if some parts are not relevant for the documentation.
6 |
7 | * **provider/provider.tf** example file for the provider index page
8 | * **data-sources/`full data source name`/data-source.tf** example file for the named data source page
9 | * **resources/`full resource name`/resource.tf** example file for the named resource page
10 |
--------------------------------------------------------------------------------
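The generation itself is wired up through `go generate` in `main.go`, which formats these examples and then runs tfplugindocs:

```shell
# Formats ./examples and regenerates ./docs from the provider schemas plus the example files above.
go generate ./...

# Equivalent direct invocation of the documentation tool, as declared in main.go.
go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate -provider-name superset
```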
/docs/data-sources/roles.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_roles Data Source - superset"
4 | subcategory: ""
5 | description: |-
6 | Fetches the list of roles from Superset.
7 | ---
8 |
9 | # superset_roles (Data Source)
10 |
11 | Fetches the list of roles from Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | data "superset_roles" "all" {}
17 | ```
18 |
19 |
20 | ## Schema
21 |
22 | ### Read-Only
23 |
24 | - `roles` (Attributes List) List of roles. (see [below for nested schema](#nestedatt--roles))
25 |
26 |
27 | ### Nested Schema for `roles`
28 |
29 | Read-Only:
30 |
31 | - `id` (Number) Numeric identifier of the role.
32 | - `name` (String) Name of the role.
33 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 0.3.0 (TBD)
2 |
3 | FEATURES:
4 | * **New Resource**: `superset_user` - Manage Superset users
5 | * **New Data Source**: `superset_users` - Fetch users from Superset
6 |
7 | ## 0.2.0 (2025-08-29)
8 |
9 | FEATURES:
10 | * **New Resource**: `superset_meta_database` - Support for Superset meta database connections for cross-database queries
11 | * **New Resource**: `superset_dataset` - Manage individual Superset datasets
12 | * **New Data Source**: `superset_datasets` - Fetch all datasets from Superset
13 |
14 | IMPROVEMENTS:
15 | * Added comprehensive test coverage for meta database resource
16 | * Added schema-level default values for boolean attributes
17 | * Added global caching for database API calls to improve performance across multiple client instances
18 | * Added pagination support (page_size:5000) to datasets API calls
19 |
--------------------------------------------------------------------------------
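The pagination mentioned in 0.2.0 uses Superset's Rison-style `q` query parameter, the same shape visible in the test fixtures (`?q=(page_size:5000)`). A sketch of the request the provider issues internally; the `/api/v1/dataset/` path and the bearer-token header are assumptions based on the Superset REST API:

```shell
# Illustrative request shape only; the provider performs this call itself
# after logging in via /api/v1/security/login to obtain the access token.
curl -H "Authorization: Bearer $TOKEN" \
  "http://superset-host/api/v1/dataset/?q=(page_size:5000)"
```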
/docs/data-sources/databases.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_databases Data Source - superset"
4 | subcategory: ""
5 | description: |-
6 | Fetches the list of databases from Superset.
7 | ---
8 |
9 | # superset_databases (Data Source)
10 |
11 | Fetches the list of databases from Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | data "superset_databases" "example" {}
17 | ```
18 |
19 |
20 | ## Schema
21 |
22 | ### Read-Only
23 |
24 | - `databases` (Attributes List) List of databases. (see [below for nested schema](#nestedatt--databases))
25 |
26 |
27 | ### Nested Schema for `databases`
28 |
29 | Read-Only:
30 |
31 | - `database_name` (String) Name of the database.
32 | - `id` (Number) Numeric identifier of the database.
33 |
--------------------------------------------------------------------------------
/internal/provider/provider_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "github.com/hashicorp/terraform-plugin-framework/providerserver"
5 | "github.com/hashicorp/terraform-plugin-go/tfprotov6"
6 | "os"
7 | "testing"
8 | )
9 |
10 | const providerConfig = `
11 | provider "superset" {
12 | host = "http://superset-host"
13 | username = "fake-username"
14 | password = "fake-password"
15 | }
16 | `
17 |
18 | var (
19 | testAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
20 | "superset": providerserver.NewProtocol6WithError(New("test")()),
21 | }
22 | )
23 |
24 | func testAccPreCheck(t *testing.T) {
25 | if v := os.Getenv("SUPERSET_USERNAME"); v == "" {
26 | t.Fatal("SUPERSET_USERNAME must be set for acceptance tests")
27 | }
28 | if v := os.Getenv("SUPERSET_PASSWORD"); v == "" {
29 | t.Fatal("SUPERSET_PASSWORD must be set for acceptance tests")
30 | }
31 | if v := os.Getenv("SUPERSET_HOST"); v == "" {
32 | t.Fatal("SUPERSET_HOST must be set for acceptance tests")
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/docs/resources/role.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_role Resource - superset"
4 | subcategory: ""
5 | description: |-
6 | Manages a role in Superset.
7 | ---
8 |
9 | # superset_role (Resource)
10 |
11 | Manages a role in Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | resource "superset_role" "example" {
17 | name = "Example-Role-Name"
18 | }
19 | ```
20 |
21 |
22 | ## Schema
23 |
24 | ### Required
25 |
26 | - `name` (String) Name of the role.
27 |
28 | ### Read-Only
29 |
30 | - `id` (Number) Numeric identifier of the role.
31 | - `last_updated` (String) Timestamp of the last update.
32 |
33 | ## Import
34 |
35 | Import is supported using the following syntax:
36 |
37 | The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
38 |
39 | ```shell
40 | # A role can be imported by specifying its numeric ID.
41 | terraform import superset_role.example 632
42 | ```
43 |
--------------------------------------------------------------------------------
/docs/data-sources/role_permissions.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_role_permissions Data Source - superset"
4 | subcategory: ""
5 | description: |-
6 | Fetches the permissions for a role from Superset.
7 | ---
8 |
9 | # superset_role_permissions (Data Source)
10 |
11 | Fetches the permissions for a role from Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | data "superset_role_permissions" "all" {
17 | role_name = "Example-DB-Connect"
18 | }
19 | ```
20 |
21 |
22 | ## Schema
23 |
24 | ### Required
25 |
26 | - `role_name` (String) Name of the role.
27 |
28 | ### Read-Only
29 |
30 | - `permissions` (Attributes List) List of permissions. (see [below for nested schema](#nestedatt--permissions))
31 |
32 |
33 | ### Nested Schema for `permissions`
34 |
35 | Read-Only:
36 |
37 | - `id` (Number) Numeric identifier of the permission.
38 | - `permission_name` (String) Name of the permission.
39 | - `view_menu_name` (String) Name of the view menu associated with the permission.
40 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset Provider"
4 | description: |-
5 | Superset provider for managing Superset resources.
6 | ---
7 |
8 | # superset Provider
9 |
10 | Superset provider for managing Superset resources.
11 |
12 | ## Example Usage
13 |
14 | ```terraform
15 | provider "superset" {
16 | host = "https://domain.com" # Replace with your Superset instance URL
17 | username = "username" # Replace with your Superset username
18 | password = "password" # Replace with your Superset password
19 | }
20 | ```
21 |
22 |
23 | ## Schema
24 |
25 | ### Optional
26 |
27 | - `host` (String) The URL of the Superset instance. This should include the protocol (http or https) and the hostname or IP address. Example: 'https://superset.example.com'.
28 | - `password` (String, Sensitive) The password to authenticate with Superset. This value is sensitive and will not be displayed in logs or state files.
29 | - `username` (String) The username to authenticate with Superset. This user should have the necessary permissions to manage resources within Superset.
30 |
--------------------------------------------------------------------------------
/docs/data-sources/users.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_users Data Source - superset"
4 | subcategory: ""
5 | description: |-
6 | Fetches the list of users from Superset.
7 | ---
8 |
9 | # superset_users (Data Source)
10 |
11 | Fetches the list of users from Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | data "superset_users" "example" {}
17 | ```
18 |
19 |
20 | ## Schema
21 |
22 | ### Read-Only
23 |
24 | - `users` (Attributes List) List of users. (see [below for nested schema](#nestedatt--users))
25 |
26 |
27 | ### Nested Schema for `users`
28 |
29 | Read-Only:
30 |
31 | - `active` (Boolean) Whether the user is active.
32 | - `email` (String) Email address of the user.
33 | - `first_name` (String) First name of the user.
34 | - `id` (Number) Numeric identifier of the user.
35 | - `last_name` (String) Last name of the user.
36 | - `roles` (Attributes List) List of roles assigned to the user. (see [below for nested schema](#nestedatt--users--roles))
37 | - `username` (String) Username of the user.
38 |
39 |
40 | ### Nested Schema for `users.roles`
41 |
42 | Read-Only:
43 |
44 | - `id` (Number) Numeric identifier of the role.
45 | - `name` (String) Name of the role.
46 |
--------------------------------------------------------------------------------
/examples/resources/superset_dataset/resource.tf:
--------------------------------------------------------------------------------
1 | resource "superset_dataset" "example" {
2 | table_name = "example_table"
3 | database_name = "PostgreSQL"
4 | schema = "public"
5 | sql = <<-EOF
6 | WITH RECURSIVE recursive_sequence AS (
7 | SELECT 1 as level, 1 as value, 'Level One' as description
8 | UNION ALL
9 | SELECT
10 | level + 1,
11 | value * 2,
12 | CASE
13 | WHEN level + 1 <= 3 THEN CONCAT('Level ',
14 | CASE level + 1
15 | WHEN 2 THEN 'Two'
16 | WHEN 3 THEN 'Three'
17 | END)
18 | ELSE 'Max Level'
19 | END
20 | FROM recursive_sequence
21 | WHERE level < 5
22 | ),
23 | calculated_data AS (
24 | SELECT
25 | level,
26 | value,
27 | description,
28 | value * 1.5 as weighted_value,
29 | ROUND(LN(value::numeric), 2) as log_value, -- changed to LN with a cast to numeric
30 | ROW_NUMBER() OVER (ORDER BY level) as row_num
31 | FROM recursive_sequence
32 | )
33 | SELECT
34 | level as hierarchy_level,
35 | value as base_value,
36 | weighted_value,
37 | log_value,
38 | description as level_description,
39 | CASE
40 | WHEN row_num % 2 = 0 THEN 'Even'
41 | ELSE 'Odd'
42 | END as parity,
43 | NOW() as generated_timestamp
44 | FROM calculated_data
45 | ORDER BY level
46 | EOF
47 | }
--------------------------------------------------------------------------------
/docs/data-sources/datasets.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_datasets Data Source - superset"
4 | subcategory: ""
5 | description: |-
6 | Fetches all datasets from Superset.
7 | ---
8 |
9 | # superset_datasets (Data Source)
10 |
11 | Fetches all datasets from Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | data "superset_datasets" "example" {}
17 | ```
18 |
19 |
20 | ## Schema
21 |
22 | ### Read-Only
23 |
24 | - `datasets` (Attributes List) List of Superset datasets. (see [below for nested schema](#nestedatt--datasets))
25 |
26 |
27 | ### Nested Schema for `datasets`
28 |
29 | Read-Only:
30 |
31 | - `database_id` (Number) Database ID to which the dataset belongs.
32 | - `database_name` (String) Database name to which the dataset belongs.
33 | - `id` (Number) Dataset ID.
34 | - `kind` (String) Kind of the dataset.
35 | - `owners` (Attributes List) List of owners of the dataset. (see [below for nested schema](#nestedatt--datasets--owners))
36 | - `schema` (String) Schema of the dataset.
37 | - `sql` (String) SQL query of the dataset.
38 | - `table_name` (String) Name of the table.
39 |
40 |
41 | ### Nested Schema for `datasets.owners`
42 |
43 | Read-Only:
44 |
45 | - `first_name` (String) First name of the owner.
46 | - `id` (Number) Owner ID.
47 | - `last_name` (String) Last name of the owner.
48 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | # Terraform Provider release workflow.
2 | name: Release
3 |
4 | # This GitHub action creates a release when a tag that matches the pattern
5 | # "v*" (e.g. v0.1.0) is created.
6 | on:
7 | push:
8 | tags:
9 | - 'v*'
10 |
11 | # Releases need permissions to read and write the repository contents.
12 | # GitHub considers creating releases and uploading assets as writing contents.
13 | permissions:
14 | contents: write
15 |
16 | jobs:
17 | goreleaser:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
21 | with:
22 | # Allow goreleaser to access older tag information.
23 | fetch-depth: 0
24 | - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5.0.1
25 | with:
26 | go-version-file: 'go.mod'
27 | cache: true
28 | - name: Import GPG key
29 | uses: crazy-max/ghaction-import-gpg@01dd5d3ca463c7f10f7f4f7b4f177225ac661ee4 # v6.1.0
30 | id: import_gpg
31 | with:
32 | gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
33 | passphrase: ${{ secrets.PASSPHRASE }}
34 | - name: Run GoReleaser
35 | uses: goreleaser/goreleaser-action@7ec5c2b0c6cdda6e8bbb49444bc797dd33d74dd8 # v5.0.0
36 | with:
37 | args: release --clean
38 | env:
39 | # GitHub sets the GITHUB_TOKEN secret automatically.
40 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
41 | GPG_FINGERPRINT: ${{ steps.import_gpg.outputs.fingerprint }}
42 |
--------------------------------------------------------------------------------
/main.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package main
5 |
6 | import (
7 | "context"
8 | "flag"
9 | "log"
10 |
11 | "github.com/hashicorp/terraform-plugin-framework/providerserver"
12 | "terraform-provider-superset/internal/provider"
13 | )
14 |
15 | // Run "go generate" to format example terraform files and generate the docs for the registry/website
16 |
17 | // If you do not have terraform installed, you can remove the formatting command, but it's suggested to
18 | // ensure the documentation is formatted properly.
19 | //go:generate terraform fmt -recursive ./examples/
20 |
21 | // Run the docs generation tool, check its repository for more information on how it works and how docs
22 | // can be customized.
23 | //go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate -provider-name superset
24 |
25 | var (
26 | // these will be set by the goreleaser configuration
27 | // to appropriate values for the compiled binary.
28 | version string = "dev"
29 |
30 | // goreleaser can pass other information to the main package, such as the specific commit
31 | // https://goreleaser.com/cookbooks/using-main.version/
32 | )
33 |
34 | func main() {
35 | var debug bool
36 |
37 | flag.BoolVar(&debug, "debug", false, "set to true to run the provider with support for debuggers like delve")
38 | flag.Parse()
39 |
40 | opts := providerserver.ServeOpts{
41 | Address: "registry.terraform.io/platacard/superset",
42 | Debug: debug,
43 | }
44 |
45 | err := providerserver.Serve(context.Background(), provider.New(version), opts)
46 |
47 | if err != nil {
48 | log.Fatal(err.Error())
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Terraform Provider Apache Superset
2 |
3 | - Provider resources and data sources (`internal/provider/`),
4 | - Examples (`examples/`) and generated documentation (`docs/`),
5 | - Miscellaneous meta files.
6 |
7 | ## Requirements
8 |
9 | - [Terraform](https://developer.hashicorp.com/terraform/downloads) >= 1.0
10 | - [Go](https://golang.org/doc/install) >= 1.21
11 |
12 | ## Building The Provider
13 |
14 | 1. Clone the repository
15 | 1. Enter the repository directory
16 | 1. Build the provider using the Go `install` command:
17 |
18 | ```shell
19 | go install .
20 | ```
21 |
22 | ## Adding Dependencies
23 |
24 | This provider uses [Go modules](https://github.com/golang/go/wiki/Modules).
25 | Please see the Go documentation for the most up to date information about using Go modules.
26 |
27 | To add a new dependency `github.com/author/dependency` to your Terraform provider:
28 |
29 | ```shell
30 | go get github.com/author/dependency
31 | go mod tidy
32 | ```
33 |
34 | Then commit the changes to `go.mod` and `go.sum`.
35 |
36 | ## Using the provider
37 |
38 | Configure the provider with the host, username, and password of your Superset instance, as documented in `docs/index.md`; a minimal configuration sketch follows this file.
39 |
40 | ## Developing the Provider
41 |
42 | If you wish to work on the provider, you'll first need [Go](http://www.golang.org) installed on your machine (see [Requirements](#requirements) above).
43 |
44 | To compile the provider, run `go install`. This will build the provider and put the provider binary in the `$GOPATH/bin` directory.
45 |
46 | To generate or update documentation, run `go generate`.
47 |
48 | In order to run the full suite of Acceptance tests, run `make testacc`.
49 |
50 | *Note:* acceptance tests normally create real resources; in this repository they are largely backed by mocked Superset API responses (see `internal/provider/*_test.go`).
51 |
52 | ```shell
53 | make testacc
54 | ```
55 |
--------------------------------------------------------------------------------
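A minimal configuration sketch for the "Using the provider" section above, assuming the provider is consumed from the public registry under the `platacard/superset` namespace referenced in `catalog-info.yaml`; the version constraint and credential values are illustrative:

```terraform
terraform {
  required_providers {
    superset = {
      source  = "platacard/superset"
      version = ">= 0.2.0" # illustrative constraint
    }
  }
}

provider "superset" {
  host     = "https://superset.example.com" # your Superset instance URL
  username = "admin"                        # a user with admin API permissions
  password = "change-me"
}

# Example resource managed through the provider.
resource "superset_role" "analysts" {
  name = "Analysts"
}
```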
/docs/resources/user.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_user Resource - superset"
4 | subcategory: ""
5 | description: |-
6 | Manages a user in Superset.
7 | ---
8 |
9 | # superset_user (Resource)
10 |
11 | Manages a user in Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | resource "superset_user" "example_user" {
17 | username = "example.user"
18 | first_name = "Example"
19 | last_name = "Sample"
20 | email = "example.sample@example.com"
21 | password = "ExampleSamplePass123!"
22 | active = true
23 | roles = [3, 4]
24 | }
25 | ```
26 |
27 |
28 | ## Schema
29 |
30 | ### Required
31 |
32 | - `email` (String) Email address of the user.
33 | - `roles` (List of Number) List of role IDs assigned to the user.
34 | - `username` (String) Username of the user.
35 |
36 | ### Optional
37 |
38 | - `active` (Boolean) Whether the user is active. Defaults to true.
39 | - `first_name` (String) First name of the user.
40 | - `last_name` (String) Last name of the user.
41 | - `password` (String, Sensitive) Password of the user. Required for creation, optional for updates.
42 |
43 | ### Read-Only
44 |
45 | - `id` (Number) Numeric identifier of the user.
46 | - `last_updated` (String) Timestamp of the last update.
47 |
48 | ## Import
49 |
50 | Import is supported using the following syntax:
51 |
52 | The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
53 |
54 | ```shell
55 | # A user can be imported by specifying the user's numeric ID.
56 | terraform import superset_user.example_user 213
57 | ```
58 |
--------------------------------------------------------------------------------
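Role IDs for the `roles` attribute do not have to be hard-coded; they can be looked up by name through the `superset_roles` data source. A sketch, assuming a role named `Gamma` exists in the target Superset instance:

```terraform
data "superset_roles" "all" {}

locals {
  # Pick the ID of the role named "Gamma" out of the data source result
  # (the role name is an assumption about the target instance).
  gamma_role_id = one([for r in data.superset_roles.all.roles : r.id if r.name == "Gamma"])
}

resource "superset_user" "example_user" {
  username   = "example.user"
  first_name = "Example"
  last_name  = "Sample"
  email      = "example.sample@example.com"
  password   = "ExampleSamplePass123!"
  active     = true
  roles      = [local.gamma_role_id]
}
```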
/docs/resources/role_permissions.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_role_permissions Resource - superset"
4 | subcategory: ""
5 | description: |-
6 | Manages the permissions associated with a role in Superset.
7 | ---
8 |
9 | # superset_role_permissions (Resource)
10 |
11 | Manages the permissions associated with a role in Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | resource "superset_role_permissions" "example" {
17 | role_name = "Example-DB-Connect"
18 | resource_permissions = [
19 | { permission = "database_access", view_menu = "[Trino].(id:34)" },
20 | { permission = "schema_access", view_menu = "[Trino].[devstorage]" },
21 | ]
22 | }
23 | ```
24 |
25 |
26 | ## Schema
27 |
28 | ### Required
29 |
30 | - `resource_permissions` (Attributes List) A list of permissions associated with the role. (see [below for nested schema](#nestedatt--resource_permissions))
31 | - `role_name` (String) The name of the role to which the permissions are assigned.
32 |
33 | ### Read-Only
34 |
35 | - `id` (String) The unique identifier for the role permissions resource.
36 | - `last_updated` (String) The timestamp of the last update to the role permissions.
37 |
38 |
39 | ### Nested Schema for `resource_permissions`
40 |
41 | Required:
42 |
43 | - `permission` (String) The name of the permission.
44 | - `view_menu` (String) The name of the view menu associated with the permission.
45 |
46 | Read-Only:
47 |
48 | - `id` (Number) The unique identifier of the permission.
49 |
50 | ## Import
51 |
52 | Import is supported using the following syntax:
53 |
54 | The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
55 |
56 | ```shell
57 | # Role permissions can be imported by specifying the numeric ID of the role.
58 | terraform import superset_role_permissions.example 129
59 | ```
60 |
--------------------------------------------------------------------------------
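One way to discover the exact `permission`/`view_menu` strings Superset expects is to read them off a role that already has them, using the companion `superset_role_permissions` data source. A sketch; the `Admin` role name is an assumption:

```terraform
# Inspect the permission/view_menu pairs an existing role already has,
# then reuse those strings in a superset_role_permissions resource.
data "superset_role_permissions" "reference" {
  role_name = "Admin"
}

output "reference_permissions" {
  value = data.superset_role_permissions.reference.permissions
}
```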
/docs/resources/database.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_database Resource - superset"
4 | subcategory: ""
5 | description: |-
6 | Manages a database connection in Superset.
7 | ---
8 |
9 | # superset_database (Resource)
10 |
11 | Manages a database connection in Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | resource "superset_database" "example" {
17 | connection_name = "SuperSetDBConnection"
18 | db_engine = "postgresql"
19 | db_user = "supersetuser"
20 | db_pass = "dbpassword"
21 | db_host = "pg.db.ro.domain.com"
22 | db_port = 5432
23 | db_name = "supersetdb"
24 | allow_ctas = false
25 | allow_cvas = false
26 | allow_dml = false
27 | allow_run_async = true
28 | expose_in_sqllab = false
29 | }
30 | ```
31 |
32 |
33 | ## Schema
34 |
35 | ### Required
36 |
37 | - `allow_ctas` (Boolean) Allow CTAS.
38 | - `allow_cvas` (Boolean) Allow CVAS.
39 | - `allow_dml` (Boolean) Allow DML.
40 | - `allow_run_async` (Boolean) Allow run async.
41 | - `connection_name` (String) Name of the database connection.
42 | - `db_engine` (String) Database engine (e.g., postgresql, mysql).
43 | - `db_host` (String) Database host.
44 | - `db_name` (String) Database name.
45 | - `db_pass` (String, Sensitive) Database password.
46 | - `db_port` (Number) Database port.
47 | - `db_user` (String) Database username.
48 | - `expose_in_sqllab` (Boolean) Expose in SQL Lab.
49 |
50 | ### Read-Only
51 |
52 | - `id` (Number) Numeric identifier of the database connection.
53 |
54 | ## Import
55 |
56 | Import is supported using the following syntax:
57 |
58 | The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
59 |
60 | ```shell
61 | # A database connection can be imported by specifying its numeric ID.
62 | terraform import superset_database.example 337
63 | ```
64 |
--------------------------------------------------------------------------------
/.goreleaser.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | # Visit https://goreleaser.com for documentation on how to customize this
3 | # behavior.
4 | before:
5 | hooks:
6 | # this is just an example and not a requirement for provider building/publishing
7 | - go mod tidy
8 | builds:
9 | - env:
10 | # goreleaser does not work with CGO, it could also complicate
11 | # usage by users in CI/CD systems like HCP Terraform where
12 | # they are unable to install libraries.
13 | - CGO_ENABLED=0
14 | mod_timestamp: '{{ .CommitTimestamp }}'
15 | flags:
16 | - -trimpath
17 | ldflags:
18 | - '-s -w -X main.version={{.Version}} -X main.commit={{.Commit}}'
19 | goos:
20 | - freebsd
21 | - windows
22 | - linux
23 | - darwin
24 | goarch:
25 | - amd64
26 | - '386'
27 | - arm
28 | - arm64
29 | ignore:
30 | - goos: darwin
31 | goarch: '386'
32 | binary: '{{ .ProjectName }}_v{{ .Version }}'
33 | archives:
34 | - format: zip
35 | name_template: '{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}'
36 | checksum:
37 | extra_files:
38 | - glob: 'terraform-registry-manifest.json'
39 | name_template: '{{ .ProjectName }}_{{ .Version }}_manifest.json'
40 | name_template: '{{ .ProjectName }}_{{ .Version }}_SHA256SUMS'
41 | algorithm: sha256
42 | signs:
43 | - artifacts: checksum
44 | args:
45 | # if you are using this in a GitHub action or some other automated pipeline, you
46 | # need to pass the batch flag to indicate it's not interactive.
47 | - "--batch"
48 | - "--local-user"
49 | - "{{ .Env.GPG_FINGERPRINT }}" # set this environment variable for your signing key
50 | - "--output"
51 | - "${signature}"
52 | - "--detach-sign"
53 | - "${artifact}"
54 | release:
55 | extra_files:
56 | - glob: 'terraform-registry-manifest.json'
57 | name_template: '{{ .ProjectName }}_{{ .Version }}_manifest.json'
58 | # If you want to manually examine the release before it's live, uncomment this line:
59 | # draft: true
60 | # changelog:
61 | # skip: true
62 |
--------------------------------------------------------------------------------
/internal/provider/databases_data_source_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccDatabasesDataSource(t *testing.T) {
11 | // Activate httpmock
12 | httpmock.Activate()
13 | defer httpmock.DeactivateAndReset()
14 |
15 | // Mock the Superset API login response
16 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
17 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
18 |
19 | // Mock the Superset API response for fetching databases
20 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/database/",
21 | httpmock.NewStringResponder(200, `{
22 | "result": [
23 | {"id": 34, "database_name": "Trino"},
24 | {"id": 1, "database_name": "SelfPostgreSQL"},
25 | {"id": 141, "database_name": "DWH_database_connection3"},
26 | {"id": 140, "database_name": "DWH_database_connection2"},
27 | {"id": 139, "database_name": "DWH_database_connection"},
28 | {"id": 174, "database_name": "DWH_database_connection4"}
29 | ]
30 | }`))
31 |
32 | resource.Test(t, resource.TestCase{
33 | PreCheck: func() { testAccPreCheck(t) },
34 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
35 | Steps: []resource.TestStep{
36 | // Read testing
37 | {
38 | Config: providerConfig + testAccDatabasesDataSourceConfig,
39 | Check: resource.ComposeAggregateTestCheckFunc(
40 | resource.TestCheckResourceAttr("data.superset_databases.test", "databases.#", "6"),
41 | resource.TestCheckResourceAttr("data.superset_databases.test", "databases.0.id", "34"),
42 | resource.TestCheckResourceAttr("data.superset_databases.test", "databases.0.database_name", "Trino"),
43 | resource.TestCheckResourceAttr("data.superset_databases.test", "databases.1.id", "1"),
44 | resource.TestCheckResourceAttr("data.superset_databases.test", "databases.1.database_name", "SelfPostgreSQL"),
45 | ),
46 | },
47 | },
48 | })
49 | }
50 |
51 | const testAccDatabasesDataSourceConfig = `
52 | data "superset_databases" "test" {}
53 | `
54 |
--------------------------------------------------------------------------------
/docs/resources/meta_database.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_meta_database Resource - superset"
4 | subcategory: ""
5 | description: |-
6 | Manages a meta database connection in Superset for cross-database queries.
7 | ---
8 |
9 | # superset_meta_database (Resource)
10 |
11 | Manages a meta database connection in Superset for cross-database queries.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | resource "superset_meta_database" "example" {
17 | database_name = "SuperSetDBConnection"
18 | sqlalchemy_uri = "superset://" # optional
19 | allowed_databases = [
20 | "[Team]-Service1-Dev-RO[d_team_service1_db]",
21 | "[Team]-Service2-Prod-RO[d_team_market_service2_db]"
22 | ]
23 | expose_in_sqllab = true
24 | allow_ctas = false
25 | allow_cvas = false
26 | allow_dml = false
27 | allow_run_async = true
28 | is_managed_externally = false
29 | }
30 | ```
31 |
32 |
33 | ## Schema
34 |
35 | ### Required
36 |
37 | - `allowed_databases` (List of String) List of database names that can be accessed through this meta connection.
38 | - `database_name` (String) Name of the meta database connection.
39 |
40 | ### Optional
41 |
42 | - `allow_ctas` (Boolean) Allow CREATE TABLE AS queries.
43 | - `allow_cvas` (Boolean) Allow CREATE VIEW AS queries.
44 | - `allow_dml` (Boolean) Allow DML queries (INSERT, UPDATE, DELETE).
45 | - `allow_run_async` (Boolean) Allow asynchronous query execution.
46 | - `expose_in_sqllab` (Boolean) Whether to expose this connection in SQL Lab.
47 | - `is_managed_externally` (Boolean) Whether this connection is managed externally.
48 | - `sqlalchemy_uri` (String) SQLAlchemy URI for the meta database connection. Defaults to 'superset://' for meta databases.
49 |
50 | ### Read-Only
51 |
52 | - `id` (Number) Numeric identifier of the meta database connection.
53 |
54 | ## Import
55 |
56 | Import is supported using the following syntax:
57 |
58 | The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
59 |
60 | ```shell
61 | # A meta database connection can be imported by specifying its numeric database ID.
62 | terraform import superset_meta_database.example 1949
63 | ```
64 |
--------------------------------------------------------------------------------
/internal/provider/role_resource_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccRoleResource(t *testing.T) {
11 | // Activate httpmock
12 | httpmock.Activate()
13 | defer httpmock.DeactivateAndReset()
14 |
15 | // Mock the Superset API login response
16 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
17 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
18 |
19 | // Mock the Superset API response for checking if role exists (for GetRoleIDByName)
20 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles?q=(page_size:5000)",
21 | httpmock.NewStringResponder(200, `{"result": [{"id": 1, "name": "Antifraud"}]}`))
22 |
23 | // Mock the Superset API response for creating roles
24 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/roles/",
25 | httpmock.NewStringResponder(201, `{"id": 1, "name": "Antifraud"}`))
26 |
27 | // Mock the Superset API response for reading roles by ID
28 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles/1",
29 | httpmock.NewStringResponder(200, `{"result": {"id": 1, "name": "Antifraud"}}`))
30 |
31 | // Mock the Superset API response for deleting roles
32 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/security/roles/1",
33 | httpmock.NewStringResponder(204, ""))
34 |
35 | resource.Test(t, resource.TestCase{
36 | PreCheck: func() { testAccPreCheck(t) },
37 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
38 | Steps: []resource.TestStep{
39 | // Create and Read testing
40 | {
41 | Config: providerConfig + testAccRoleResourceConfig,
42 | Check: resource.ComposeAggregateTestCheckFunc(
43 | resource.TestCheckResourceAttr("superset_role.team_antifraud", "name", "Antifraud"),
44 | resource.TestCheckResourceAttrSet("superset_role.team_antifraud", "id"),
45 | resource.TestCheckResourceAttrSet("superset_role.team_antifraud", "last_updated"),
46 | ),
47 | },
48 | // ImportState testing
49 | {
50 | ResourceName: "superset_role.team_antifraud",
51 | ImportState: true,
52 | ImportStateVerify: true,
53 | ImportStateVerifyIgnore: []string{"last_updated"},
54 | },
55 | },
56 | })
57 | }
58 |
59 | const testAccRoleResourceConfig = `
60 | resource "superset_role" "team_antifraud" {
61 | name = "Antifraud"
62 | }
63 | `
64 |
--------------------------------------------------------------------------------
/docs/resources/dataset.md:
--------------------------------------------------------------------------------
1 | ---
2 | # generated by https://github.com/hashicorp/terraform-plugin-docs
3 | page_title: "superset_dataset Resource - superset"
4 | subcategory: ""
5 | description: |-
6 | Manages a dataset in Superset.
7 | ---
8 |
9 | # superset_dataset (Resource)
10 |
11 | Manages a dataset in Superset.
12 |
13 | ## Example Usage
14 |
15 | ```terraform
16 | resource "superset_dataset" "example" {
17 | table_name = "example_table"
18 | database_name = "PostgreSQL"
19 | schema = "public"
20 | sql = <<-EOF
21 | WITH RECURSIVE recursive_sequence AS (
22 | SELECT 1 as level, 1 as value, 'Level One' as description
23 | UNION ALL
24 | SELECT
25 | level + 1,
26 | value * 2,
27 | CASE
28 | WHEN level + 1 <= 3 THEN CONCAT('Level ',
29 | CASE level + 1
30 | WHEN 2 THEN 'Two'
31 | WHEN 3 THEN 'Three'
32 | END)
33 | ELSE 'Max Level'
34 | END
35 | FROM recursive_sequence
36 | WHERE level < 5
37 | ),
38 | calculated_data AS (
39 | SELECT
40 | level,
41 | value,
42 | description,
43 | value * 1.5 as weighted_value,
44 | ROUND(LN(value::numeric), 2) as log_value, -- changed to LN with a cast to numeric
45 | ROW_NUMBER() OVER (ORDER BY level) as row_num
46 | FROM recursive_sequence
47 | )
48 | SELECT
49 | level as hierarchy_level,
50 | value as base_value,
51 | weighted_value,
52 | log_value,
53 | description as level_description,
54 | CASE
55 | WHEN row_num % 2 = 0 THEN 'Even'
56 | ELSE 'Odd'
57 | END as parity,
58 | NOW() as generated_timestamp
59 | FROM calculated_data
60 | ORDER BY level
61 | EOF
62 | }
63 | ```
64 |
65 |
66 | ## Schema
67 |
68 | ### Required
69 |
70 | - `database_name` (String) Name of the database where the dataset resides. Cannot be changed after creation.
71 | - `table_name` (String) Name of the table or dataset.
72 |
73 | ### Optional
74 |
75 | - `schema` (String) Database schema name (optional).
76 | - `sql` (String) SQL query for the dataset (optional, for SQL-based datasets).
77 |
78 | ### Read-Only
79 |
80 | - `id` (Number) Numeric identifier of the dataset.
81 |
82 | ## Import
83 |
84 | Import is supported using the following syntax:
85 |
86 | The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
87 |
88 | ```shell
89 | # A dataset can be imported by specifying its numeric ID.
90 | terraform import superset_dataset.example 123
91 | ```
92 |
--------------------------------------------------------------------------------
/internal/provider/role_permissions_data_source_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
5 | "github.com/jarcoal/httpmock"
6 | "testing"
7 | )
8 |
9 | func TestAccRolePermissionsDataSource(t *testing.T) {
10 | // Activate httpmock
11 | httpmock.Activate()
12 | defer httpmock.DeactivateAndReset()
13 |
14 | // Mock the Superset API login response
15 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
16 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
17 |
18 | // Mock the Superset API response for getting role ID by name
19 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles?q=(page_size:5000)",
20 | httpmock.NewStringResponder(200, `{"result": [{"id": 1, "name": "DWH-DB-Connect"}]}`))
21 |
22 | // Mock the Superset API response for getting role permissions
23 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles/1/permissions/",
24 | httpmock.NewStringResponder(200, `{
25 | "result": [
26 | {"id": 240, "permission_name": "database_access", "view_menu_name": "[Trino].(id:34)"},
27 | {"id": 241, "permission_name": "schema_access", "view_menu_name": "[Trino].[devoriginationzestorage]"}
28 | ]
29 | }`))
30 |
31 | resource.Test(t, resource.TestCase{
32 | PreCheck: func() { testAccPreCheck(t) },
33 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
34 | Steps: []resource.TestStep{
35 | // Read testing
36 | {
37 | Config: providerConfig + testAccRolePermissionsDataSourceConfig,
38 | Check: resource.ComposeAggregateTestCheckFunc(
39 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "role_name", "DWH-DB-Connect"),
40 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.#", "2"),
41 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.0.id", "240"),
42 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.0.permission_name", "database_access"),
43 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.0.view_menu_name", "[Trino].(id:34)"),
44 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.1.id", "241"),
45 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.1.permission_name", "schema_access"),
46 | resource.TestCheckResourceAttr("data.superset_role_permissions.example", "permissions.1.view_menu_name", "[Trino].[devoriginationzestorage]"),
47 | ),
48 | },
49 | },
50 | })
51 | }
52 |
53 | const testAccRolePermissionsDataSourceConfig = `
54 | data "superset_role_permissions" "example" {
55 | role_name = "DWH-DB-Connect"
56 | }
57 | `
58 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | # Terraform Provider testing workflow.
2 | name: Tests
3 |
4 | # This GitHub action runs your tests for each pull request and push.
5 | # Optionally, you can turn it on using a schedule for regular testing.
6 | on:
7 | pull_request:
8 | paths-ignore:
9 | - 'README.md'
10 | push:
11 | paths-ignore:
12 | - 'README.md'
13 |
14 | # Testing only needs permissions to read the repository contents.
15 | permissions:
16 | contents: read
17 |
18 | jobs:
19 | # Ensure project builds before running testing matrix
20 | build:
21 | name: Build
22 | runs-on: ubuntu-latest
23 | timeout-minutes: 5
24 | steps:
25 | - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
26 | - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5.0.1
27 | with:
28 | go-version-file: 'go.mod'
29 | cache: true
30 | - run: go mod download
31 | - run: go build -v .
32 | - name: Run linters
33 | uses: golangci/golangci-lint-action@38e1018663fa5173f3968ea0777460d3de38f256 # v5.3.0
34 | with:
35 | version: latest
36 |
37 | generate:
38 | runs-on: ubuntu-latest
39 | steps:
40 | - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
41 | - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5.0.1
42 | with:
43 | go-version-file: 'go.mod'
44 | cache: true
45 | # Temporarily download Terraform 1.8 prerelease for function documentation support.
46 | # When Terraform 1.8.0 final is released, this can be removed.
47 | - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
48 | with:
49 | terraform_version: '1.8.0-alpha20240216'
50 | terraform_wrapper: false
51 | - run: go generate ./...
52 | - name: git diff
53 | run: |
54 | git diff --compact-summary --exit-code || \
55 | (echo; echo "Unexpected difference in directories after code generation. Run 'go generate ./...' command and commit."; exit 1)
56 |
57 | # Run acceptance tests in a matrix with Terraform CLI versions
58 | test:
59 | name: Terraform Provider Acceptance Tests
60 | needs: build
61 | runs-on: ubuntu-latest
62 | timeout-minutes: 15
63 | strategy:
64 | fail-fast: false
65 | matrix:
66 | # list whatever Terraform versions here you would like to support
67 | terraform:
68 | - '1.8.*'
69 | steps:
70 | - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
71 | - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5.0.1
72 | with:
73 | go-version-file: 'go.mod'
74 | cache: true
75 | - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
76 | with:
77 | terraform_version: ${{ matrix.terraform }}
78 | terraform_wrapper: false
79 | - run: go mod download
80 | - env:
81 | TF_ACC: "1"
82 | SUPERSET_USERNAME: 'fake-username'
83 | SUPERSET_PASSWORD: 'fake-password'
84 | SUPERSET_HOST: 'http://superset-host'
85 | run: go test -v -cover ./internal/provider/
86 | timeout-minutes: 10
87 |
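The acceptance-test job above exports `TF_ACC` together with `SUPERSET_HOST`, `SUPERSET_USERNAME`, and `SUPERSET_PASSWORD` before running the provider tests. Those tests call a `testAccPreCheck(t)` helper defined in `provider_test.go`, which is not part of this excerpt. As a hedged illustration only, a minimal pre-check that simply validates the variables the workflow sets could look like the sketch below; the helper actually shipped in the repository may do more or less than this.

```go
package provider

import (
	"os"
	"testing"
)

// testAccPreCheckSketch is a hypothetical stand-in for the testAccPreCheck
// helper the tests reference: it fails fast when the environment variables
// exported by the CI workflow are missing.
func testAccPreCheckSketch(t *testing.T) {
	t.Helper()
	for _, v := range []string{"SUPERSET_HOST", "SUPERSET_USERNAME", "SUPERSET_PASSWORD"} {
		if os.Getenv(v) == "" {
			t.Fatalf("%s must be set for acceptance tests", v)
		}
	}
}
```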
--------------------------------------------------------------------------------
/internal/provider/users_data_source_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccUsersDataSource(t *testing.T) {
11 | // Activate httpmock
12 | httpmock.Activate()
13 | defer httpmock.DeactivateAndReset()
14 |
15 | // Mock the Superset API login response
16 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
17 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
18 |
19 | // Mock the Superset API response for fetching users
20 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/users/?q=(page_size:5000)",
21 | httpmock.NewStringResponder(200, `{
22 | "result": [
23 | {
24 | "id": 1,
25 | "username": "admin",
26 | "first_name": "Admin",
27 | "last_name": "User",
28 | "email": "admin@example.com",
29 | "active": true,
30 | "roles": [
31 | {"id": 1, "name": "Admin"}
32 | ]
33 | },
34 | {
35 | "id": 2,
36 | "username": "test.user",
37 | "first_name": "Test",
38 | "last_name": "User",
39 | "email": "test.user@example.com",
40 | "active": true,
41 | "roles": [
42 | {"id": 4, "name": "Gamma"}
43 | ]
44 | }
45 | ]
46 | }`))
47 |
48 | resource.Test(t, resource.TestCase{
49 | PreCheck: func() { testAccPreCheck(t) },
50 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
51 | Steps: []resource.TestStep{
52 | // Read testing
53 | {
54 | Config: providerConfig + testAccUsersDataSourceConfig,
55 | Check: resource.ComposeAggregateTestCheckFunc(
56 | resource.TestCheckResourceAttr("data.superset_users.test", "users.#", "2"),
57 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.id", "1"),
58 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.username", "admin"),
59 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.first_name", "Admin"),
60 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.last_name", "User"),
61 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.email", "admin@example.com"),
62 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.active", "true"),
63 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.roles.#", "1"),
64 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.roles.0.id", "1"),
65 | resource.TestCheckResourceAttr("data.superset_users.test", "users.0.roles.0.name", "Admin"),
66 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.id", "2"),
67 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.username", "test.user"),
68 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.first_name", "Test"),
69 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.last_name", "User"),
70 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.email", "test.user@example.com"),
71 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.active", "true"),
72 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.roles.#", "1"),
73 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.roles.0.id", "4"),
74 | resource.TestCheckResourceAttr("data.superset_users.test", "users.1.roles.0.name", "Gamma"),
75 | ),
76 | },
77 | },
78 | })
79 | }
80 |
81 | const testAccUsersDataSourceConfig = `
82 | data "superset_users" "test" {}
83 | `
84 |
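Every test step concatenates a `providerConfig` constant (defined in `provider_test.go`, not included in this listing) with the data source configuration. The sketch below is only a plausible guess at that constant, assuming it points the provider at the same fake host and credentials the httpmock responders intercept; the real definition might instead rely on the `SUPERSET_*` environment variables.

```go
package provider

// providerConfigSketch is a hypothetical example of the providerConfig
// constant prepended to every test configuration. It targets the same fake
// host and credentials that the httpmock responders intercept.
const providerConfigSketch = `
provider "superset" {
  host     = "http://superset-host"
  username = "fake-username"
  password = "fake-password"
}
`
```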
--------------------------------------------------------------------------------
/internal/provider/roles_data_source_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccRolesDataSource(t *testing.T) {
11 | // Activate httpmock
12 | httpmock.Activate()
13 | defer httpmock.DeactivateAndReset()
14 |
15 | // Mock the Superset API login response
16 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
17 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
18 |
19 | // Mock the Superset API response for fetching roles
20 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles?q=(page_size:5000)",
21 | httpmock.NewStringResponder(200, `{
22 | "result": [
23 | {"id": 1, "name": "Admin"},
24 | {"id": 2, "name": "Public"},
25 | {"id": 3, "name": "Alpha"},
26 | {"id": 4, "name": "Gamma"},
27 | {"id": 5, "name": "sql_lab"},
28 | {"id": 38, "name": "Trino_Table-Role"},
29 | {"id": 71, "name": "Custom-DWH"},
30 | {"id": 73, "name": "Role for DWH"},
31 | {"id": 555, "name": "Toronto-Team-Role"},
32 | {"id": 129, "name": "DWH-DB-Connect"}
33 | ]
34 | }`))
35 |
36 | resource.Test(t, resource.TestCase{
37 | PreCheck: func() { testAccPreCheck(t) },
38 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
39 | Steps: []resource.TestStep{
40 | // Read testing
41 | {
42 | Config: providerConfig + testAccRolesDataSourceConfig,
43 | Check: resource.ComposeAggregateTestCheckFunc(
44 |                     resource.TestCheckResourceAttr("data.superset_roles.test", "roles.#", "10"), // matches the ten roles in the mocked response above
45 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.0.id", "1"),
46 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.0.name", "Admin"),
47 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.1.id", "2"),
48 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.1.name", "Public"),
49 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.2.id", "3"),
50 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.2.name", "Alpha"),
51 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.3.id", "4"),
52 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.3.name", "Gamma"),
53 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.4.id", "5"),
54 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.4.name", "sql_lab"),
55 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.5.id", "38"),
56 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.5.name", "Trino_Table-Role"),
57 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.6.id", "71"),
58 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.6.name", "Custom-DWH"),
59 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.7.id", "73"),
60 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.7.name", "Role for DWH"),
61 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.8.id", "555"),
62 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.8.name", "Toronto-Team-Role"),
63 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.9.id", "129"),
64 | resource.TestCheckResourceAttr("data.superset_roles.test", "roles.9.name", "DWH-DB-Connect"),
65 | ),
66 | },
67 | },
68 | })
69 | }
70 |
71 | const testAccRolesDataSourceConfig = `
72 | data "superset_roles" "test" {}
73 | `
74 |
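The check list above repeats `resource.TestCheckResourceAttr` for every mocked role. The following sketch is not part of the repository; it simply shows how the same assertions could be generated from a slice of expected roles, so the mocked payload and the checks cannot drift apart.

```go
package provider

import (
	"fmt"

	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
)

// roleChecksSketch builds one check per expected role plus a count check,
// so the assertions always match the mocked payload. Illustrative only.
func roleChecksSketch(expected []struct{ ID, Name string }) resource.TestCheckFunc {
	checks := []resource.TestCheckFunc{
		resource.TestCheckResourceAttr("data.superset_roles.test", "roles.#", fmt.Sprintf("%d", len(expected))),
	}
	for i, r := range expected {
		prefix := fmt.Sprintf("roles.%d.", i)
		checks = append(checks,
			resource.TestCheckResourceAttr("data.superset_roles.test", prefix+"id", r.ID),
			resource.TestCheckResourceAttr("data.superset_roles.test", prefix+"name", r.Name),
		)
	}
	return resource.ComposeAggregateTestCheckFunc(checks...)
}
```

A test step could then use `Check: roleChecksSketch(expectedRoles)` instead of the hand-written list.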
--------------------------------------------------------------------------------
/internal/provider/roles_data_source.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 |
7 | "github.com/hashicorp/terraform-plugin-framework/datasource"
8 | "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
9 | "github.com/hashicorp/terraform-plugin-framework/types"
10 | "terraform-provider-superset/internal/client"
11 | )
12 |
13 | // Ensure the implementation satisfies the expected interfaces.
14 | var (
15 | _ datasource.DataSource = &rolesDataSource{}
16 | _ datasource.DataSourceWithConfigure = &rolesDataSource{}
17 | )
18 |
19 | // NewRolesDataSource is a helper function to simplify the provider implementation.
20 | func NewRolesDataSource() datasource.DataSource {
21 | return &rolesDataSource{}
22 | }
23 |
24 | // rolesDataSource is the data source implementation.
25 | type rolesDataSource struct {
26 | client *client.Client
27 | }
28 |
29 | // rolesDataSourceModel maps the data source schema data.
30 | type rolesDataSourceModel struct {
31 | Roles []roleModel `tfsdk:"roles"`
32 | }
33 |
34 | // roleModel maps the role schema data.
35 | type roleModel struct {
36 | ID types.Int64 `tfsdk:"id"`
37 | Name types.String `tfsdk:"name"`
38 | }
39 |
40 | // Metadata returns the data source type name.
41 | func (d *rolesDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
42 | resp.TypeName = req.ProviderTypeName + "_roles"
43 | }
44 |
45 | // Schema defines the schema for the data source.
46 | func (d *rolesDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
47 | resp.Schema = schema.Schema{
48 | Description: "Fetches the list of roles from Superset.",
49 | Attributes: map[string]schema.Attribute{
50 | "roles": schema.ListNestedAttribute{
51 | Description: "List of roles.",
52 | Computed: true,
53 | NestedObject: schema.NestedAttributeObject{
54 | Attributes: map[string]schema.Attribute{
55 | "id": schema.Int64Attribute{
56 | Description: "Numeric identifier of the role.",
57 | Computed: true,
58 | },
59 | "name": schema.StringAttribute{
60 | Description: "Name of the role.",
61 | Computed: true,
62 | },
63 | },
64 | },
65 | },
66 | },
67 | }
68 | }
69 |
70 | // Read refreshes the Terraform state with the latest data.
71 | func (d *rolesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
72 | var state rolesDataSourceModel
73 |
74 | roles, err := d.client.FetchRoles()
75 | if err != nil {
76 | resp.Diagnostics.AddError(
77 | "Unable to Read Superset Roles",
78 | err.Error(),
79 | )
80 | return
81 | }
82 |
83 | for _, role := range roles {
84 | state.Roles = append(state.Roles, roleModel{
85 | ID: types.Int64Value(role.ID),
86 | Name: types.StringValue(role.Name),
87 | })
88 | }
89 |
90 | diags := resp.State.Set(ctx, &state)
91 | resp.Diagnostics.Append(diags...)
92 | }
93 |
94 | // Configure adds the provider configured client to the data source.
95 | func (d *rolesDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
96 | if req.ProviderData == nil {
97 | return
98 | }
99 |
100 | client, ok := req.ProviderData.(*client.Client)
101 | if !ok {
102 | resp.Diagnostics.AddError(
103 | "Unexpected Data Source Configure Type",
104 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
105 | )
106 | return
107 | }
108 |
109 | d.client = client
110 | }
111 |
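`d.client.FetchRoles()` lives in the `internal/client` package, which is not part of this listing. Based on the endpoint the tests mock (`GET /api/v1/security/roles?q=(page_size:5000)`), a minimal sketch of the shape such a call could take is shown below; it deliberately omits the login token, error-body handling, and configuration that the real client is responsible for.

```go
package client

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Role mirrors the two fields the roles data source reads.
type Role struct {
	ID   int64  `json:"id"`
	Name string `json:"name"`
}

// fetchRolesSketch is an illustrative stand-in for FetchRoles: list roles via
// the endpoint the tests mock and decode the "result" array. The real client
// also attaches the bearer token obtained at login.
func fetchRolesSketch(httpClient *http.Client, host string) ([]Role, error) {
	resp, err := httpClient.Get(host + "/api/v1/security/roles?q=(page_size:5000)")
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status %d when listing roles", resp.StatusCode)
	}

	var payload struct {
		Result []Role `json:"result"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return nil, err
	}
	return payload.Result, nil
}
```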
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module terraform-provider-superset
2 |
3 | go 1.24.6
4 |
5 | require (
6 | github.com/hashicorp/terraform-plugin-docs v0.24.0
7 | github.com/hashicorp/terraform-plugin-framework v1.16.1
8 | github.com/hashicorp/terraform-plugin-go v0.29.0
9 | github.com/hashicorp/terraform-plugin-log v0.9.0
10 | github.com/hashicorp/terraform-plugin-testing v1.13.3
11 | github.com/jarcoal/httpmock v1.4.1
12 | )
13 |
14 | require (
15 | github.com/BurntSushi/toml v1.3.2 // indirect
16 | github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
17 | github.com/Masterminds/goutils v1.1.1 // indirect
18 | github.com/Masterminds/semver/v3 v3.2.1 // indirect
19 | github.com/Masterminds/sprig/v3 v3.2.3 // indirect
20 | github.com/ProtonMail/go-crypto v1.1.6 // indirect
21 | github.com/agext/levenshtein v1.2.2 // indirect
22 | github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
23 | github.com/armon/go-radix v1.0.0 // indirect
24 | github.com/bgentry/speakeasy v0.1.0 // indirect
25 | github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
26 | github.com/cloudflare/circl v1.6.1 // indirect
27 | github.com/fatih/color v1.17.0 // indirect
28 | github.com/golang/protobuf v1.5.4 // indirect
29 | github.com/google/go-cmp v0.7.0 // indirect
30 | github.com/google/uuid v1.6.0 // indirect
31 | github.com/hashicorp/cli v1.1.7 // indirect
32 | github.com/hashicorp/errwrap v1.1.0 // indirect
33 | github.com/hashicorp/go-checkpoint v0.5.0 // indirect
34 | github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
35 | github.com/hashicorp/go-cty v1.5.0 // indirect
36 | github.com/hashicorp/go-hclog v1.6.3 // indirect
37 | github.com/hashicorp/go-multierror v1.1.1 // indirect
38 | github.com/hashicorp/go-plugin v1.7.0 // indirect
39 | github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
40 | github.com/hashicorp/go-uuid v1.0.3 // indirect
41 | github.com/hashicorp/go-version v1.7.0 // indirect
42 | github.com/hashicorp/hc-install v0.9.2 // indirect
43 | github.com/hashicorp/hcl/v2 v2.23.0 // indirect
44 | github.com/hashicorp/logutils v1.0.0 // indirect
45 | github.com/hashicorp/terraform-exec v0.24.0 // indirect
46 | github.com/hashicorp/terraform-json v0.27.2 // indirect
47 | github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 // indirect
48 | github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
49 | github.com/hashicorp/terraform-svchost v0.1.1 // indirect
50 | github.com/hashicorp/yamux v0.1.2 // indirect
51 | github.com/huandu/xstrings v1.3.3 // indirect
52 | github.com/imdario/mergo v0.3.15 // indirect
53 | github.com/mattn/go-colorable v0.1.14 // indirect
54 | github.com/mattn/go-isatty v0.0.20 // indirect
55 | github.com/mattn/go-runewidth v0.0.15 // indirect
56 | github.com/mitchellh/copystructure v1.2.0 // indirect
57 | github.com/mitchellh/go-testing-interface v1.14.1 // indirect
58 | github.com/mitchellh/go-wordwrap v1.0.0 // indirect
59 | github.com/mitchellh/mapstructure v1.5.0 // indirect
60 | github.com/mitchellh/reflectwalk v1.0.2 // indirect
61 | github.com/oklog/run v1.1.0 // indirect
62 | github.com/posener/complete v1.2.3 // indirect
63 | github.com/rivo/uniseg v0.4.7 // indirect
64 | github.com/shopspring/decimal v1.3.1 // indirect
65 | github.com/spf13/cast v1.6.0 // indirect
66 | github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
67 | github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
68 | github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
69 | github.com/yuin/goldmark v1.7.7 // indirect
70 | github.com/yuin/goldmark-meta v1.1.0 // indirect
71 | github.com/zclconf/go-cty v1.17.0 // indirect
72 | go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
73 | golang.org/x/crypto v0.42.0 // indirect
74 | golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect
75 | golang.org/x/mod v0.28.0 // indirect
76 | golang.org/x/net v0.44.0 // indirect
77 | golang.org/x/sync v0.17.0 // indirect
78 | golang.org/x/sys v0.36.0 // indirect
79 | golang.org/x/text v0.30.0 // indirect
80 | golang.org/x/tools v0.37.0 // indirect
81 | google.golang.org/appengine v1.6.8 // indirect
82 | google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7 // indirect
83 | google.golang.org/grpc v1.75.1 // indirect
84 | google.golang.org/protobuf v1.36.9 // indirect
85 | gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
86 | gopkg.in/yaml.v2 v2.4.0 // indirect
87 | gopkg.in/yaml.v3 v3.0.1 // indirect
88 | )
89 |
--------------------------------------------------------------------------------
/internal/provider/datasets_data_source_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccDatasetsDataSource(t *testing.T) {
11 | // Activate httpmock
12 | httpmock.Activate()
13 | defer httpmock.DeactivateAndReset()
14 |
15 | // Mock the Superset API login response
16 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
17 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
18 |
19 | // Mock the Superset API response for fetching datasets
20 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/dataset/",
21 | httpmock.NewStringResponder(200, `{
22 | "result": [
23 | {
24 | "id": 5,
25 | "table_name": "casbin_rule",
26 | "database": {"id": 152, "database_name": "[Cloud]-Backstage-Dev-RO[d_cloud_backstage_db]"},
27 | "schema": "public",
28 | "sql": "SELECT * FROM casbin_rule",
29 | "kind": "virtual",
30 | "owners": [{"id": 5, "first_name": "John", "last_name": "Doe"}]
31 | },
32 | {
33 | "id": 6,
34 | "table_name": "example_table",
35 | "database": {"id": 153, "database_name": "[Cloud]-Backstage-Dev-RO[d_cloud_example_db]"},
36 | "schema": "public",
37 | "sql": "SELECT * FROM example_table",
38 | "kind": "virtual",
39 | "owners": [{"id": 6, "first_name": "John", "last_name": "Doe"}]
40 | }
41 | ]
42 | }`))
43 |
44 | resource.Test(t, resource.TestCase{
45 | PreCheck: func() { testAccPreCheck(t) },
46 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
47 | Steps: []resource.TestStep{
48 | // Read testing
49 | {
50 | Config: providerConfig + testAccDatasetsDataSourceConfig,
51 | Check: resource.ComposeAggregateTestCheckFunc(
52 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.#", "2"),
53 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.id", "5"),
54 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.table_name", "casbin_rule"),
55 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.database_id", "152"),
56 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.database_name", "[Cloud]-Backstage-Dev-RO[d_cloud_backstage_db]"),
57 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.schema", "public"),
58 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.sql", "SELECT * FROM casbin_rule"),
59 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.kind", "virtual"),
60 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.owners.#", "1"),
61 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.owners.0.id", "5"),
62 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.owners.0.first_name", "John"),
63 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.0.owners.0.last_name", "Doe"),
64 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.id", "6"),
65 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.table_name", "example_table"),
66 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.database_id", "153"),
67 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.database_name", "[Cloud]-Backstage-Dev-RO[d_cloud_example_db]"),
68 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.schema", "public"),
69 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.sql", "SELECT * FROM example_table"),
70 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.kind", "virtual"),
71 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.owners.#", "1"),
72 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.owners.0.id", "6"),
73 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.owners.0.first_name", "John"),
74 | resource.TestCheckResourceAttr("data.superset_datasets.test", "datasets.1.owners.0.last_name", "Doe"),
75 | ),
76 | },
77 | },
78 | })
79 | }
80 |
81 | const testAccDatasetsDataSourceConfig = `
82 | data "superset_datasets" "test" {}
83 | `
84 |
--------------------------------------------------------------------------------
/internal/provider/role_permissions_data_source.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 |
7 | "github.com/hashicorp/terraform-plugin-framework/datasource"
8 | "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
9 | "github.com/hashicorp/terraform-plugin-framework/types"
10 | "terraform-provider-superset/internal/client"
11 | )
12 |
13 | // Ensure the implementation satisfies the expected interfaces.
14 | var (
15 | _ datasource.DataSource = &rolePermissionsDataSource{}
16 | _ datasource.DataSourceWithConfigure = &rolePermissionsDataSource{}
17 | )
18 |
19 | // NewRolePermissionsDataSource is a helper function to simplify the provider implementation.
20 | func NewRolePermissionsDataSource() datasource.DataSource {
21 | return &rolePermissionsDataSource{}
22 | }
23 |
24 | // rolePermissionsDataSource is the data source implementation.
25 | type rolePermissionsDataSource struct {
26 | client *client.Client
27 | }
28 |
29 | // rolePermissionsDataSourceModel maps the data source schema data.
30 | type rolePermissionsDataSourceModel struct {
31 | RoleName types.String `tfsdk:"role_name"`
32 | Permissions []permissionModel `tfsdk:"permissions"`
33 | }
34 |
35 | // permissionModel maps the permission schema data.
36 | type permissionModel struct {
37 | ID types.Int64 `tfsdk:"id"`
38 | PermissionName types.String `tfsdk:"permission_name"`
39 | ViewMenuName types.String `tfsdk:"view_menu_name"`
40 | }
41 |
42 | // Metadata returns the data source type name.
43 | func (d *rolePermissionsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
44 | resp.TypeName = req.ProviderTypeName + "_role_permissions"
45 | }
46 |
47 | // Schema defines the schema for the data source.
48 | func (d *rolePermissionsDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
49 | resp.Schema = schema.Schema{
50 | Description: "Fetches the permissions for a role from Superset.",
51 | Attributes: map[string]schema.Attribute{
52 | "role_name": schema.StringAttribute{
53 | Description: "Name of the role.",
54 | Required: true,
55 | },
56 | "permissions": schema.ListNestedAttribute{
57 | Description: "List of permissions.",
58 | Computed: true,
59 | NestedObject: schema.NestedAttributeObject{
60 | Attributes: map[string]schema.Attribute{
61 | "id": schema.Int64Attribute{
62 | Description: "Numeric identifier of the permission.",
63 | Computed: true,
64 | },
65 | "permission_name": schema.StringAttribute{
66 | Description: "Name of the permission.",
67 | Computed: true,
68 | },
69 | "view_menu_name": schema.StringAttribute{
70 | Description: "Name of the view menu associated with the permission.",
71 | Computed: true,
72 | },
73 | },
74 | },
75 | },
76 | },
77 | }
78 | }
79 |
80 | // Read refreshes the Terraform state with the latest data.
81 | func (d *rolePermissionsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
82 | var state rolePermissionsDataSourceModel
83 |
84 | diags := req.Config.Get(ctx, &state)
85 | resp.Diagnostics.Append(diags...)
86 | if resp.Diagnostics.HasError() {
87 | return
88 | }
89 |
90 | roleID, err := d.client.GetRoleIDByName(state.RoleName.ValueString())
91 | if err != nil {
92 | resp.Diagnostics.AddError(
93 | "Unable to Find Role",
94 | fmt.Sprintf("Unable to find role with name %s: %s", state.RoleName.ValueString(), err.Error()),
95 | )
96 | return
97 | }
98 |
99 | permissions, err := d.client.GetRolePermissions(roleID)
100 | if err != nil {
101 | resp.Diagnostics.AddError(
102 | "Unable to Read Superset Role Permissions",
103 | err.Error(),
104 | )
105 | return
106 | }
107 |
108 | for _, perm := range permissions {
109 | state.Permissions = append(state.Permissions, permissionModel{
110 | ID: types.Int64Value(perm.ID),
111 | PermissionName: types.StringValue(perm.PermissionName),
112 | ViewMenuName: types.StringValue(perm.ViewMenuName),
113 | })
114 | }
115 |
116 | diags = resp.State.Set(ctx, &state)
117 | resp.Diagnostics.Append(diags...)
118 | }
119 |
120 | // Configure adds the provider configured client to the data source.
121 | func (d *rolePermissionsDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
122 | if req.ProviderData == nil {
123 | return
124 | }
125 |
126 | client, ok := req.ProviderData.(*client.Client)
127 | if !ok {
128 | resp.Diagnostics.AddError(
129 | "Unexpected Data Source Configure Type",
130 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
131 | )
132 | return
133 | }
134 |
135 | d.client = client
136 | }
137 |
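`GetRoleIDByName` and `GetRolePermissions` are likewise client methods not shown here; judging by the test responders, the latter reads `GET /api/v1/security/roles/{id}/permissions/` in the same list-and-decode style as the FetchRoles sketch earlier. The name-to-ID lookup is the more distinctive step, and a hypothetical version of it (reusing the `Role` type from that earlier sketch) could be as simple as:

```go
package client

import "fmt"

// getRoleIDByNameSketch resolves a role name to its numeric ID by scanning a
// previously fetched role list (Role as in the FetchRoles sketch). Hypothetical
// stand-in; the repository's actual lookup logic is not shown in this excerpt.
func getRoleIDByNameSketch(roles []Role, name string) (int64, error) {
	for _, r := range roles {
		if r.Name == name {
			return r.ID, nil
		}
	}
	return 0, fmt.Errorf("role %q not found", name)
}
```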
--------------------------------------------------------------------------------
/internal/provider/user_resource_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "net/http"
7 | "testing"
8 |
9 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
10 | "github.com/jarcoal/httpmock"
11 | )
12 |
13 | func TestAccUserResource(t *testing.T) {
14 | // Activate httpmock
15 | httpmock.Activate()
16 | defer httpmock.DeactivateAndReset()
17 |
18 | // Track user state for mocking
19 | var userLastName = "User"
20 | var userEmail = "test.user@example.com"
21 |
22 | // Mock the Superset API login response
23 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
24 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
25 |
26 | // Mock the Superset API response for creating users
27 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/users/",
28 | httpmock.NewStringResponder(201, `{"id": 100}`))
29 |
30 | // Mock the Superset API response for reading users by ID (dynamic based on state)
31 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/users/100",
32 | func(req *http.Request) (*http.Response, error) {
33 | resp := httpmock.NewStringResponse(200, fmt.Sprintf(`{
34 | "result": {
35 | "id": 100,
36 | "username": "test.user",
37 | "first_name": "Test",
38 | "last_name": "%s",
39 | "email": "%s",
40 | "active": true,
41 | "roles": [
42 | {"id": 4, "name": "Gamma"}
43 | ]
44 | }
45 | }`, userLastName, userEmail))
46 | resp.Header.Set("Content-Type", "application/json")
47 | return resp, nil
48 | })
49 |
50 | // Mock the Superset API response for updating users (updates state)
51 | httpmock.RegisterResponder("PUT", "http://superset-host/api/v1/security/users/100",
52 | func(req *http.Request) (*http.Response, error) {
53 | // Parse the request body to update our mock state
54 | var updateData map[string]interface{}
55 | if err := json.NewDecoder(req.Body).Decode(&updateData); err == nil {
56 | if ln, ok := updateData["last_name"].(string); ok {
57 | userLastName = ln
58 | }
59 | if em, ok := updateData["email"].(string); ok {
60 | userEmail = em
61 | }
62 | }
63 | return httpmock.NewStringResponse(200, `{}`), nil
64 | })
65 |
66 | // Mock the Superset API response for deleting users
67 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/security/users/100",
68 | httpmock.NewStringResponder(204, ""))
69 |
70 | resource.Test(t, resource.TestCase{
71 | PreCheck: func() { testAccPreCheck(t) },
72 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
73 | Steps: []resource.TestStep{
74 | // Create and Read testing
75 | {
76 | Config: providerConfig + testAccUserResourceConfig,
77 | Check: resource.ComposeAggregateTestCheckFunc(
78 | resource.TestCheckResourceAttr("superset_user.test_user", "username", "test.user"),
79 | resource.TestCheckResourceAttr("superset_user.test_user", "first_name", "Test"),
80 | resource.TestCheckResourceAttr("superset_user.test_user", "last_name", "User"),
81 | resource.TestCheckResourceAttr("superset_user.test_user", "email", "test.user@example.com"),
82 | resource.TestCheckResourceAttr("superset_user.test_user", "active", "true"),
83 | resource.TestCheckResourceAttr("superset_user.test_user", "roles.#", "1"),
84 | resource.TestCheckResourceAttr("superset_user.test_user", "roles.0", "4"),
85 | resource.TestCheckResourceAttrSet("superset_user.test_user", "id"),
86 | resource.TestCheckResourceAttrSet("superset_user.test_user", "last_updated"),
87 | ),
88 | },
89 | // ImportState testing
90 | {
91 | ResourceName: "superset_user.test_user",
92 | ImportState: true,
93 | ImportStateVerify: true,
94 | ImportStateVerifyIgnore: []string{"last_updated", "password"},
95 | },
96 | // Update and Read testing
97 | {
98 | Config: providerConfig + testAccUserResourceConfigUpdated,
99 | Check: resource.ComposeAggregateTestCheckFunc(
100 | resource.TestCheckResourceAttr("superset_user.test_user", "username", "test.user"),
101 | resource.TestCheckResourceAttr("superset_user.test_user", "first_name", "Test"),
102 | resource.TestCheckResourceAttr("superset_user.test_user", "last_name", "UpdatedUser"),
103 | resource.TestCheckResourceAttr("superset_user.test_user", "email", "test.user.updated@example.com"),
104 | resource.TestCheckResourceAttr("superset_user.test_user", "active", "true"),
105 | resource.TestCheckResourceAttr("superset_user.test_user", "roles.#", "1"),
106 | resource.TestCheckResourceAttr("superset_user.test_user", "roles.0", "4"),
107 | ),
108 | },
109 | // Delete testing automatically occurs in TestCase
110 | },
111 | })
112 | }
113 |
114 | const testAccUserResourceConfig = `
115 | resource "superset_user" "test_user" {
116 | username = "test.user"
117 | first_name = "Test"
118 | last_name = "User"
119 | email = "test.user@example.com"
120 | password = "S0meStr0ngPass!"
121 | active = true
122 | roles = [4]
123 | }
124 | `
125 |
126 | const testAccUserResourceConfigUpdated = `
127 | resource "superset_user" "test_user" {
128 | username = "test.user"
129 | first_name = "Test"
130 | last_name = "UpdatedUser"
131 | email = "test.user.updated@example.com"
132 | active = true
133 | roles = [4]
134 | }
135 | `
136 |
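The stateful responders above let the update step observe its own changes, but nothing verifies that the update or the final delete request was actually sent. The repository does not do this; as a sketch, httpmock's call-count bookkeeping could be used for that kind of assertion:

```go
package provider

import (
	"testing"

	"github.com/jarcoal/httpmock"
)

// assertCalledSketch uses httpmock's call counters to verify that a mocked
// endpoint was actually hit during the test. Illustrative only.
func assertCalledSketch(t *testing.T, method, url string) {
	t.Helper()
	if httpmock.GetCallCountInfo()[method+" "+url] == 0 {
		t.Errorf("expected at least one %s call to %s", method, url)
	}
}
```

For example, calling `assertCalledSketch(t, "DELETE", "http://superset-host/api/v1/security/users/100")` right after `resource.Test` returns (the deferred `httpmock.DeactivateAndReset()` has not run yet at that point) would confirm that the destroy step reached the mock.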
--------------------------------------------------------------------------------
/internal/provider/users_data_source.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 |
7 | "github.com/hashicorp/terraform-plugin-framework/datasource"
8 | "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
9 | "github.com/hashicorp/terraform-plugin-framework/types"
10 | "terraform-provider-superset/internal/client"
11 | )
12 |
13 | // Ensure the implementation satisfies the expected interfaces.
14 | var (
15 | _ datasource.DataSource = &usersDataSource{}
16 | _ datasource.DataSourceWithConfigure = &usersDataSource{}
17 | )
18 |
19 | // NewUsersDataSource is a helper function to simplify the provider implementation.
20 | func NewUsersDataSource() datasource.DataSource {
21 | return &usersDataSource{}
22 | }
23 |
24 | // usersDataSource is the data source implementation.
25 | type usersDataSource struct {
26 | client *client.Client
27 | }
28 |
29 | // usersDataSourceModel maps the data source schema data.
30 | type usersDataSourceModel struct {
31 | Users []userModel `tfsdk:"users"`
32 | }
33 |
34 | // userModel maps the user schema data.
35 | type userModel struct {
36 | ID types.Int64 `tfsdk:"id"`
37 | Username types.String `tfsdk:"username"`
38 | FirstName types.String `tfsdk:"first_name"`
39 | LastName types.String `tfsdk:"last_name"`
40 | Email types.String `tfsdk:"email"`
41 | Active types.Bool `tfsdk:"active"`
42 | Roles []roleModel `tfsdk:"roles"`
43 | }
44 |
45 | // Metadata returns the data source type name.
46 | func (d *usersDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
47 | resp.TypeName = req.ProviderTypeName + "_users"
48 | }
49 |
50 | // Schema defines the schema for the data source.
51 | func (d *usersDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
52 | resp.Schema = schema.Schema{
53 | Description: "Fetches the list of users from Superset.",
54 | Attributes: map[string]schema.Attribute{
55 | "users": schema.ListNestedAttribute{
56 | Description: "List of users.",
57 | Computed: true,
58 | NestedObject: schema.NestedAttributeObject{
59 | Attributes: map[string]schema.Attribute{
60 | "id": schema.Int64Attribute{
61 | Description: "Numeric identifier of the user.",
62 | Computed: true,
63 | },
64 | "username": schema.StringAttribute{
65 | Description: "Username of the user.",
66 | Computed: true,
67 | },
68 | "first_name": schema.StringAttribute{
69 | Description: "First name of the user.",
70 | Computed: true,
71 | },
72 | "last_name": schema.StringAttribute{
73 | Description: "Last name of the user.",
74 | Computed: true,
75 | },
76 | "email": schema.StringAttribute{
77 | Description: "Email address of the user.",
78 | Computed: true,
79 | },
80 | "active": schema.BoolAttribute{
81 | Description: "Whether the user is active.",
82 | Computed: true,
83 | },
84 | "roles": schema.ListNestedAttribute{
85 | Description: "List of roles assigned to the user.",
86 | Computed: true,
87 | NestedObject: schema.NestedAttributeObject{
88 | Attributes: map[string]schema.Attribute{
89 | "id": schema.Int64Attribute{
90 | Description: "Numeric identifier of the role.",
91 | Computed: true,
92 | },
93 | "name": schema.StringAttribute{
94 | Description: "Name of the role.",
95 | Computed: true,
96 | },
97 | },
98 | },
99 | },
100 | },
101 | },
102 | },
103 | },
104 | }
105 | }
106 |
107 | // Read refreshes the Terraform state with the latest data.
108 | func (d *usersDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
109 | var state usersDataSourceModel
110 |
111 | users, err := d.client.FetchUsers()
112 | if err != nil {
113 | resp.Diagnostics.AddError(
114 | "Unable to Read Superset Users",
115 | err.Error(),
116 | )
117 | return
118 | }
119 |
120 | for _, user := range users {
121 | userRoles := make([]roleModel, len(user.Roles))
122 | for i, role := range user.Roles {
123 | userRoles[i] = roleModel{
124 | ID: types.Int64Value(role.ID),
125 | Name: types.StringValue(role.Name),
126 | }
127 | }
128 |
129 | state.Users = append(state.Users, userModel{
130 | ID: types.Int64Value(user.ID),
131 | Username: types.StringValue(user.Username),
132 | FirstName: types.StringValue(user.FirstName),
133 | LastName: types.StringValue(user.LastName),
134 | Email: types.StringValue(user.Email),
135 | Active: types.BoolValue(user.Active),
136 | Roles: userRoles,
137 | })
138 | }
139 |
140 | diags := resp.State.Set(ctx, &state)
141 | resp.Diagnostics.Append(diags...)
142 | }
143 |
144 | // Configure adds the provider configured client to the data source.
145 | func (d *usersDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
146 | if req.ProviderData == nil {
147 | return
148 | }
149 |
150 | client, ok := req.ProviderData.(*client.Client)
151 | if !ok {
152 | resp.Diagnostics.AddError(
153 | "Unexpected Data Source Configure Type",
154 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
155 | )
156 | return
157 | }
158 |
159 | d.client = client
160 | }
161 |
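`FetchUsers` is another `internal/client` method that is not included here. Judging from the model above and the mocked `GET /api/v1/security/users/?q=(page_size:5000)` response, it presumably follows the same list-and-decode pattern as the FetchRoles sketch and returns values shaped roughly like the assumed types below; the real client types may carry additional fields from the Superset API.

```go
package client

// Assumed data shapes only: these mirror the fields the users data source
// maps into state (Role repeats the type from the earlier FetchRoles sketch).
type Role struct {
	ID   int64  `json:"id"`
	Name string `json:"name"`
}

type User struct {
	ID        int64  `json:"id"`
	Username  string `json:"username"`
	FirstName string `json:"first_name"`
	LastName  string `json:"last_name"`
	Email     string `json:"email"`
	Active    bool   `json:"active"`
	Roles     []Role `json:"roles"`
}
```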
--------------------------------------------------------------------------------
/internal/provider/databases_data_source.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 |
7 | "github.com/hashicorp/terraform-plugin-framework/datasource"
8 | "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
9 | "github.com/hashicorp/terraform-plugin-framework/types"
10 | "github.com/hashicorp/terraform-plugin-log/tflog"
11 | "terraform-provider-superset/internal/client"
12 | )
13 |
14 | // Ensure the implementation satisfies the expected interfaces.
15 | var (
16 | _ datasource.DataSource = &databasesDataSource{}
17 | _ datasource.DataSourceWithConfigure = &databasesDataSource{}
18 | )
19 |
20 | // NewDatabasesDataSource is a helper function to simplify the provider implementation.
21 | func NewDatabasesDataSource() datasource.DataSource {
22 | return &databasesDataSource{}
23 | }
24 |
25 | // databasesDataSource is the data source implementation.
26 | type databasesDataSource struct {
27 | client *client.Client
28 | }
29 |
30 | // databasesDataSourceModel maps the data source schema data.
31 | type databasesDataSourceModel struct {
32 | Databases []databaseModel `tfsdk:"databases"`
33 | }
34 |
35 | // databaseModel maps the database schema data.
36 | type databaseModel struct {
37 | ID types.Int64 `tfsdk:"id"`
38 | DatabaseName types.String `tfsdk:"database_name"`
39 | }
40 |
41 | // Metadata returns the data source type name.
42 | func (d *databasesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
43 | tflog.Debug(ctx, "Starting Metadata method")
44 | resp.TypeName = req.ProviderTypeName + "_databases"
45 | tflog.Debug(ctx, "Completed Metadata method", map[string]interface{}{
46 | "type_name": resp.TypeName,
47 | })
48 | }
49 |
50 | // Schema defines the schema for the data source.
51 | func (d *databasesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
52 | tflog.Debug(ctx, "Starting Schema method")
53 | resp.Schema = schema.Schema{
54 | Description: "Fetches the list of databases from Superset.",
55 | Attributes: map[string]schema.Attribute{
56 | "databases": schema.ListNestedAttribute{
57 | Description: "List of databases.",
58 | Computed: true,
59 | NestedObject: schema.NestedAttributeObject{
60 | Attributes: map[string]schema.Attribute{
61 | "id": schema.Int64Attribute{
62 | Description: "Numeric identifier of the database.",
63 | Computed: true,
64 | },
65 | "database_name": schema.StringAttribute{
66 | Description: "Name of the database.",
67 | Computed: true,
68 | },
69 | },
70 | },
71 | },
72 | },
73 | }
74 | tflog.Debug(ctx, "Completed Schema method")
75 | }
76 |
77 | // Read refreshes the Terraform state with the latest data.
78 | func (d *databasesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
79 | tflog.Debug(ctx, "Starting Read method")
80 |
81 | var state databasesDataSourceModel
82 |
83 | dbInfosRaw, err := d.client.GetAllDatabases()
84 | if err != nil {
85 | tflog.Error(ctx, "Error fetching databases", map[string]interface{}{
86 | "error": err.Error(),
87 | })
88 | resp.Diagnostics.AddError(
89 | "Unable to Read Superset Databases",
90 | err.Error(),
91 | )
92 | return
93 | }
94 |
95 | for _, db := range dbInfosRaw {
96 | tflog.Debug(ctx, "Processing database", map[string]interface{}{
97 | "database": db,
98 | })
99 |
100 | 		// encoding/json decodes JSON numbers into float64 when the target is interface{}, so accept either int64 or float64 for the ID.
101 | id, ok := db["id"].(int64)
102 | if !ok {
103 | if floatID, ok := db["id"].(float64); ok {
104 | id = int64(floatID)
105 | } else {
106 | tflog.Error(ctx, "Type assertion error for database ID", map[string]interface{}{
107 | "database_id_type": fmt.Sprintf("%T", db["id"]),
108 | })
109 | resp.Diagnostics.AddError(
110 | "Type Assertion Error",
111 | fmt.Sprintf("Expected int64 or float64 for database ID, got: %T", db["id"]),
112 | )
113 | return
114 | }
115 | }
116 |
117 | name, ok := db["database_name"].(string)
118 | if !ok {
119 | tflog.Error(ctx, "Type assertion error for database name", map[string]interface{}{
120 | "database_name_type": fmt.Sprintf("%T", db["database_name"]),
121 | })
122 | resp.Diagnostics.AddError(
123 | "Type Assertion Error",
124 | fmt.Sprintf("Expected string for database name, got: %T", db["database_name"]),
125 | )
126 | return
127 | }
128 |
129 | state.Databases = append(state.Databases, databaseModel{
130 | ID: types.Int64Value(id),
131 | DatabaseName: types.StringValue(name),
132 | })
133 | }
134 |
135 | diags := resp.State.Set(ctx, &state)
136 | resp.Diagnostics.Append(diags...)
137 |
138 | tflog.Debug(ctx, "Completed Read method")
139 | }
140 |
141 | // Configure adds the provider configured client to the data source.
142 | func (d *databasesDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
143 | tflog.Debug(ctx, "Starting Configure method")
144 | if req.ProviderData == nil {
145 | tflog.Debug(ctx, "No provider data received")
146 | return
147 | }
148 |
149 | client, ok := req.ProviderData.(*client.Client)
150 | if !ok {
151 | tflog.Error(ctx, "Unexpected Data Source Configure Type", map[string]interface{}{
152 | "expected": "*client.Client",
153 | "got": fmt.Sprintf("%T", req.ProviderData),
154 | })
155 | resp.Diagnostics.AddError(
156 | "Unexpected Data Source Configure Type",
157 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
158 | )
159 | return
160 | }
161 |
162 | d.client = client
163 | tflog.Debug(ctx, "Completed Configure method")
164 | }
165 |
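The float64 fallback in `Read` exists because `GetAllDatabases` (again in the unshown `internal/client` package) apparently returns loosely typed maps, and `encoding/json` decodes every JSON number into a float64 when the target is `interface{}`. A minimal sketch of a method with that behaviour, assuming the plain `GET /api/v1/database/` listing endpoint, makes the origin of those float64 IDs explicit:

```go
package client

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// getAllDatabasesSketch illustrates why the data source must tolerate float64
// IDs: decoding into map[string]interface{} turns every JSON number into a
// float64. Hypothetical stand-in for the real GetAllDatabases method.
func getAllDatabasesSketch(httpClient *http.Client, host string) ([]map[string]interface{}, error) {
	resp, err := httpClient.Get(host + "/api/v1/database/")
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status %d when listing databases", resp.StatusCode)
	}

	var payload struct {
		Result []map[string]interface{} `json:"result"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return nil, err
	}
	return payload.Result, nil // payload.Result[i]["id"] is a float64 at this point
}
```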
--------------------------------------------------------------------------------
/internal/provider/databases_resource_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccDatabaseResource(t *testing.T) {
11 | // Activate httpmock
12 | httpmock.Activate()
13 | defer httpmock.DeactivateAndReset()
14 |
15 | // Mock the Superset API login response
16 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
17 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
18 |
19 | // Mock the Superset API CSRF token response
20 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/csrf_token/",
21 | httpmock.NewStringResponder(200, `{"result": "fake-csrf-token"}`))
22 |
23 | // Mock the Superset API response for creating a database
24 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/database/",
25 | httpmock.NewStringResponder(201, `{
26 | "id": 208,
27 | "result": {
28 | "allow_ctas": false,
29 | "allow_cvas": false,
30 | "allow_dml": false,
31 | "allow_run_async": true,
32 | "cache_timeout": null,
33 | "configuration_method": "sqlalchemy_form",
34 | "database_name": "DWH_database_connection4",
35 | "driver": "psycopg2",
36 | "expose_in_sqllab": true,
37 | "extra": "{\"client_encoding\": \"utf8\"}",
38 | "parameters": {
39 | "database": "superset_db",
40 | "encryption": false,
41 | "host": "pg.db.ro.domain.com",
42 | "password": "XXXXXXXXXX",
43 | "port": 5432,
44 | "query": {},
45 | "username": "superset_user"
46 | },
47 | "sqlalchemy_uri": "postgresql://superset_user:XXXXXXXXXX@pg.db.ro.domain.com:5432/superset_db",
48 | "uuid": "f5007595-5a43-45d8-a1da-9612bdb12b22"
49 | }
50 | }`))
51 |
52 | // Mock the Superset API response for reading a database connection
53 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/database/208/connection",
54 | httpmock.NewStringResponder(200, `{
55 | "result": {
56 | "allow_ctas": false,
57 | "allow_cvas": false,
58 | "allow_dml": false,
59 | "allow_run_async": true,
60 | "cache_timeout": null,
61 | "configuration_method": "sqlalchemy_form",
62 | "database_name": "DWH_database_connection4",
63 | "driver": "psycopg2",
64 | "expose_in_sqllab": true,
65 | "extra": "{\"client_encoding\": \"utf8\"}",
66 | "parameters": {
67 | "database": "superset_db",
68 | "encryption": false,
69 | "host": "pg.db.ro.domain.com",
70 | "password": "XXXXXXXXXX",
71 | "port": 5432,
72 | "query": {},
73 | "username": "superset_user"
74 | },
75 | "sqlalchemy_uri": "postgresql://superset_user:XXXXXXXXXX@pg.db.ro.domain.com:5432/superset_db",
76 | "uuid": "f5007595-5a43-45d8-a1da-9612bdb12b22"
77 | }
78 | }`))
79 |
80 | // Mock the Superset API response for updating a database connection
81 | httpmock.RegisterResponder("PUT", "http://superset-host/api/v1/database/208",
82 | httpmock.NewStringResponder(200, `{
83 | "id": 208,
84 | "result": {
85 | "allow_ctas": false,
86 | "allow_cvas": false,
87 | "allow_dml": false,
88 | "allow_run_async": true,
89 | "cache_timeout": null,
90 | "configuration_method": "sqlalchemy_form",
91 | "database_name": "DWH_database_connection4",
92 | "driver": "psycopg2",
93 | "expose_in_sqllab": false,
94 | "extra": "{\"client_encoding\": \"utf8\"}",
95 | "parameters": {
96 | "database": "superset_db",
97 | "encryption": false,
98 | "host": "pg.db.ro.domain.com",
99 | "password": "XXXXXXXXXX",
100 | "port": 5432,
101 | "query": {},
102 | "username": "superset_user"
103 | },
104 | "sqlalchemy_uri": "postgresql://superset_user:XXXXXXXXXX@pg.db.ro.domain.com:5432/superset_db",
105 | "uuid": "f5007595-5a43-45d8-a1da-9612bdb12b22"
106 | }
107 | }`))
108 |
109 | // Mock the Superset API response for deleting a database
110 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/database/208",
111 | httpmock.NewStringResponder(200, ""))
112 |
113 | resource.Test(t, resource.TestCase{
114 | PreCheck: func() { testAccPreCheck(t) },
115 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
116 | Steps: []resource.TestStep{
117 | // Create and Read testing
118 | {
119 | Config: providerConfig + testAccDatabaseResourceConfig,
120 | Check: resource.ComposeAggregateTestCheckFunc(
121 | resource.TestCheckResourceAttr("superset_database.test", "connection_name", "DWH_database_connection4"),
122 | resource.TestCheckResourceAttr("superset_database.test", "db_engine", "postgresql"),
123 | resource.TestCheckResourceAttr("superset_database.test", "db_user", "superset_user"),
124 | resource.TestCheckResourceAttr("superset_database.test", "db_host", "pg.db.ro.domain.com"),
125 | resource.TestCheckResourceAttr("superset_database.test", "db_port", "5432"),
126 | resource.TestCheckResourceAttr("superset_database.test", "db_name", "superset_db"),
127 | resource.TestCheckResourceAttr("superset_database.test", "allow_ctas", "false"),
128 | resource.TestCheckResourceAttr("superset_database.test", "allow_cvas", "false"),
129 | resource.TestCheckResourceAttr("superset_database.test", "allow_dml", "false"),
130 | resource.TestCheckResourceAttr("superset_database.test", "allow_run_async", "true"),
131 | resource.TestCheckResourceAttr("superset_database.test", "expose_in_sqllab", "true"),
132 | ),
133 | },
134 | },
135 | })
136 | }
137 |
138 | const testAccDatabaseResourceConfig = `
139 | resource "superset_database" "test" {
140 | connection_name = "DWH_database_connection4"
141 | db_engine = "postgresql"
142 | db_user = "superset_user"
143 | db_pass = "dbpassword"
144 | db_host = "pg.db.ro.domain.com"
145 | db_port = 5432
146 | db_name = "superset_db"
147 | allow_ctas = false
148 | allow_cvas = false
149 | allow_dml = false
150 | allow_run_async = true
151 | expose_in_sqllab = true
152 | }
153 | `
154 |
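Unlike the other resource tests, this one also mocks `GET /api/v1/security/csrf_token/`, which suggests the client obtains a CSRF token before creating or updating database connections. The helper below is purely a sketch of that step, assuming the token is returned in the `result` field exactly as mocked above and later echoed back in an `X-CSRFToken` header; the real client's session and header handling may differ.

```go
package client

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// fetchCSRFTokenSketch retrieves a CSRF token so that later POST/PUT requests
// can echo it back (commonly via an X-CSRFToken header). Illustrative only;
// the repository's client may manage sessions and headers differently.
func fetchCSRFTokenSketch(httpClient *http.Client, host, accessToken string) (string, error) {
	req, err := http.NewRequest(http.MethodGet, host+"/api/v1/security/csrf_token/", nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("Authorization", "Bearer "+accessToken)

	resp, err := httpClient.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("unexpected status %d when fetching CSRF token", resp.StatusCode)
	}

	var payload struct {
		Result string `json:"result"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return "", err
	}
	return payload.Result, nil
}
```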
--------------------------------------------------------------------------------
/internal/provider/role_permissions_resource_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
7 | "github.com/jarcoal/httpmock"
8 | )
9 |
10 | func TestAccRolePermissionsResource(t *testing.T) {
11 |
12 | t.Run("CreateRead", func(t *testing.T) {
13 | httpmock.Activate()
14 | defer httpmock.DeactivateAndReset()
15 |
16 | // Mock the Superset API login response
17 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
18 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
19 |
20 | // Mock the Superset API response for reading roles by ID
21 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles/129",
22 | httpmock.NewStringResponder(200, `{"result": {"id": 129, "name": "DWH-DB-Connect"}}`))
23 |
24 | // Mock the Superset API response for fetching roles
25 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles?q=(page_size:5000)",
26 | httpmock.NewStringResponder(200, `{
27 | "result": [
28 | {"id": 129, "name": "DWH-DB-Connect"}
29 | ]
30 | }`))
31 |
32 | // Mock the Superset API response for fetching permissions resources
33 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/permissions-resources?q=(page_size:5000)",
34 | httpmock.NewStringResponder(200, `{ "result": [
35 | {
36 | "id": 240,
37 | "permission": {
38 | "name": "database_access"
39 | },
40 | "view_menu": {
41 | "name": "[SelfPostgreSQL].(id:1)"
42 | }
43 | }
44 | ]}`))
45 |
46 | // Mock the Superset API response for fetching a specific permission by name and view
47 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/permissions?q=(filters:[(permission_name:eq:database_access),(view_menu_name:eq:[SelfPostgreSQL].(id:1))])",
48 | httpmock.NewStringResponder(200, `{ "result": [
49 | {
50 | "id": 240
51 | }
52 | ]}`))
53 |
54 | // Mock the Superset API response for updating role permissions
55 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/roles/129/permissions",
56 | httpmock.NewStringResponder(200, `{"status": "success"}`))
57 |
58 | // Mock the Superset API response for fetching role permissions
59 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles/129/permissions/",
60 | httpmock.NewStringResponder(200, `{ "result": [
61 | {
62 | "id": 240,
63 | "permission_name": "database_access",
64 | "view_menu_name": "[SelfPostgreSQL].(id:1)"
65 | }
66 | ]}`))
67 |
68 | // Mock the Superset API response for deleting role permissions
69 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/security/roles/129/permissions",
70 | httpmock.NewStringResponder(204, ""))
71 |
72 | resource.Test(t, resource.TestCase{
73 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
74 | Steps: []resource.TestStep{
75 | // Create and Read testing
76 | {
77 | Config: providerConfig + `
78 | resource "superset_role_permissions" "team" {
79 | role_name = "DWH-DB-Connect"
80 | resource_permissions = [
81 | {
82 | permission = "database_access"
83 | view_menu = "[SelfPostgreSQL].(id:1)"
84 | }
85 | ]
86 | }
87 | `,
88 | Check: resource.ComposeAggregateTestCheckFunc(
89 | resource.TestCheckResourceAttr("superset_role_permissions.team", "role_name", "DWH-DB-Connect"),
90 | resource.TestCheckResourceAttr("superset_role_permissions.team", "resource_permissions.#", "1"),
91 | resource.TestCheckResourceAttr("superset_role_permissions.team", "resource_permissions.0.permission", "database_access"),
92 | resource.TestCheckResourceAttr("superset_role_permissions.team", "resource_permissions.0.view_menu", "[SelfPostgreSQL].(id:1)"),
93 | ),
94 | },
95 | // ImportState testing
96 | {
97 | ResourceName: "superset_role_permissions.team",
98 | ImportState: true,
99 | ImportStateVerify: true,
100 | ImportStateVerifyIgnore: []string{"last_updated"},
101 | },
102 | },
103 | })
104 | })
105 |
106 | t.Run("UpdateRead", func(t *testing.T) {
107 | httpmock.Activate()
108 | defer httpmock.DeactivateAndReset()
109 |
110 | // Mock the Superset API login response
111 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
112 | httpmock.NewStringResponder(200, `{"access_token": "fake-token"}`))
113 |
114 | // Mock the Superset API response for fetching roles
115 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles?q=(page_size:5000)",
116 | httpmock.NewStringResponder(200, `{
117 | "result": [
118 | {"id": 129, "name": "DWH-DB-Connect"}
119 | ]
120 | }`))
121 |
122 | // Mock the Superset API response for fetching permissions resources
123 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/permissions-resources?q=(page_size:5000)",
124 | httpmock.NewStringResponder(200, `{ "result": [
125 | {
126 | "id": 240,
127 | "permission": {
128 | "name": "database_access"
129 | },
130 | "view_menu": {
131 | "name": "[SelfPostgreSQL].(id:1)"
132 | }
133 | },
134 | {
135 | "id": 241,
136 | "permission": {
137 | "name": "schema_access"
138 | },
139 | "view_menu": {
140 | "name": "[Trino].[devoriginationzestorage]"
141 | }
142 | }
143 | ]}`))
144 |
145 | // Mock the Superset API response for fetching a specific permission by name and view
146 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/permissions?q=(filters:[(permission_name:eq:database_access),(view_menu_name:eq:[SelfPostgreSQL].(id:1))])",
147 | httpmock.NewStringResponder(200, `{ "result": [
148 | {
149 | "id": 240
150 | }
151 | ]}`))
152 |
153 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/permissions?q=(filters:[(permission_name:eq:schema_access),(view_menu_name:eq:[Trino].[devoriginationzestorage])])",
154 | httpmock.NewStringResponder(200, `{ "result": [
155 | {
156 | "id": 241
157 | }
158 | ]}`))
159 |
160 | // Mock the Superset API response for updating role permissions
161 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/roles/129/permissions",
162 | httpmock.NewStringResponder(200, `{"status": "success"}`))
163 |
164 | // Mock the Superset API response for fetching role permissions
165 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/security/roles/129/permissions/",
166 | httpmock.NewStringResponder(200, `{ "result": [
167 | {
168 | "id": 240,
169 | "permission_name": "database_access",
170 | "view_menu_name": "[SelfPostgreSQL].(id:1)"
171 | },
172 | {
173 | "id": 241,
174 | "permission_name": "schema_access",
175 | "view_menu_name": "[Trino].[devoriginationzestorage]"
176 | }
177 | ]}`))
178 |
179 | // Mock the Superset API response for deleting role permissions
180 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/security/roles/129/permissions",
181 | httpmock.NewStringResponder(204, ""))
182 |
183 | resource.Test(t, resource.TestCase{
184 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
185 | Steps: []resource.TestStep{
186 | // Update and Read testing
187 | {
188 | Config: providerConfig + `
189 | resource "superset_role_permissions" "team" {
190 | role_name = "DWH-DB-Connect"
191 | resource_permissions = [
192 | {
193 | permission = "database_access"
194 | view_menu = "[SelfPostgreSQL].(id:1)"
195 | },
196 | {
197 | permission = "schema_access"
198 | view_menu = "[Trino].[devoriginationzestorage]"
199 | },
200 | ]
201 | }
202 | `,
203 | Check: resource.ComposeAggregateTestCheckFunc(
204 | resource.TestCheckResourceAttr("superset_role_permissions.team", "role_name", "DWH-DB-Connect"),
205 | resource.TestCheckResourceAttr("superset_role_permissions.team", "resource_permissions.#", "2"),
206 | resource.TestCheckResourceAttr("superset_role_permissions.team", "resource_permissions.1.permission", "schema_access"),
207 | resource.TestCheckResourceAttr("superset_role_permissions.team", "resource_permissions.1.view_menu", "[Trino].[devoriginationzestorage]"),
208 | ),
209 | },
210 | },
211 | })
212 | })
213 | }
214 |
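Both subtests mock `POST /api/v1/security/roles/129/permissions`, so the resource presumably resolves each `permission`/`view_menu` pair to a numeric permission-view ID (via the `permissions?q=(filters:...)` responders) and then posts the collected IDs in one request. The sketch below shows that final call under the assumption that the payload uses Superset's `permission_view_menu_ids` field; the repository's actual client code is not shown here and may differ.

```go
package client

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// setRolePermissionsSketch posts resolved permission-view-menu IDs to a role.
// The "permission_view_menu_ids" field name is an assumption about the
// Superset security API; it is not taken from this repository's code.
func setRolePermissionsSketch(httpClient *http.Client, host string, roleID int64, permissionViewIDs []int64) error {
	body, err := json.Marshal(map[string][]int64{"permission_view_menu_ids": permissionViewIDs})
	if err != nil {
		return err
	}

	url := fmt.Sprintf("%s/api/v1/security/roles/%d/permissions", host, roleID)
	resp, err := httpClient.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %d when updating role permissions", resp.StatusCode)
	}
	return nil
}
```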
--------------------------------------------------------------------------------
/internal/provider/provider.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "os"
6 |
7 | "terraform-provider-superset/internal/client"
8 |
9 | "github.com/hashicorp/terraform-plugin-framework/datasource"
10 | "github.com/hashicorp/terraform-plugin-framework/path"
11 | "github.com/hashicorp/terraform-plugin-framework/provider"
12 | "github.com/hashicorp/terraform-plugin-framework/provider/schema"
13 | "github.com/hashicorp/terraform-plugin-framework/resource"
14 | "github.com/hashicorp/terraform-plugin-framework/types"
15 | "github.com/hashicorp/terraform-plugin-log/tflog"
16 | )
17 |
18 | // Ensure the implementation satisfies the expected interfaces.
19 | var (
20 | _ provider.Provider = &supersetProvider{}
21 | )
22 |
23 | // New is a helper function to simplify provider server and testing implementation.
24 | func New(version string) func() provider.Provider {
25 | return func() provider.Provider {
26 | return &supersetProvider{
27 | version: version,
28 | }
29 | }
30 | }
31 |
32 | // supersetProvider is the provider implementation.
33 | type supersetProvider struct {
34 | version string
35 | }
36 |
37 | // supersetProviderModel maps provider schema data to a Go type.
38 | type supersetProviderModel struct {
39 | Host types.String `tfsdk:"host"`
40 | Username types.String `tfsdk:"username"`
41 | Password types.String `tfsdk:"password"`
42 | }
43 |
44 | // Metadata returns the provider type name.
45 | func (p *supersetProvider) Metadata(_ context.Context, _ provider.MetadataRequest, resp *provider.MetadataResponse) {
46 | resp.TypeName = "superset"
47 | resp.Version = p.version
48 | }
49 |
50 | // Schema defines the provider-level schema for configuration data.
51 | func (p *supersetProvider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {
52 | resp.Schema = schema.Schema{
53 | Description: "Superset provider for managing Superset resources.",
54 | Attributes: map[string]schema.Attribute{
55 | "host": schema.StringAttribute{
56 | Description: "The URL of the Superset instance. This should include the protocol (http or https) and the hostname or IP address. Example: 'https://superset.example.com'.",
57 | Optional: true,
58 | },
59 | "username": schema.StringAttribute{
60 | Description: "The username to authenticate with Superset. This user should have the necessary permissions to manage resources within Superset.",
61 | Optional: true,
62 | },
63 | "password": schema.StringAttribute{
64 | Description: "The password to authenticate with Superset. This value is sensitive and will not be displayed in logs or state files.",
65 | Optional: true,
66 | Sensitive: true,
67 | },
68 | },
69 | }
70 | }
71 |
72 | // Configure prepares a Superset API client for data sources and resources.
73 | func (p *supersetProvider) Configure(ctx context.Context, req provider.ConfigureRequest, resp *provider.ConfigureResponse) {
74 | tflog.Info(ctx, "Configuring Superset client")
75 |
76 | // Retrieve provider data from configuration
77 | var config supersetProviderModel
78 | diags := req.Config.Get(ctx, &config)
79 | resp.Diagnostics.Append(diags...)
80 | if resp.Diagnostics.HasError() {
81 | return
82 | }
83 |
84 | // If the practitioner provided a configuration value for any of the attributes, it must be a known value.
85 | if config.Host.IsUnknown() {
86 | resp.Diagnostics.AddAttributeError(
87 | path.Root("host"),
88 | "Unknown Superset API Host",
89 | "The provider cannot create the Superset API client as there is an unknown configuration value for the Superset API host. "+
90 | "Either target apply the source of the value first, set the value statically in the configuration, or use the SUPERSET_HOST environment variable.",
91 | )
92 | }
93 |
94 | if config.Username.IsUnknown() {
95 | resp.Diagnostics.AddAttributeError(
96 | path.Root("username"),
97 | "Unknown Superset API Username",
98 | "The provider cannot create the Superset API client as there is an unknown configuration value for the Superset API username. "+
99 | "Either target apply the source of the value first, set the value statically in the configuration, or use the SUPERSET_USERNAME environment variable.",
100 | )
101 | }
102 |
103 | if config.Password.IsUnknown() {
104 | resp.Diagnostics.AddAttributeError(
105 | path.Root("password"),
106 | "Unknown Superset API Password",
107 | "The provider cannot create the Superset API client as there is an unknown configuration value for the Superset API password. "+
108 | "Either target apply the source of the value first, set the value statically in the configuration, or use the SUPERSET_PASSWORD environment variable.",
109 | )
110 | }
111 |
112 | if resp.Diagnostics.HasError() {
113 | return
114 | }
115 |
116 | // Default to environment variables, but override with Terraform configuration values if set.
117 | host := os.Getenv("SUPERSET_HOST")
118 | username := os.Getenv("SUPERSET_USERNAME")
119 | password := os.Getenv("SUPERSET_PASSWORD")
120 |
121 | if !config.Host.IsNull() {
122 | host = config.Host.ValueString()
123 | }
124 |
125 | if !config.Username.IsNull() {
126 | username = config.Username.ValueString()
127 | }
128 |
129 | if !config.Password.IsNull() {
130 | password = config.Password.ValueString()
131 | }
132 |
133 | // If any of the expected configurations are missing, return errors with provider-specific guidance.
134 | if host == "" {
135 | resp.Diagnostics.AddAttributeError(
136 | path.Root("host"),
137 | "Missing Superset API Host",
138 | "The provider cannot create the Superset API client as there is a missing or empty value for the Superset API host. "+
139 | "Set the host value in the configuration or use the SUPERSET_HOST environment variable. "+
140 | "If either is already set, ensure the value is not empty.",
141 | )
142 | }
143 |
144 | if username == "" {
145 | resp.Diagnostics.AddAttributeError(
146 | path.Root("username"),
147 | "Missing Superset API Username",
148 | "The provider cannot create the Superset API client as there is a missing or empty value for the Superset API username. "+
149 | "Set the username value in the configuration or use the SUPERSET_USERNAME environment variable. "+
150 | "If either is already set, ensure the value is not empty.",
151 | )
152 | }
153 |
154 | if password == "" {
155 | resp.Diagnostics.AddAttributeError(
156 | path.Root("password"),
157 | "Missing Superset API Password",
158 | "The provider cannot create the Superset API client as there is a missing or empty value for the Superset API password. "+
159 | "Set the password value in the configuration or use the SUPERSET_PASSWORD environment variable. "+
160 | "If either is already set, ensure the value is not empty.",
161 | )
162 | }
163 |
164 | if resp.Diagnostics.HasError() {
165 | return
166 | }
167 |
168 | // Add structured log fields
169 | ctx = tflog.SetField(ctx, "superset_host", host)
170 | ctx = tflog.SetField(ctx, "superset_username", username)
171 | ctx = tflog.SetField(ctx, "superset_password", password)
172 | ctx = tflog.MaskFieldValuesWithFieldKeys(ctx, "superset_username")
173 | ctx = tflog.MaskFieldValuesWithFieldKeys(ctx, "superset_password")
174 |
175 | tflog.Debug(ctx, "Creating Superset client")
176 |
177 | // Create a new Superset client using the configuration values
178 | client, err := client.NewClient(host, username, password)
179 | if err != nil {
180 | resp.Diagnostics.AddError(
181 | "Unable to Create Superset API Client",
182 | "An unexpected error occurred when creating the Superset API client. "+
183 | "If the error is not clear, please contact the provider developers.\n\n"+
184 | "Superset Client Error: "+err.Error(),
185 | )
186 | return
187 | }
188 |
189 | // Make the Superset client available during DataSource and Resource type Configure methods.
190 | resp.DataSourceData = client
191 | resp.ResourceData = client
192 |
193 | tflog.Info(ctx, "Configured Superset client", map[string]any{"success": true})
194 | }
195 |
196 | // DataSources defines the data sources implemented in the provider.
197 | func (p *supersetProvider) DataSources(_ context.Context) []func() datasource.DataSource {
198 | return []func() datasource.DataSource{
199 | NewRolesDataSource,           // superset_roles
200 | NewRolePermissionsDataSource, // superset_role_permissions
201 | NewDatabasesDataSource,       // superset_databases
202 | NewDatasetsDataSource,        // superset_datasets
203 | NewUsersDataSource,           // superset_users
204 | }
205 | }
206 |
207 | // Resources defines the resources implemented in the provider.
208 | func (p *supersetProvider) Resources(_ context.Context) []func() resource.Resource {
209 | return []func() resource.Resource{
210 | NewRoleResource,            // superset_role
211 | NewRolePermissionsResource, // superset_role_permissions
212 | NewDatabaseResource,        // superset_database
213 | NewMetaDatabaseResource,    // superset_meta_database
214 | NewDatasetResource,         // superset_dataset
215 | NewUserResource,            // superset_user
216 | }
217 | }
218 |
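219 | // Illustrative configuration (not part of the provider source; all values are
220 | // placeholders). The provider can be configured either through these attributes
221 | // or through the SUPERSET_HOST, SUPERSET_USERNAME and SUPERSET_PASSWORD
222 | // environment variables, with explicit configuration taking precedence:
223 | //
224 | //	provider "superset" {
225 | //	  host     = "https://superset.example.com"
226 | //	  username = "admin"
227 | //	  password = "change-me"
228 | //	}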
--------------------------------------------------------------------------------
/internal/provider/role_resource.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 | "strconv"
7 | "time"
8 |
9 | "github.com/hashicorp/terraform-plugin-framework/path"
10 | "github.com/hashicorp/terraform-plugin-framework/resource"
11 | "github.com/hashicorp/terraform-plugin-framework/resource/schema"
12 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
13 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
14 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
15 | "github.com/hashicorp/terraform-plugin-framework/types"
16 | "github.com/hashicorp/terraform-plugin-log/tflog"
17 | "terraform-provider-superset/internal/client"
18 | )
19 |
20 | // Ensure the implementation satisfies the expected interfaces.
21 | var (
22 | _ resource.Resource = &roleResource{}
23 | _ resource.ResourceWithConfigure = &roleResource{}
24 | _ resource.ResourceWithImportState = &roleResource{}
25 | )
26 |
27 | // NewRoleResource is a helper function to simplify the provider implementation.
28 | func NewRoleResource() resource.Resource {
29 | return &roleResource{}
30 | }
31 |
32 | // roleResource is the resource implementation.
33 | type roleResource struct {
34 | client *client.Client
35 | }
36 |
37 | // roleResourceModel maps the resource schema data.
38 | type roleResourceModel struct {
39 | ID types.Int64 `tfsdk:"id"`
40 | Name types.String `tfsdk:"name"`
41 | LastUpdated types.String `tfsdk:"last_updated"`
42 | }
43 |
44 | // Metadata returns the resource type name.
45 | func (r *roleResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
46 | resp.TypeName = req.ProviderTypeName + "_role"
47 | }
48 |
49 | // Schema defines the schema for the resource.
50 | func (r *roleResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
51 | resp.Schema = schema.Schema{
52 | Description: "Manages a role in Superset.",
53 | Attributes: map[string]schema.Attribute{
54 | "id": schema.Int64Attribute{
55 | Description: "Numeric identifier of the role.",
56 | Computed: true,
57 | PlanModifiers: []planmodifier.Int64{
58 | int64planmodifier.UseStateForUnknown(),
59 | },
60 | },
61 | "name": schema.StringAttribute{
62 | Description: "Name of the role.",
63 | Required: true,
64 | },
65 | "last_updated": schema.StringAttribute{
66 | Description: "Timestamp of the last update.",
67 | Computed: true,
68 | PlanModifiers: []planmodifier.String{
69 | stringplanmodifier.UseStateForUnknown(),
70 | },
71 | },
72 | },
73 | }
74 | }
75 |
76 | // Create creates the resource and sets the initial Terraform state.
77 | func (r *roleResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
78 | tflog.Debug(ctx, "Starting Create method")
79 | var plan roleResourceModel
80 | diags := req.Plan.Get(ctx, &plan)
81 | resp.Diagnostics.Append(diags...)
82 | if resp.Diagnostics.HasError() {
83 | tflog.Debug(ctx, "Exiting Create due to error in retrieving plan", map[string]interface{}{
84 | "diagnostics": resp.Diagnostics,
85 | })
86 | return
87 | }
88 |
89 | id, err := r.client.CreateRole(plan.Name.ValueString())
90 | if err != nil {
91 | resp.Diagnostics.AddError(
92 | "Unable to Create Superset Role",
93 | fmt.Sprintf("CreateRole failed: %s", err.Error()),
94 | )
95 | return
96 | }
97 |
98 | plan.ID = types.Int64Value(id)
99 | plan.LastUpdated = types.StringValue(time.Now().Format(time.RFC3339))
100 |
101 | diags = resp.State.Set(ctx, &plan)
102 | resp.Diagnostics.Append(diags...)
103 | if resp.Diagnostics.HasError() {
104 | tflog.Debug(ctx, "Exiting Create due to error in setting state", map[string]interface{}{
105 | "diagnostics": resp.Diagnostics,
106 | })
107 | return
108 | }
109 |
110 | tflog.Debug(ctx, fmt.Sprintf("Created role: ID=%d, Name=%s", plan.ID.ValueInt64(), plan.Name.ValueString()))
111 | }
112 |
113 | // Read refreshes the Terraform state with the latest data from Superset.
114 | func (r *roleResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
115 | tflog.Debug(ctx, "Starting Read method")
116 | var state roleResourceModel
117 | diags := req.State.Get(ctx, &state)
118 | resp.Diagnostics.Append(diags...)
119 | if resp.Diagnostics.HasError() {
120 | tflog.Debug(ctx, "Exiting Read due to error in getting state", map[string]interface{}{
121 | "diagnostics": resp.Diagnostics,
122 | })
123 | return
124 | }
125 |
126 | role, err := r.client.GetRole(state.ID.ValueInt64())
127 | if err != nil {
128 | resp.Diagnostics.AddError(
129 | "Error reading role",
130 | fmt.Sprintf("Could not read role ID %d: %s", state.ID.ValueInt64(), err.Error()),
131 | )
132 | return
133 | }
134 |
135 | // Correct logging using structured logging format
136 | tflog.Debug(ctx, "API returned role", map[string]interface{}{
137 | "id": role.ID,
138 | "name": role.Name,
139 | })
140 |
141 | if role.Name == "" {
142 | tflog.Warn(ctx, "Received empty name for role", map[string]interface{}{
143 | "roleID": role.ID,
144 | })
145 | }
146 |
147 | // Convert the API's plain string value into the framework's types.String.
148 | state.Name = types.StringValue(role.Name)
149 |
150 | // Save updated state
151 | diags = resp.State.Set(ctx, &state)
152 | resp.Diagnostics.Append(diags...)
153 | if resp.Diagnostics.HasError() {
154 | tflog.Debug(ctx, "Exiting Read due to error in setting state", map[string]interface{}{
155 | "diagnostics": resp.Diagnostics,
156 | })
157 | return
158 | }
159 | }
160 |
161 | // Update updates the resource and sets the updated Terraform state on success.
162 | func (r *roleResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
163 | tflog.Debug(ctx, "Starting Update method")
164 | var plan roleResourceModel
165 | var state roleResourceModel
166 |
167 | resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
168 | resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
169 |
170 | if plan.Name != state.Name {
171 | // Only update if there is a real change
172 | err := r.client.UpdateRole(state.ID.ValueInt64(), plan.Name.ValueString())
173 | if err != nil {
174 | resp.Diagnostics.AddError("Failed to update role", "Error: "+err.Error())
175 | return
176 | }
177 | state.Name = plan.Name
178 | state.LastUpdated = types.StringValue(time.Now().Format(time.RFC3339))
179 | }
180 |
181 | resp.Diagnostics.Append(resp.State.Set(ctx, &state)...)
182 | tflog.Debug(ctx, fmt.Sprintf("Updated role: ID=%d, Name=%s", state.ID.ValueInt64(), state.Name.ValueString()))
183 | }
184 |
185 | // Delete deletes the resource and removes the Terraform state on success.
186 | func (r *roleResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
187 | tflog.Debug(ctx, "Starting Delete method")
188 | var state roleResourceModel
189 | diags := req.State.Get(ctx, &state)
190 | resp.Diagnostics.Append(diags...)
191 | if resp.Diagnostics.HasError() {
192 | tflog.Debug(ctx, "Exiting Delete due to error in getting state", map[string]interface{}{
193 | "diagnostics": resp.Diagnostics,
194 | })
195 | return
196 | }
197 |
198 | err := r.client.DeleteRole(state.ID.ValueInt64())
199 | if err != nil {
200 | if err.Error() == "failed to delete role, status code: 404" {
201 | resp.State.RemoveResource(ctx)
202 | tflog.Debug(ctx, fmt.Sprintf("Role ID %d not found, removing from state", state.ID.ValueInt64()))
203 | return
204 | }
205 | resp.Diagnostics.AddError(
206 | "Unable to Delete Superset Role",
207 | fmt.Sprintf("DeleteRole failed: %s", err.Error()),
208 | )
209 | return
210 | }
211 |
212 | resp.State.RemoveResource(ctx)
213 | tflog.Debug(ctx, fmt.Sprintf("Deleted role: ID=%d", state.ID.ValueInt64()))
214 | }
215 |
216 | // ImportState imports an existing resource.
217 | func (r *roleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
218 | tflog.Debug(ctx, "Starting ImportState method", map[string]interface{}{
219 | "import_id": req.ID,
220 | })
221 |
222 | // Convert import ID to int64 and set it to the state
223 | id, err := strconv.ParseInt(req.ID, 10, 64)
224 | if err != nil {
225 | resp.Diagnostics.AddError(
226 | "Invalid Import ID",
227 | fmt.Sprintf("The provided import ID '%s' is not a valid int64: %s", req.ID, err.Error()),
228 | )
229 | return
230 | }
231 |
232 | // Set the ID in state; the framework then calls Read to populate the remaining attributes.
233 | resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), id)...)
234 |
235 | tflog.Debug(ctx, "ImportState completed successfully", map[string]interface{}{
236 | "import_id": req.ID,
237 | })
238 | }
239 |
240 | // Configure adds the provider configured client to the resource.
241 | func (r *roleResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
242 | if req.ProviderData == nil {
243 | return
244 | }
245 |
246 | client, ok := req.ProviderData.(*client.Client)
247 | if !ok {
248 | resp.Diagnostics.AddError(
249 | "Unexpected Resource Configure Type",
250 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
251 | )
252 | return
253 | }
254 |
255 | r.client = client
256 | }
257 |
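258 | // Illustrative usage (not part of the resource source; the role name and ID are
259 | // placeholders). A role is managed by name and can be imported by its numeric ID:
260 | //
261 | //	resource "superset_role" "example" {
262 | //	  name = "Analysts"
263 | //	}
264 | //
265 | //	# terraform import superset_role.example 123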
--------------------------------------------------------------------------------
/internal/provider/dataset_resource_test.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "fmt"
5 | "net/http"
6 | "testing"
7 |
8 | "github.com/hashicorp/terraform-plugin-testing/helper/resource"
9 | "github.com/jarcoal/httpmock"
10 | "terraform-provider-superset/internal/client"
11 | )
12 |
13 | func TestAccDatasetResource(t *testing.T) {
14 | httpmock.Activate()
15 | defer httpmock.DeactivateAndReset()
16 |
17 | // Clear the global database cache to ensure our mocks are used
18 | client.ClearGlobalDatabaseCache()
19 |
20 | // Mock authentication response
21 | mockLoginResponse := `{
22 | "access_token": "fake-token",
23 | "refresh_token": "fake-refresh-token"
24 | }`
25 |
26 | // Mock database list response (used by the provider to resolve the database name to its ID)
27 | mockDatabasesResponse := `{
28 | "result": [
29 | {
30 | "id": 1,
31 | "database_name": "PostgreSQL Database",
32 | "backend": "postgresql"
33 | },
34 | {
35 | "id": 2,
36 | "database_name": "MySQL Database",
37 | "backend": "mysql"
38 | },
39 | {
40 | "id": 3,
41 | "database_name": "SQLite Database",
42 | "backend": "sqlite"
43 | },
44 | {
45 | "id": 4,
46 | "database_name": "Test Database 1",
47 | "backend": "postgresql"
48 | },
49 | {
50 | "id": 5,
51 | "database_name": "Test Database 2",
52 | "backend": "mysql"
53 | },
54 | {
55 | "id": 6,
56 | "database_name": "Test Database 3",
57 | "backend": "sqlite"
58 | }
59 | ]
60 | }`
61 |
62 | // Mock dataset creation response
63 | mockDatasetCreateResponse := `{
64 | "id": 123,
65 | "table_name": "test_table",
66 | "database": {
67 | "id": 1,
68 | "database_name": "PostgreSQL Database"
69 | },
70 | "schema": "public"
71 | }`
72 |
73 | // Mock dataset read response - initial version
74 | mockDatasetReadResponseInitial := `{
75 | "result": {
76 | "id": 123,
77 | "table_name": "test_table",
78 | "database": {
79 | "id": 1,
80 | "database_name": "PostgreSQL Database"
81 | },
82 | "schema": "public",
83 | "sql": null
84 | }
85 | }`
86 |
87 | // Mock dataset read response - after update
88 | mockDatasetReadResponseUpdated := `{
89 | "result": {
90 | "id": 123,
91 | "table_name": "updated_table",
92 | "database": {
93 | "id": 1,
94 | "database_name": "PostgreSQL Database"
95 | },
96 | "schema": "updated_schema",
97 | "sql": null
98 | }
99 | }`
100 |
101 | // Mock dataset update response (PUT returns empty on success)
102 | mockDatasetUpdateResponse := `{}`
103 |
104 | // Set up mocks
105 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
106 | httpmock.NewStringResponder(200, mockLoginResponse))
107 |
108 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/database/?q=(page_size:5000)",
109 | httpmock.NewStringResponder(200, mockDatabasesResponse))
110 |
111 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/dataset/",
112 | httpmock.NewStringResponder(201, mockDatasetCreateResponse))
113 |
114 | // Set up a dynamic GET responder that returns different responses depending on call count
115 | callCount := 0
116 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/dataset/123",
117 | func(req *http.Request) (*http.Response, error) {
118 | callCount++
119 | if callCount <= 2 { // First two calls return initial values
120 | return httpmock.NewStringResponse(200, mockDatasetReadResponseInitial), nil
121 | } else { // Subsequent calls return updated values
122 | return httpmock.NewStringResponse(200, mockDatasetReadResponseUpdated), nil
123 | }
124 | })
125 |
126 | httpmock.RegisterResponder("PUT", "http://superset-host/api/v1/dataset/123",
127 | httpmock.NewStringResponder(200, mockDatasetUpdateResponse))
128 |
129 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/dataset/123",
130 | httpmock.NewStringResponder(200, "{}"))
131 |
132 | resource.Test(t, resource.TestCase{
133 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
134 | Steps: []resource.TestStep{
135 | // Create and Read testing
136 | {
137 | Config: testAccDatasetResourceConfig("test_table", "PostgreSQL Database", "public"),
138 | Check: resource.ComposeAggregateTestCheckFunc(
139 | resource.TestCheckResourceAttr("superset_dataset.test", "table_name", "test_table"),
140 | resource.TestCheckResourceAttr("superset_dataset.test", "database_name", "PostgreSQL Database"),
141 | resource.TestCheckResourceAttr("superset_dataset.test", "schema", "public"),
142 | resource.TestCheckResourceAttrSet("superset_dataset.test", "id"),
143 | ),
144 | },
145 | // ImportState testing
146 | {
147 | ResourceName: "superset_dataset.test",
148 | ImportState: true,
149 | ImportStateVerify: true,
150 | ImportStateId: "123",
151 | },
152 | // Update and Read testing
153 | {
154 | Config: testAccDatasetResourceConfig("updated_table", "PostgreSQL Database", "updated_schema"),
155 | Check: resource.ComposeAggregateTestCheckFunc(
156 | resource.TestCheckResourceAttr("superset_dataset.test", "table_name", "updated_table"),
157 | resource.TestCheckResourceAttr("superset_dataset.test", "database_name", "PostgreSQL Database"),
158 | resource.TestCheckResourceAttr("superset_dataset.test", "schema", "updated_schema"),
159 | ),
160 | },
161 | // Delete testing automatically occurs in TestCase
162 | },
163 | })
164 | }
165 |
166 | func TestAccDatasetResourceWithSQL(t *testing.T) {
167 | httpmock.Activate()
168 | defer httpmock.DeactivateAndReset()
169 |
170 | // Clear the global database cache to ensure our mocks are used
171 | client.ClearGlobalDatabaseCache()
172 |
173 | // Mock authentication response
174 | mockLoginResponse := `{
175 | "access_token": "fake-token",
176 | "refresh_token": "fake-refresh-token"
177 | }`
178 |
179 | // Mock database list response (same as first test)
180 | mockDatabasesResponse := `{
181 | "result": [
182 | {
183 | "id": 1,
184 | "database_name": "PostgreSQL Database",
185 | "backend": "postgresql"
186 | },
187 | {
188 | "id": 2,
189 | "database_name": "MySQL Database",
190 | "backend": "mysql"
191 | },
192 | {
193 | "id": 3,
194 | "database_name": "SQLite Database",
195 | "backend": "sqlite"
196 | },
197 | {
198 | "id": 4,
199 | "database_name": "Test Database 1",
200 | "backend": "postgresql"
201 | },
202 | {
203 | "id": 5,
204 | "database_name": "Test Database 2",
205 | "backend": "mysql"
206 | },
207 | {
208 | "id": 6,
209 | "database_name": "Test Database 3",
210 | "backend": "sqlite"
211 | }
212 | ]
213 | }`
214 |
215 | // Mock dataset creation response with SQL
216 | mockDatasetCreateResponse := `{
217 | "id": 124,
218 | "table_name": "sql_dataset",
219 | "database": {
220 | "id": 1,
221 | "database_name": "PostgreSQL Database"
222 | },
223 | "sql": "SELECT * FROM users"
224 | }`
225 |
226 | // Mock dataset read response with SQL
227 | mockDatasetReadResponse := `{
228 | "result": {
229 | "id": 124,
230 | "table_name": "sql_dataset",
231 | "database": {
232 | "id": 1,
233 | "database_name": "PostgreSQL Database"
234 | },
235 | "schema": null,
236 | "sql": "SELECT * FROM users"
237 | }
238 | }`
239 |
240 | // Set up mocks
241 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/security/login",
242 | httpmock.NewStringResponder(200, mockLoginResponse))
243 |
244 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/database/?q=(page_size:5000)",
245 | httpmock.NewStringResponder(200, mockDatabasesResponse))
246 |
247 | httpmock.RegisterResponder("POST", "http://superset-host/api/v1/dataset/",
248 | httpmock.NewStringResponder(201, mockDatasetCreateResponse))
249 |
250 | httpmock.RegisterResponder("GET", "http://superset-host/api/v1/dataset/124",
251 | httpmock.NewStringResponder(200, mockDatasetReadResponse))
252 |
253 | httpmock.RegisterResponder("DELETE", "http://superset-host/api/v1/dataset/124",
254 | httpmock.NewStringResponder(200, "{}"))
255 |
256 | resource.Test(t, resource.TestCase{
257 | ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
258 | Steps: []resource.TestStep{
259 | // Create and Read testing with SQL
260 | {
261 | Config: testAccDatasetResourceConfigWithSQL("sql_dataset", "PostgreSQL Database", "SELECT * FROM users"),
262 | Check: resource.ComposeAggregateTestCheckFunc(
263 | resource.TestCheckResourceAttr("superset_dataset.test", "table_name", "sql_dataset"),
264 | resource.TestCheckResourceAttr("superset_dataset.test", "database_name", "PostgreSQL Database"),
265 | resource.TestCheckResourceAttr("superset_dataset.test", "sql", "SELECT * FROM users"),
266 | resource.TestCheckResourceAttrSet("superset_dataset.test", "id"),
267 | ),
268 | },
269 | },
270 | })
271 | }
272 |
273 | func testAccDatasetResourceConfig(tableName, databaseName, schema string) string {
274 | return fmt.Sprintf(`
275 | resource "superset_dataset" "test" {
276 | table_name = %[1]q
277 | database_name = %[2]q
278 | schema = %[3]q
279 | }
280 | `, tableName, databaseName, schema)
281 | }
282 |
283 | func testAccDatasetResourceConfigWithSQL(tableName, databaseName, sql string) string {
284 | return fmt.Sprintf(`
285 | resource "superset_dataset" "test" {
286 | table_name = %[1]q
287 | database_name = %[2]q
288 | sql = %[3]q
289 | }
290 | `, tableName, databaseName, sql)
291 | }
292 |
--------------------------------------------------------------------------------
/internal/provider/datasets_data_source.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "encoding/json"
6 | "fmt"
7 |
8 | "github.com/hashicorp/terraform-plugin-framework/attr"
9 | "github.com/hashicorp/terraform-plugin-framework/datasource"
10 | "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
11 | "github.com/hashicorp/terraform-plugin-framework/types"
12 | "github.com/hashicorp/terraform-plugin-log/tflog"
13 | "terraform-provider-superset/internal/client"
14 | )
15 |
16 | var (
17 | _ datasource.DataSource = &datasetsDataSource{}
18 | _ datasource.DataSourceWithConfigure = &datasetsDataSource{}
19 | )
20 |
21 | func NewDatasetsDataSource() datasource.DataSource {
22 | return &datasetsDataSource{}
23 | }
24 |
25 | type datasetsDataSource struct {
26 | client *client.Client
27 | }
28 |
29 | type datasetsDataSourceModel struct {
30 | Datasets []dataset `tfsdk:"datasets"`
31 | }
32 |
33 | type dataset struct {
34 | ID types.Int64 `tfsdk:"id"`
35 | TableName types.String `tfsdk:"table_name"`
36 | DatabaseID types.Int64 `tfsdk:"database_id"`
37 | DatabaseName types.String `tfsdk:"database_name"`
38 | Schema types.String `tfsdk:"schema"`
39 | SQL types.String `tfsdk:"sql"`
40 | Kind types.String `tfsdk:"kind"`
41 | Owners types.List `tfsdk:"owners"`
42 | }
43 |
44 | func (d *datasetsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
45 | resp.TypeName = req.ProviderTypeName + "_datasets"
46 | }
47 |
48 | func (d *datasetsDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
49 | resp.Schema = schema.Schema{
50 | Description: "Fetches all datasets from Superset.",
51 | Attributes: map[string]schema.Attribute{
52 | "datasets": schema.ListNestedAttribute{
53 | Description: "List of Superset datasets.",
54 | Computed: true,
55 | NestedObject: schema.NestedAttributeObject{
56 | Attributes: map[string]schema.Attribute{
57 | "id": schema.Int64Attribute{
58 | Description: "Dataset ID.",
59 | Computed: true,
60 | },
61 | "table_name": schema.StringAttribute{
62 | Description: "Name of the table.",
63 | Computed: true,
64 | },
65 | "database_id": schema.Int64Attribute{
66 | Description: "Database ID to which the dataset belongs.",
67 | Computed: true,
68 | },
69 | "database_name": schema.StringAttribute{
70 | Description: "Database name to which the dataset belongs.",
71 | Computed: true,
72 | },
73 | "schema": schema.StringAttribute{
74 | Description: "Schema of the dataset.",
75 | Computed: true,
76 | },
77 | "sql": schema.StringAttribute{
78 | Description: "SQL query of the dataset.",
79 | Computed: true,
80 | },
81 | "kind": schema.StringAttribute{
82 | Description: "Kind of the dataset.",
83 | Computed: true,
84 | },
85 | "owners": schema.ListNestedAttribute{
86 | Description: "List of owners of the dataset.",
87 | Computed: true,
88 | NestedObject: schema.NestedAttributeObject{
89 | Attributes: map[string]schema.Attribute{
90 | "id": schema.Int64Attribute{
91 | Description: "Owner ID.",
92 | Computed: true,
93 | },
94 | "first_name": schema.StringAttribute{
95 | Description: "First name of the owner.",
96 | Computed: true,
97 | },
98 | "last_name": schema.StringAttribute{
99 | Description: "Last name of the owner.",
100 | Computed: true,
101 | },
102 | },
103 | },
104 | },
105 | },
106 | },
107 | },
108 | },
109 | }
110 | }
111 |
112 | func (d *datasetsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
113 | tflog.Debug(ctx, "Starting Read method")
114 |
115 | // Fetch datasets from the Superset instance
116 | datasets, err := d.client.GetAllDatasets()
117 | if err != nil {
118 | resp.Diagnostics.AddError(
119 | "Unable to Read Superset Datasets",
120 | fmt.Sprintf("GetAllDatasets failed: %s", err.Error()),
121 | )
122 | return
123 | }
124 |
125 | // Log the entire API response for debugging
126 | responseJSON, _ := json.Marshal(datasets)
127 | tflog.Debug(ctx, fmt.Sprintf("API Response: %s", responseJSON))
128 |
129 | var datasetsModel []dataset
130 | for _, ds := range datasets {
131 | tflog.Debug(ctx, fmt.Sprintf("Processing dataset: %v", ds))
132 |
133 | id, ok := ds["id"].(float64)
134 | if !ok {
135 | resp.Diagnostics.AddError(
136 | "Invalid Response",
137 | "Missing or invalid 'id' field in the API response",
138 | )
139 | return
140 | }
141 | tflog.Debug(ctx, fmt.Sprintf("Dataset ID: %f", id))
142 |
143 | tableName, ok := ds["table_name"].(string)
144 | if !ok {
145 | resp.Diagnostics.AddError(
146 | "Invalid Response",
147 | "Missing or invalid 'table_name' field in the API response",
148 | )
149 | return
150 | }
151 | tflog.Debug(ctx, fmt.Sprintf("Table Name: %s", tableName))
152 |
153 | database, ok := ds["database"].(map[string]interface{})
154 | if !ok {
155 | resp.Diagnostics.AddError(
156 | "Invalid Response",
157 | "Missing or invalid 'database' field in the API response",
158 | )
159 | return
160 | }
161 | tflog.Debug(ctx, fmt.Sprintf("Database Field: %v", database))
162 |
163 | databaseID, ok := database["id"].(float64)
164 | if !ok {
165 | resp.Diagnostics.AddError(
166 | "Invalid Response",
167 | "Missing or invalid 'database.id' field in the API response",
168 | )
169 | return
170 | }
171 | tflog.Debug(ctx, fmt.Sprintf("Database ID: %f", databaseID))
172 |
173 | databaseName, ok := database["database_name"].(string)
174 | if !ok {
175 | resp.Diagnostics.AddError(
176 | "Invalid Response",
177 | "Missing or invalid 'database.database_name' field in the API response",
178 | )
179 | return
180 | }
181 | tflog.Debug(ctx, fmt.Sprintf("Database Name: %s", databaseName))
182 |
183 | schema, ok := ds["schema"].(string)
184 | if !ok {
185 | schema = ""
186 | }
187 | tflog.Debug(ctx, fmt.Sprintf("Schema: %s", schema))
188 |
189 | sql, ok := ds["sql"].(string)
190 | if !ok {
191 | sql = ""
192 | }
193 | tflog.Debug(ctx, fmt.Sprintf("SQL: %s", sql))
194 |
195 | kind, ok := ds["kind"].(string)
196 | if !ok {
197 | kind = ""
198 | }
199 | tflog.Debug(ctx, fmt.Sprintf("Kind: %s", kind))
200 |
201 | ownersList := []attr.Value{}
202 | owners, ok := ds["owners"].([]interface{})
203 | if ok {
204 | for _, owner := range owners {
205 | ownerMap, ok := owner.(map[string]interface{})
206 | if ok {
207 | ownerID, ok := ownerMap["id"].(float64)
208 | if !ok {
209 | ownerID = 0
210 | }
211 | firstName, ok := ownerMap["first_name"].(string)
212 | if !ok {
213 | firstName = ""
214 | }
215 | lastName, ok := ownerMap["last_name"].(string)
216 | if !ok {
217 | lastName = ""
218 | }
219 |
220 | ownerObject := map[string]attr.Value{
221 | "id": types.Int64Value(int64(ownerID)),
222 | "first_name": types.StringValue(firstName),
223 | "last_name": types.StringValue(lastName),
224 | }
225 | ownerVal, diags := types.ObjectValue(map[string]attr.Type{
226 | "id": types.Int64Type,
227 | "first_name": types.StringType,
228 | "last_name": types.StringType,
229 | }, ownerObject)
230 | resp.Diagnostics.Append(diags...)
231 | if resp.Diagnostics.HasError() {
232 | return
233 | }
234 | ownersList = append(ownersList, ownerVal)
235 | }
236 | }
237 | }
238 |
239 | ownersAttr, diags := types.ListValue(types.ObjectType{AttrTypes: map[string]attr.Type{"id": types.Int64Type, "first_name": types.StringType, "last_name": types.StringType}}, ownersList)
240 | resp.Diagnostics.Append(diags...)
241 | if resp.Diagnostics.HasError() {
242 | return
243 | }
244 |
245 | dataset := dataset{
246 | ID: types.Int64Value(int64(id)),
247 | TableName: types.StringValue(tableName),
248 | DatabaseID: types.Int64Value(int64(databaseID)),
249 | DatabaseName: types.StringValue(databaseName),
250 | Schema: types.StringValue(schema),
251 | SQL: types.StringValue(sql),
252 | Kind: types.StringValue(kind),
253 | Owners: ownersAttr,
254 | }
255 | datasetsModel = append(datasetsModel, dataset)
256 | }
257 |
258 | state := datasetsDataSourceModel{
259 | Datasets: datasetsModel,
260 | }
261 |
262 | // Additional debug for the final state
263 | finalStateJSON, _ := json.Marshal(state)
264 | tflog.Debug(ctx, fmt.Sprintf("Final state to be set: %s", finalStateJSON))
265 |
266 | diags := resp.State.Set(ctx, &state)
267 | resp.Diagnostics.Append(diags...)
268 | if resp.Diagnostics.HasError() {
269 | tflog.Debug(ctx, fmt.Sprintf("Error setting state: %v", resp.Diagnostics))
270 | return
271 | }
272 |
273 | tflog.Debug(ctx, "Fetched datasets successfully", map[string]interface{}{
274 | "datasets_count": len(datasetsModel),
275 | })
276 | }
277 |
278 | func (d *datasetsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
279 | if req.ProviderData == nil {
280 | return
281 | }
282 |
283 | client, ok := req.ProviderData.(*client.Client)
284 | if !ok {
285 | resp.Diagnostics.AddError(
286 | "Unexpected Resource Configure Type",
287 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
288 | )
289 | return
290 | }
291 |
292 | d.client = client
293 | }
294 |
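295 | // Illustrative usage (not part of the data source; the output name is a
296 | // placeholder). The data source takes no arguments and exposes a "datasets" list:
297 | //
298 | //	data "superset_datasets" "all" {}
299 | //
300 | //	output "dataset_table_names" {
301 | //	  value = data.superset_datasets.all.datasets[*].table_name
302 | //	}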
--------------------------------------------------------------------------------
/internal/provider/dataset_resource.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 | "strconv"
7 |
8 | "github.com/hashicorp/terraform-plugin-framework/path"
9 | "github.com/hashicorp/terraform-plugin-framework/resource"
10 | "github.com/hashicorp/terraform-plugin-framework/resource/schema"
11 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
12 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
13 | "github.com/hashicorp/terraform-plugin-framework/types"
14 | "github.com/hashicorp/terraform-plugin-log/tflog"
15 | "terraform-provider-superset/internal/client"
16 | )
17 |
18 | // Ensure the implementation satisfies the expected interfaces.
19 | var (
20 | _ resource.Resource = &datasetResource{}
21 | _ resource.ResourceWithConfigure = &datasetResource{}
22 | _ resource.ResourceWithImportState = &datasetResource{}
23 | )
24 |
25 | // NewDatasetResource is a helper function to simplify the provider implementation.
26 | func NewDatasetResource() resource.Resource {
27 | return &datasetResource{}
28 | }
29 |
30 | // datasetResource is the resource implementation.
31 | type datasetResource struct {
32 | client *client.Client
33 | }
34 |
35 | // datasetResourceModel maps the resource schema data.
36 | type datasetResourceModel struct {
37 | ID types.Int64 `tfsdk:"id"`
38 | TableName types.String `tfsdk:"table_name"`
39 | DatabaseName types.String `tfsdk:"database_name"`
40 | Schema types.String `tfsdk:"schema"`
41 | SQL types.String `tfsdk:"sql"`
42 | }
43 |
44 | // Metadata returns the resource type name.
45 | func (r *datasetResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
46 | resp.TypeName = req.ProviderTypeName + "_dataset"
47 | }
48 |
49 | // Schema defines the schema for the resource.
50 | func (r *datasetResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
51 | resp.Schema = schema.Schema{
52 | Description: "Manages a dataset in Superset.",
53 | Attributes: map[string]schema.Attribute{
54 | "id": schema.Int64Attribute{
55 | Description: "Numeric identifier of the dataset.",
56 | Computed: true,
57 | PlanModifiers: []planmodifier.Int64{
58 | int64planmodifier.UseStateForUnknown(),
59 | },
60 | },
61 | "table_name": schema.StringAttribute{
62 | Description: "Name of the table or dataset.",
63 | Required: true,
64 | },
65 | "database_name": schema.StringAttribute{
66 | Description: "Name of the database where the dataset resides. Cannot be changed after creation.",
67 | Required: true,
68 | },
69 | "schema": schema.StringAttribute{
70 | Description: "Database schema name (optional).",
71 | Optional: true,
72 | },
73 | "sql": schema.StringAttribute{
74 | Description: "SQL query for the dataset (optional, for SQL-based datasets).",
75 | Optional: true,
76 | },
77 | },
78 | }
79 | }
80 |
81 | // Create creates the resource and sets the initial Terraform state.
82 | func (r *datasetResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
83 | // Retrieve values from plan
84 | var plan datasetResourceModel
85 | diags := req.Plan.Get(ctx, &plan)
86 | resp.Diagnostics.Append(diags...)
87 | if resp.Diagnostics.HasError() {
88 | return
89 | }
90 |
91 | tflog.Debug(ctx, "Creating dataset", map[string]interface{}{
92 | "table_name": plan.TableName.ValueString(),
93 | "database_name": plan.DatabaseName.ValueString(),
94 | })
95 |
96 | // Get database ID by name
97 | databaseID, err := r.client.GetDatabaseIDByName(plan.DatabaseName.ValueString())
98 | if err != nil {
99 | resp.Diagnostics.AddError(
100 | "Error finding database",
101 | fmt.Sprintf("Could not find database '%s': %s", plan.DatabaseName.ValueString(), err.Error()),
102 | )
103 | return
104 | }
105 |
106 | // Create dataset request
107 | datasetReq := client.DatasetRequest{
108 | TableName: plan.TableName.ValueString(),
109 | Database: databaseID,
110 | Schema: plan.Schema.ValueString(),
111 | SQL: plan.SQL.ValueString(),
112 | }
113 |
114 | // Create dataset
115 | datasetResp, err := r.client.CreateDataset(datasetReq)
116 | if err != nil {
117 | resp.Diagnostics.AddError(
118 | "Error creating dataset",
119 | "Could not create dataset: "+err.Error(),
120 | )
121 | return
122 | }
123 |
124 | // Extract ID from response
125 | var datasetID int64
126 | if id, ok := (*datasetResp)["id"].(float64); ok {
127 | datasetID = int64(id)
128 | } else {
129 | resp.Diagnostics.AddError(
130 | "Error creating dataset",
131 | "Could not extract ID from create response",
132 | )
133 | return
134 | }
135 |
136 | // Update the state
137 | plan.ID = types.Int64Value(datasetID)
138 |
139 | tflog.Debug(ctx, "Created dataset", map[string]interface{}{
140 | "id": datasetID,
141 | })
142 |
143 | // Set state to fully populated data
144 | diags = resp.State.Set(ctx, plan)
145 | resp.Diagnostics.Append(diags...)
146 | if resp.Diagnostics.HasError() {
147 | return
148 | }
149 | }
150 |
151 | // Read refreshes the Terraform state with the latest data.
152 | func (r *datasetResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
153 | // Get current state
154 | var state datasetResourceModel
155 | diags := req.State.Get(ctx, &state)
156 | resp.Diagnostics.Append(diags...)
157 | if resp.Diagnostics.HasError() {
158 | return
159 | }
160 |
161 | // Get dataset from API
162 | dataset, err := r.client.GetDataset(state.ID.ValueInt64())
163 | if err != nil {
164 | resp.Diagnostics.AddError(
165 | "Error reading dataset",
166 | "Could not read dataset ID "+fmt.Sprintf("%d", state.ID.ValueInt64())+": "+err.Error(),
167 | )
168 | return
169 | }
170 |
171 | // Update state from API response
172 | if tableName, ok := (*dataset)["table_name"].(string); ok {
173 | state.TableName = types.StringValue(tableName)
174 | }
175 |
176 | if schema, ok := (*dataset)["schema"].(string); ok {
177 | state.Schema = types.StringValue(schema)
178 | }
179 |
180 | if sql, ok := (*dataset)["sql"].(string); ok {
181 | state.SQL = types.StringValue(sql)
182 | }
183 |
184 | // Get database name by ID
185 | if database, ok := (*dataset)["database"].(map[string]interface{}); ok {
186 | if dbID, ok := database["id"].(float64); ok {
187 | databaseName, err := r.client.GetDatabaseNameByID(int64(dbID))
188 | if err != nil {
189 | resp.Diagnostics.AddError(
190 | "Error reading dataset",
191 | "Could not get database name: "+err.Error(),
192 | )
193 | return
194 | }
195 | state.DatabaseName = types.StringValue(databaseName)
196 | }
197 | }
198 |
199 | // Set refreshed state
200 | diags = resp.State.Set(ctx, &state)
201 | resp.Diagnostics.Append(diags...)
202 | if resp.Diagnostics.HasError() {
203 | return
204 | }
205 | }
206 |
207 | // Update updates the resource and sets the updated Terraform state on success.
208 | func (r *datasetResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
209 | // Retrieve values from plan
210 | var plan datasetResourceModel
211 | diags := req.Plan.Get(ctx, &plan)
212 | resp.Diagnostics.Append(diags...)
213 | if resp.Diagnostics.HasError() {
214 | return
215 | }
216 |
217 | // Update dataset (database cannot be changed, so we don't validate it)
218 | err := r.client.UpdateDataset(
219 | plan.ID.ValueInt64(),
220 | plan.TableName.ValueString(),
221 | plan.Schema.ValueString(),
222 | plan.SQL.ValueString(),
223 | )
224 | if err != nil {
225 | resp.Diagnostics.AddError(
226 | "Error updating dataset",
227 | "Could not update dataset ID "+fmt.Sprintf("%d", plan.ID.ValueInt64())+": "+err.Error(),
228 | )
229 | return
230 | }
231 |
232 | tflog.Debug(ctx, "Updated dataset", map[string]interface{}{
233 | "id": plan.ID.ValueInt64(),
234 | })
235 |
236 | // Set updated state
237 | diags = resp.State.Set(ctx, plan)
238 | resp.Diagnostics.Append(diags...)
239 | if resp.Diagnostics.HasError() {
240 | return
241 | }
242 | }
243 |
244 | // Delete deletes the resource and removes the Terraform state on success.
245 | func (r *datasetResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
246 | // Retrieve values from state
247 | var state datasetResourceModel
248 | diags := req.State.Get(ctx, &state)
249 | resp.Diagnostics.Append(diags...)
250 | if resp.Diagnostics.HasError() {
251 | return
252 | }
253 |
254 | // Delete existing dataset
255 | err := r.client.DeleteDataset(state.ID.ValueInt64())
256 | if err != nil {
257 | resp.Diagnostics.AddError(
258 | "Error deleting dataset",
259 | "Could not delete dataset, unexpected error: "+err.Error(),
260 | )
261 | return
262 | }
263 |
264 | tflog.Debug(ctx, "Deleted dataset", map[string]interface{}{
265 | "id": state.ID.ValueInt64(),
266 | })
267 | }
268 |
269 | // Configure adds the provider configured client to the resource.
270 | func (r *datasetResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
271 | if req.ProviderData == nil {
272 | return
273 | }
274 |
275 | client, ok := req.ProviderData.(*client.Client)
276 |
277 | if !ok {
278 | resp.Diagnostics.AddError(
279 | "Unexpected Resource Configure Type",
280 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
281 | )
282 | return
283 | }
284 |
285 | r.client = client
286 | }
287 |
288 | // ImportState imports the resource state.
289 | func (r *datasetResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
290 | // Retrieve import ID and save to id attribute
291 | id, err := strconv.ParseInt(req.ID, 10, 64)
292 | if err != nil {
293 | resp.Diagnostics.AddError(
294 | "Error importing dataset",
295 | "Could not parse dataset ID: "+err.Error(),
296 | )
297 | return
298 | }
299 |
300 | resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), id)...)
301 | }
302 |
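303 | // Illustrative usage (not part of the resource source; names and the query are
304 | // placeholders). A dataset references its database by name and is defined either
305 | // by a physical table or by a SQL query:
306 | //
307 | //	resource "superset_dataset" "physical" {
308 | //	  table_name    = "orders"
309 | //	  database_name = "Analytics DB"
310 | //	  schema        = "public"
311 | //	}
312 | //
313 | //	resource "superset_dataset" "virtual" {
314 | //	  table_name    = "recent_orders"
315 | //	  database_name = "Analytics DB"
316 | //	  sql           = "SELECT * FROM public.orders WHERE created_at > now() - interval '7 days'"
317 | //	}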
--------------------------------------------------------------------------------
/internal/provider/user_resource.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 | "strconv"
7 | "time"
8 |
9 | "github.com/hashicorp/terraform-plugin-framework/path"
10 | "github.com/hashicorp/terraform-plugin-framework/resource"
11 | "github.com/hashicorp/terraform-plugin-framework/resource/schema"
12 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
13 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
14 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier"
15 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
16 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
17 | "github.com/hashicorp/terraform-plugin-framework/types"
18 | "github.com/hashicorp/terraform-plugin-log/tflog"
19 | "terraform-provider-superset/internal/client"
20 | )
21 |
22 | // Ensure the implementation satisfies the expected interfaces.
23 | var (
24 | _ resource.Resource = &userResource{}
25 | _ resource.ResourceWithConfigure = &userResource{}
26 | _ resource.ResourceWithImportState = &userResource{}
27 | )
28 |
29 | // NewUserResource is a helper function to simplify the provider implementation.
30 | func NewUserResource() resource.Resource {
31 | return &userResource{}
32 | }
33 |
34 | // userResource is the resource implementation.
35 | type userResource struct {
36 | client *client.Client
37 | }
38 |
39 | // userResourceModel maps the resource schema data.
40 | type userResourceModel struct {
41 | ID types.Int64 `tfsdk:"id"`
42 | Username types.String `tfsdk:"username"`
43 | FirstName types.String `tfsdk:"first_name"`
44 | LastName types.String `tfsdk:"last_name"`
45 | Email types.String `tfsdk:"email"`
46 | Password types.String `tfsdk:"password"`
47 | Active types.Bool `tfsdk:"active"`
48 | Roles types.List `tfsdk:"roles"`
49 | LastUpdated types.String `tfsdk:"last_updated"`
50 | }
51 |
52 | // Metadata returns the resource type name.
53 | func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
54 | resp.TypeName = req.ProviderTypeName + "_user"
55 | }
56 |
57 | // Schema defines the schema for the resource.
58 | func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
59 | resp.Schema = schema.Schema{
60 | Description: "Manages a user in Superset.",
61 | Attributes: map[string]schema.Attribute{
62 | "id": schema.Int64Attribute{
63 | Description: "Numeric identifier of the user.",
64 | Computed: true,
65 | PlanModifiers: []planmodifier.Int64{
66 | int64planmodifier.UseStateForUnknown(),
67 | },
68 | },
69 | "username": schema.StringAttribute{
70 | Description: "Username of the user.",
71 | Required: true,
72 | PlanModifiers: []planmodifier.String{
73 | stringplanmodifier.RequiresReplace(),
74 | },
75 | },
76 | "first_name": schema.StringAttribute{
77 | Description: "First name of the user.",
78 | Optional: true,
79 | },
80 | "last_name": schema.StringAttribute{
81 | Description: "Last name of the user.",
82 | Optional: true,
83 | },
84 | "email": schema.StringAttribute{
85 | Description: "Email address of the user.",
86 | Required: true,
87 | },
88 | "password": schema.StringAttribute{
89 | Description: "Password of the user. Required for creation, optional for updates.",
90 | Optional: true,
91 | Sensitive: true,
92 | },
93 | "active": schema.BoolAttribute{
94 | Description: "Whether the user is active. Defaults to true.",
95 | Optional: true,
96 | Computed: true,
97 | Default: booldefault.StaticBool(true),
98 | },
99 | "roles": schema.ListAttribute{
100 | Description: "List of role IDs assigned to the user.",
101 | Required: true,
102 | ElementType: types.Int64Type,
103 | PlanModifiers: []planmodifier.List{
104 | listplanmodifier.UseStateForUnknown(),
105 | },
106 | },
107 | "last_updated": schema.StringAttribute{
108 | Description: "Timestamp of the last update.",
109 | Computed: true,
110 | },
111 | },
112 | }
113 | }
114 |
115 | // Create creates the resource and sets the initial Terraform state.
116 | func (r *userResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
117 | tflog.Debug(ctx, "Starting Create method")
118 | var plan userResourceModel
119 | diags := req.Plan.Get(ctx, &plan)
120 | resp.Diagnostics.Append(diags...)
121 | if resp.Diagnostics.HasError() {
122 | tflog.Debug(ctx, "Exiting Create due to error in retrieving plan", map[string]interface{}{
123 | "diagnostics": resp.Diagnostics,
124 | })
125 | return
126 | }
127 |
128 | // Validate password is provided for creation
129 | if plan.Password.IsNull() || plan.Password.ValueString() == "" {
130 | resp.Diagnostics.AddError(
131 | "Missing Password",
132 | "Password is required when creating a new user",
133 | )
134 | return
135 | }
136 |
137 | // Extract roles from plan
138 | var roles []int64
139 | diags = plan.Roles.ElementsAs(ctx, &roles, false)
140 | resp.Diagnostics.Append(diags...)
141 | if resp.Diagnostics.HasError() {
142 | return
143 | }
144 |
145 | id, err := r.client.CreateUser(
146 | plan.Username.ValueString(),
147 | plan.FirstName.ValueString(),
148 | plan.LastName.ValueString(),
149 | plan.Email.ValueString(),
150 | plan.Password.ValueString(),
151 | plan.Active.ValueBool(),
152 | roles,
153 | )
154 | if err != nil {
155 | resp.Diagnostics.AddError(
156 | "Unable to Create Superset User",
157 | fmt.Sprintf("CreateUser failed: %s", err.Error()),
158 | )
159 | return
160 | }
161 |
162 | plan.ID = types.Int64Value(id)
163 | plan.LastUpdated = types.StringValue(time.Now().Format(time.RFC3339))
164 |
165 | diags = resp.State.Set(ctx, &plan)
166 | resp.Diagnostics.Append(diags...)
167 | if resp.Diagnostics.HasError() {
168 | tflog.Debug(ctx, "Exiting Create due to error in setting state", map[string]interface{}{
169 | "diagnostics": resp.Diagnostics,
170 | })
171 | return
172 | }
173 |
174 | tflog.Debug(ctx, fmt.Sprintf("Created user: ID=%d, Username=%s", plan.ID.ValueInt64(), plan.Username.ValueString()))
175 | }
176 |
177 | // Read refreshes the Terraform state with the latest data from Superset.
178 | func (r *userResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
179 | tflog.Debug(ctx, "Starting Read method")
180 | var state userResourceModel
181 | diags := req.State.Get(ctx, &state)
182 | resp.Diagnostics.Append(diags...)
183 | if resp.Diagnostics.HasError() {
184 | tflog.Debug(ctx, "Exiting Read due to error in getting state", map[string]interface{}{
185 | "diagnostics": resp.Diagnostics,
186 | })
187 | return
188 | }
189 |
190 | user, err := r.client.GetUser(state.ID.ValueInt64())
191 | if err != nil {
192 | resp.Diagnostics.AddError(
193 | "Error reading user",
194 | fmt.Sprintf("Could not read user ID %d: %s", state.ID.ValueInt64(), err.Error()),
195 | )
196 | return
197 | }
198 |
199 | tflog.Debug(ctx, "API returned user", map[string]interface{}{
200 | "id": user.ID,
201 | "username": user.Username,
202 | })
203 |
204 | // Update state with values from API
205 | state.Username = types.StringValue(user.Username)
206 | state.FirstName = types.StringValue(user.FirstName)
207 | state.LastName = types.StringValue(user.LastName)
208 | state.Email = types.StringValue(user.Email)
209 | state.Active = types.BoolValue(user.Active)
210 |
211 | // Convert roles to list
212 | rolesList, diags := types.ListValueFrom(ctx, types.Int64Type, user.Roles)
213 | resp.Diagnostics.Append(diags...)
214 | if resp.Diagnostics.HasError() {
215 | return
216 | }
217 | state.Roles = rolesList
218 |
219 | // Save updated state
220 | diags = resp.State.Set(ctx, &state)
221 | resp.Diagnostics.Append(diags...)
222 | if resp.Diagnostics.HasError() {
223 | tflog.Debug(ctx, "Exiting Read due to error in setting state", map[string]interface{}{
224 | "diagnostics": resp.Diagnostics,
225 | })
226 | return
227 | }
228 | }
229 |
230 | // Update updates the resource and sets the updated Terraform state on success.
231 | func (r *userResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
232 | tflog.Debug(ctx, "Starting Update method")
233 | var plan userResourceModel
234 | var state userResourceModel
235 |
236 | resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
237 | resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
238 |
239 | // Extract roles from plan
240 | var roles []int64
241 | diags := plan.Roles.ElementsAs(ctx, &roles, false)
242 | resp.Diagnostics.Append(diags...)
243 | if resp.Diagnostics.HasError() {
244 | return
245 | }
246 |
247 | err := r.client.UpdateUser(
248 | state.ID.ValueInt64(),
249 | plan.Username.ValueString(),
250 | plan.FirstName.ValueString(),
251 | plan.LastName.ValueString(),
252 | plan.Email.ValueString(),
253 | plan.Password.ValueString(), // Can be empty string for no password change
254 | plan.Active.ValueBool(),
255 | roles,
256 | )
257 | if err != nil {
258 | resp.Diagnostics.AddError("Failed to update user", "Error: "+err.Error())
259 | return
260 | }
261 |
262 | plan.ID = state.ID
263 | plan.LastUpdated = types.StringValue(time.Now().Format(time.RFC3339))
264 |
265 | resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
266 | tflog.Debug(ctx, fmt.Sprintf("Updated user: ID=%d, Username=%s", plan.ID.ValueInt64(), plan.Username.ValueString()))
267 | }
268 |
269 | // Delete deletes the resource and removes the Terraform state on success.
270 | func (r *userResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
271 | tflog.Debug(ctx, "Starting Delete method")
272 | var state userResourceModel
273 | diags := req.State.Get(ctx, &state)
274 | resp.Diagnostics.Append(diags...)
275 | if resp.Diagnostics.HasError() {
276 | tflog.Debug(ctx, "Exiting Delete due to error in getting state", map[string]interface{}{
277 | "diagnostics": resp.Diagnostics,
278 | })
279 | return
280 | }
281 |
282 | err := r.client.DeleteUser(state.ID.ValueInt64())
283 | if err != nil {
284 | if err.Error() == "failed to delete user, status code: 404" {
285 | resp.State.RemoveResource(ctx)
286 | tflog.Debug(ctx, fmt.Sprintf("User ID %d not found, removing from state", state.ID.ValueInt64()))
287 | return
288 | }
289 | resp.Diagnostics.AddError(
290 | "Unable to Delete Superset User",
291 | fmt.Sprintf("DeleteUser failed: %s", err.Error()),
292 | )
293 | return
294 | }
295 |
296 | resp.State.RemoveResource(ctx)
297 | tflog.Debug(ctx, fmt.Sprintf("Deleted user: ID=%d", state.ID.ValueInt64()))
298 | }
299 |
300 | // ImportState imports an existing resource.
301 | func (r *userResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
302 | tflog.Debug(ctx, "Starting ImportState method", map[string]interface{}{
303 | "import_id": req.ID,
304 | })
305 |
306 | // Convert import ID to int64 and set it to the state
307 | id, err := strconv.ParseInt(req.ID, 10, 64)
308 | if err != nil {
309 | resp.Diagnostics.AddError(
310 | "Invalid Import ID",
311 | fmt.Sprintf("The provided import ID '%s' is not a valid int64: %s", req.ID, err.Error()),
312 | )
313 | return
314 | }
315 |
316 | 	// Set the ID in the state; Terraform then calls Read to populate the remaining attributes
317 | 	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), id)...)
318 |
319 | tflog.Debug(ctx, "ImportState completed successfully", map[string]interface{}{
320 | "import_id": req.ID,
321 | })
322 | }
323 |
324 | // Configure adds the provider configured client to the resource.
325 | func (r *userResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
326 | if req.ProviderData == nil {
327 | return
328 | }
329 |
330 | client, ok := req.ProviderData.(*client.Client)
331 | if !ok {
332 | resp.Diagnostics.AddError(
333 | "Unexpected Resource Configure Type",
334 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
335 | )
336 | return
337 | }
338 |
339 | r.client = client
340 | }
341 |
--------------------------------------------------------------------------------
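A minimal usage sketch for the user resource implemented above. The attribute names are assumed from the Go model fields (the schema itself is defined earlier in the file), so treat them as hypothetical and verify them against docs/resources/user.md; `roles` takes numeric role IDs, and the import ID is the numeric Superset user ID.

```hcl
# Hypothetical attribute names inferred from the resource model; verify against docs/resources/user.md.
resource "superset_user" "example" {
  username   = "jdoe"
  first_name = "Jane"
  last_name  = "Doe"
  email      = "jane.doe@example.com"
  password   = "change-me" # sensitive; supply via a variable in real configurations
  active     = true
  roles      = [2, 5] # numeric Superset role IDs
}

# Terraform 1.5+ import block; the ID is parsed as an int64 by ImportState above.
import {
  to = superset_user.example
  id = "42"
}
```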
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2021 HashiCorp, Inc.
2 |
3 | Mozilla Public License Version 2.0
4 | ==================================
5 |
6 | 1. Definitions
7 | --------------
8 |
9 | 1.1. "Contributor"
10 | means each individual or legal entity that creates, contributes to
11 | the creation of, or owns Covered Software.
12 |
13 | 1.2. "Contributor Version"
14 | means the combination of the Contributions of others (if any) used
15 | by a Contributor and that particular Contributor's Contribution.
16 |
17 | 1.3. "Contribution"
18 | means Covered Software of a particular Contributor.
19 |
20 | 1.4. "Covered Software"
21 | means Source Code Form to which the initial Contributor has attached
22 | the notice in Exhibit A, the Executable Form of such Source Code
23 | Form, and Modifications of such Source Code Form, in each case
24 | including portions thereof.
25 |
26 | 1.5. "Incompatible With Secondary Licenses"
27 | means
28 |
29 | (a) that the initial Contributor has attached the notice described
30 | in Exhibit B to the Covered Software; or
31 |
32 | (b) that the Covered Software was made available under the terms of
33 | version 1.1 or earlier of the License, but not also under the
34 | terms of a Secondary License.
35 |
36 | 1.6. "Executable Form"
37 | means any form of the work other than Source Code Form.
38 |
39 | 1.7. "Larger Work"
40 | means a work that combines Covered Software with other material, in
41 | a separate file or files, that is not Covered Software.
42 |
43 | 1.8. "License"
44 | means this document.
45 |
46 | 1.9. "Licensable"
47 | means having the right to grant, to the maximum extent possible,
48 | whether at the time of the initial grant or subsequently, any and
49 | all of the rights conveyed by this License.
50 |
51 | 1.10. "Modifications"
52 | means any of the following:
53 |
54 | (a) any file in Source Code Form that results from an addition to,
55 | deletion from, or modification of the contents of Covered
56 | Software; or
57 |
58 | (b) any new file in Source Code Form that contains any Covered
59 | Software.
60 |
61 | 1.11. "Patent Claims" of a Contributor
62 | means any patent claim(s), including without limitation, method,
63 | process, and apparatus claims, in any patent Licensable by such
64 | Contributor that would be infringed, but for the grant of the
65 | License, by the making, using, selling, offering for sale, having
66 | made, import, or transfer of either its Contributions or its
67 | Contributor Version.
68 |
69 | 1.12. "Secondary License"
70 | means either the GNU General Public License, Version 2.0, the GNU
71 | Lesser General Public License, Version 2.1, the GNU Affero General
72 | Public License, Version 3.0, or any later versions of those
73 | licenses.
74 |
75 | 1.13. "Source Code Form"
76 | means the form of the work preferred for making modifications.
77 |
78 | 1.14. "You" (or "Your")
79 | means an individual or a legal entity exercising rights under this
80 | License. For legal entities, "You" includes any entity that
81 | controls, is controlled by, or is under common control with You. For
82 | purposes of this definition, "control" means (a) the power, direct
83 | or indirect, to cause the direction or management of such entity,
84 | whether by contract or otherwise, or (b) ownership of more than
85 | fifty percent (50%) of the outstanding shares or beneficial
86 | ownership of such entity.
87 |
88 | 2. License Grants and Conditions
89 | --------------------------------
90 |
91 | 2.1. Grants
92 |
93 | Each Contributor hereby grants You a world-wide, royalty-free,
94 | non-exclusive license:
95 |
96 | (a) under intellectual property rights (other than patent or trademark)
97 | Licensable by such Contributor to use, reproduce, make available,
98 | modify, display, perform, distribute, and otherwise exploit its
99 | Contributions, either on an unmodified basis, with Modifications, or
100 | as part of a Larger Work; and
101 |
102 | (b) under Patent Claims of such Contributor to make, use, sell, offer
103 | for sale, have made, import, and otherwise transfer either its
104 | Contributions or its Contributor Version.
105 |
106 | 2.2. Effective Date
107 |
108 | The licenses granted in Section 2.1 with respect to any Contribution
109 | become effective for each Contribution on the date the Contributor first
110 | distributes such Contribution.
111 |
112 | 2.3. Limitations on Grant Scope
113 |
114 | The licenses granted in this Section 2 are the only rights granted under
115 | this License. No additional rights or licenses will be implied from the
116 | distribution or licensing of Covered Software under this License.
117 | Notwithstanding Section 2.1(b) above, no patent license is granted by a
118 | Contributor:
119 |
120 | (a) for any code that a Contributor has removed from Covered Software;
121 | or
122 |
123 | (b) for infringements caused by: (i) Your and any other third party's
124 | modifications of Covered Software, or (ii) the combination of its
125 | Contributions with other software (except as part of its Contributor
126 | Version); or
127 |
128 | (c) under Patent Claims infringed by Covered Software in the absence of
129 | its Contributions.
130 |
131 | This License does not grant any rights in the trademarks, service marks,
132 | or logos of any Contributor (except as may be necessary to comply with
133 | the notice requirements in Section 3.4).
134 |
135 | 2.4. Subsequent Licenses
136 |
137 | No Contributor makes additional grants as a result of Your choice to
138 | distribute the Covered Software under a subsequent version of this
139 | License (see Section 10.2) or under the terms of a Secondary License (if
140 | permitted under the terms of Section 3.3).
141 |
142 | 2.5. Representation
143 |
144 | Each Contributor represents that the Contributor believes its
145 | Contributions are its original creation(s) or it has sufficient rights
146 | to grant the rights to its Contributions conveyed by this License.
147 |
148 | 2.6. Fair Use
149 |
150 | This License is not intended to limit any rights You have under
151 | applicable copyright doctrines of fair use, fair dealing, or other
152 | equivalents.
153 |
154 | 2.7. Conditions
155 |
156 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
157 | in Section 2.1.
158 |
159 | 3. Responsibilities
160 | -------------------
161 |
162 | 3.1. Distribution of Source Form
163 |
164 | All distribution of Covered Software in Source Code Form, including any
165 | Modifications that You create or to which You contribute, must be under
166 | the terms of this License. You must inform recipients that the Source
167 | Code Form of the Covered Software is governed by the terms of this
168 | License, and how they can obtain a copy of this License. You may not
169 | attempt to alter or restrict the recipients' rights in the Source Code
170 | Form.
171 |
172 | 3.2. Distribution of Executable Form
173 |
174 | If You distribute Covered Software in Executable Form then:
175 |
176 | (a) such Covered Software must also be made available in Source Code
177 | Form, as described in Section 3.1, and You must inform recipients of
178 | the Executable Form how they can obtain a copy of such Source Code
179 | Form by reasonable means in a timely manner, at a charge no more
180 | than the cost of distribution to the recipient; and
181 |
182 | (b) You may distribute such Executable Form under the terms of this
183 | License, or sublicense it under different terms, provided that the
184 | license for the Executable Form does not attempt to limit or alter
185 | the recipients' rights in the Source Code Form under this License.
186 |
187 | 3.3. Distribution of a Larger Work
188 |
189 | You may create and distribute a Larger Work under terms of Your choice,
190 | provided that You also comply with the requirements of this License for
191 | the Covered Software. If the Larger Work is a combination of Covered
192 | Software with a work governed by one or more Secondary Licenses, and the
193 | Covered Software is not Incompatible With Secondary Licenses, this
194 | License permits You to additionally distribute such Covered Software
195 | under the terms of such Secondary License(s), so that the recipient of
196 | the Larger Work may, at their option, further distribute the Covered
197 | Software under the terms of either this License or such Secondary
198 | License(s).
199 |
200 | 3.4. Notices
201 |
202 | You may not remove or alter the substance of any license notices
203 | (including copyright notices, patent notices, disclaimers of warranty,
204 | or limitations of liability) contained within the Source Code Form of
205 | the Covered Software, except that You may alter any license notices to
206 | the extent required to remedy known factual inaccuracies.
207 |
208 | 3.5. Application of Additional Terms
209 |
210 | You may choose to offer, and to charge a fee for, warranty, support,
211 | indemnity or liability obligations to one or more recipients of Covered
212 | Software. However, You may do so only on Your own behalf, and not on
213 | behalf of any Contributor. You must make it absolutely clear that any
214 | such warranty, support, indemnity, or liability obligation is offered by
215 | You alone, and You hereby agree to indemnify every Contributor for any
216 | liability incurred by such Contributor as a result of warranty, support,
217 | indemnity or liability terms You offer. You may include additional
218 | disclaimers of warranty and limitations of liability specific to any
219 | jurisdiction.
220 |
221 | 4. Inability to Comply Due to Statute or Regulation
222 | ---------------------------------------------------
223 |
224 | If it is impossible for You to comply with any of the terms of this
225 | License with respect to some or all of the Covered Software due to
226 | statute, judicial order, or regulation then You must: (a) comply with
227 | the terms of this License to the maximum extent possible; and (b)
228 | describe the limitations and the code they affect. Such description must
229 | be placed in a text file included with all distributions of the Covered
230 | Software under this License. Except to the extent prohibited by statute
231 | or regulation, such description must be sufficiently detailed for a
232 | recipient of ordinary skill to be able to understand it.
233 |
234 | 5. Termination
235 | --------------
236 |
237 | 5.1. The rights granted under this License will terminate automatically
238 | if You fail to comply with any of its terms. However, if You become
239 | compliant, then the rights granted under this License from a particular
240 | Contributor are reinstated (a) provisionally, unless and until such
241 | Contributor explicitly and finally terminates Your grants, and (b) on an
242 | ongoing basis, if such Contributor fails to notify You of the
243 | non-compliance by some reasonable means prior to 60 days after You have
244 | come back into compliance. Moreover, Your grants from a particular
245 | Contributor are reinstated on an ongoing basis if such Contributor
246 | notifies You of the non-compliance by some reasonable means, this is the
247 | first time You have received notice of non-compliance with this License
248 | from such Contributor, and You become compliant prior to 30 days after
249 | Your receipt of the notice.
250 |
251 | 5.2. If You initiate litigation against any entity by asserting a patent
252 | infringement claim (excluding declaratory judgment actions,
253 | counter-claims, and cross-claims) alleging that a Contributor Version
254 | directly or indirectly infringes any patent, then the rights granted to
255 | You by any and all Contributors for the Covered Software under Section
256 | 2.1 of this License shall terminate.
257 |
258 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all
259 | end user license agreements (excluding distributors and resellers) which
260 | have been validly granted by You or Your distributors under this License
261 | prior to termination shall survive termination.
262 |
263 | ************************************************************************
264 | * *
265 | * 6. Disclaimer of Warranty *
266 | * ------------------------- *
267 | * *
268 | * Covered Software is provided under this License on an "as is" *
269 | * basis, without warranty of any kind, either expressed, implied, or *
270 | * statutory, including, without limitation, warranties that the *
271 | * Covered Software is free of defects, merchantable, fit for a *
272 | * particular purpose or non-infringing. The entire risk as to the *
273 | * quality and performance of the Covered Software is with You. *
274 | * Should any Covered Software prove defective in any respect, You *
275 | * (not any Contributor) assume the cost of any necessary servicing, *
276 | * repair, or correction. This disclaimer of warranty constitutes an *
277 | * essential part of this License. No use of any Covered Software is *
278 | * authorized under this License except under this disclaimer. *
279 | * *
280 | ************************************************************************
281 |
282 | ************************************************************************
283 | * *
284 | * 7. Limitation of Liability *
285 | * -------------------------- *
286 | * *
287 | * Under no circumstances and under no legal theory, whether tort *
288 | * (including negligence), contract, or otherwise, shall any *
289 | * Contributor, or anyone who distributes Covered Software as *
290 | * permitted above, be liable to You for any direct, indirect, *
291 | * special, incidental, or consequential damages of any character *
292 | * including, without limitation, damages for lost profits, loss of *
293 | * goodwill, work stoppage, computer failure or malfunction, or any *
294 | * and all other commercial damages or losses, even if such party *
295 | * shall have been informed of the possibility of such damages. This *
296 | * limitation of liability shall not apply to liability for death or *
297 | * personal injury resulting from such party's negligence to the *
298 | * extent applicable law prohibits such limitation. Some *
299 | * jurisdictions do not allow the exclusion or limitation of *
300 | * incidental or consequential damages, so this exclusion and *
301 | * limitation may not apply to You. *
302 | * *
303 | ************************************************************************
304 |
305 | 8. Litigation
306 | -------------
307 |
308 | Any litigation relating to this License may be brought only in the
309 | courts of a jurisdiction where the defendant maintains its principal
310 | place of business and such litigation shall be governed by laws of that
311 | jurisdiction, without reference to its conflict-of-law provisions.
312 | Nothing in this Section shall prevent a party's ability to bring
313 | cross-claims or counter-claims.
314 |
315 | 9. Miscellaneous
316 | ----------------
317 |
318 | This License represents the complete agreement concerning the subject
319 | matter hereof. If any provision of this License is held to be
320 | unenforceable, such provision shall be reformed only to the extent
321 | necessary to make it enforceable. Any law or regulation which provides
322 | that the language of a contract shall be construed against the drafter
323 | shall not be used to construe this License against a Contributor.
324 |
325 | 10. Versions of the License
326 | ---------------------------
327 |
328 | 10.1. New Versions
329 |
330 | Mozilla Foundation is the license steward. Except as provided in Section
331 | 10.3, no one other than the license steward has the right to modify or
332 | publish new versions of this License. Each version will be given a
333 | distinguishing version number.
334 |
335 | 10.2. Effect of New Versions
336 |
337 | You may distribute the Covered Software under the terms of the version
338 | of the License under which You originally received the Covered Software,
339 | or under the terms of any subsequent version published by the license
340 | steward.
341 |
342 | 10.3. Modified Versions
343 |
344 | If you create software not governed by this License, and you want to
345 | create a new license for such software, you may create and use a
346 | modified version of this License if you rename the license and remove
347 | any references to the name of the license steward (except to note that
348 | such modified license differs from this License).
349 |
350 | 10.4. Distributing Source Code Form that is Incompatible With Secondary
351 | Licenses
352 |
353 | If You choose to distribute Source Code Form that is Incompatible With
354 | Secondary Licenses under the terms of this version of the License, the
355 | notice described in Exhibit B of this License must be attached.
356 |
357 | Exhibit A - Source Code Form License Notice
358 | -------------------------------------------
359 |
360 | This Source Code Form is subject to the terms of the Mozilla Public
361 | License, v. 2.0. If a copy of the MPL was not distributed with this
362 | file, You can obtain one at http://mozilla.org/MPL/2.0/.
363 |
364 | If it is not possible or desirable to put the notice in a particular
365 | file, then You may include the notice in a location (such as a LICENSE
366 | file in a relevant directory) where a recipient would be likely to look
367 | for such a notice.
368 |
369 | You may add additional accurate notices of copyright ownership.
370 |
371 | Exhibit B - "Incompatible With Secondary Licenses" Notice
372 | ---------------------------------------------------------
373 |
374 | This Source Code Form is "Incompatible With Secondary Licenses", as
375 | defined by the Mozilla Public License, v. 2.0.
376 |
--------------------------------------------------------------------------------
/internal/provider/databases_resource.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | "context"
5 | "fmt"
6 | "strconv"
7 |
8 | "github.com/hashicorp/terraform-plugin-framework/path"
9 | "github.com/hashicorp/terraform-plugin-framework/resource"
10 | "github.com/hashicorp/terraform-plugin-framework/resource/schema"
11 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
12 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
13 | "github.com/hashicorp/terraform-plugin-framework/types"
14 | "github.com/hashicorp/terraform-plugin-log/tflog"
15 | "terraform-provider-superset/internal/client"
16 | )
17 |
18 | // Ensure the implementation satisfies the expected interfaces.
19 | var (
20 | _ resource.Resource = &databaseResource{}
21 | _ resource.ResourceWithConfigure = &databaseResource{}
22 | _ resource.ResourceWithImportState = &databaseResource{}
23 | )
24 |
25 | // NewDatabaseResource is a helper function to simplify the provider implementation.
26 | func NewDatabaseResource() resource.Resource {
27 | return &databaseResource{}
28 | }
29 |
30 | // databaseResource is the resource implementation.
31 | type databaseResource struct {
32 | client *client.Client
33 | }
34 |
35 | // databaseResourceModel maps the resource schema data.
36 | type databaseResourceModel struct {
37 | ID types.Int64 `tfsdk:"id"`
38 | ConnectionName types.String `tfsdk:"connection_name"`
39 | DBEngine types.String `tfsdk:"db_engine"`
40 | DBUser types.String `tfsdk:"db_user"`
41 | DBPass types.String `tfsdk:"db_pass"`
42 | DBHost types.String `tfsdk:"db_host"`
43 | DBPort types.Int64 `tfsdk:"db_port"`
44 | DBName types.String `tfsdk:"db_name"`
45 | AllowCTAS types.Bool `tfsdk:"allow_ctas"`
46 | AllowCVAS types.Bool `tfsdk:"allow_cvas"`
47 | AllowDML types.Bool `tfsdk:"allow_dml"`
48 | AllowRunAsync types.Bool `tfsdk:"allow_run_async"`
49 | ExposeInSQLLab types.Bool `tfsdk:"expose_in_sqllab"`
50 | }
51 |
52 | // Metadata returns the resource type name.
53 | func (r *databaseResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
54 | resp.TypeName = req.ProviderTypeName + "_database"
55 | }
56 |
57 | // Schema defines the schema for the resource.
58 | func (r *databaseResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
59 | resp.Schema = schema.Schema{
60 | Description: "Manages a database connection in Superset.",
61 | Attributes: map[string]schema.Attribute{
62 | "id": schema.Int64Attribute{
63 | Description: "Numeric identifier of the database connection.",
64 | Computed: true,
65 | PlanModifiers: []planmodifier.Int64{
66 | int64planmodifier.UseStateForUnknown(),
67 | },
68 | },
69 | "connection_name": schema.StringAttribute{
70 | Description: "Name of the database connection.",
71 | Required: true,
72 | },
73 | "db_engine": schema.StringAttribute{
74 | Description: "Database engine (e.g., postgresql, mysql).",
75 | Required: true,
76 | },
77 | "db_user": schema.StringAttribute{
78 | Description: "Database username.",
79 | Required: true,
80 | },
81 | "db_pass": schema.StringAttribute{
82 | Description: "Database password.",
83 | Required: true,
84 | Sensitive: true,
85 | },
86 | "db_host": schema.StringAttribute{
87 | Description: "Database host.",
88 | Required: true,
89 | },
90 | "db_port": schema.Int64Attribute{
91 | Description: "Database port.",
92 | Required: true,
93 | },
94 | "db_name": schema.StringAttribute{
95 | Description: "Database name.",
96 | Required: true,
97 | },
98 | "allow_ctas": schema.BoolAttribute{
99 | Description: "Allow CTAS.",
100 | Required: true,
101 | },
102 | "allow_cvas": schema.BoolAttribute{
103 | Description: "Allow CVAS.",
104 | Required: true,
105 | },
106 | "allow_dml": schema.BoolAttribute{
107 | Description: "Allow DML.",
108 | Required: true,
109 | },
110 | "allow_run_async": schema.BoolAttribute{
111 | Description: "Allow run async.",
112 | Required: true,
113 | },
114 | "expose_in_sqllab": schema.BoolAttribute{
115 | Description: "Expose in SQL Lab.",
116 | Required: true,
117 | },
118 | },
119 | }
120 | }
121 |
122 | // Create creates the resource and sets the initial Terraform state.
123 | func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
124 | tflog.Debug(ctx, "Starting Create method")
125 | var plan databaseResourceModel
126 | diags := req.Plan.Get(ctx, &plan)
127 | resp.Diagnostics.Append(diags...)
128 | if resp.Diagnostics.HasError() {
129 | tflog.Debug(ctx, "Exiting Create due to error in retrieving plan", map[string]interface{}{
130 | "diagnostics": resp.Diagnostics,
131 | })
132 | return
133 | }
134 |
135 | sqlalchemyURI := fmt.Sprintf("%s://%s:%s@%s:%d/%s", plan.DBEngine.ValueString(), plan.DBUser.ValueString(), plan.DBPass.ValueString(), plan.DBHost.ValueString(), plan.DBPort.ValueInt64(), plan.DBName.ValueString())
136 | extra := `{"client_encoding": "utf8"}`
137 | payload := map[string]interface{}{
138 | "allow_csv_upload": false,
139 | "allow_ctas": plan.AllowCTAS.ValueBool(),
140 | "allow_cvas": plan.AllowCVAS.ValueBool(),
141 | "allow_dml": plan.AllowDML.ValueBool(),
142 | "allow_multi_schema_metadata_fetch": true,
143 | "allow_run_async": plan.AllowRunAsync.ValueBool(),
144 | "cache_timeout": 0,
145 | "expose_in_sqllab": plan.ExposeInSQLLab.ValueBool(),
146 | "database_name": plan.ConnectionName.ValueString(),
147 | "sqlalchemy_uri": sqlalchemyURI,
148 | "extra": extra,
149 | }
150 |
151 | result, err := r.client.CreateDatabase(payload)
152 | if err != nil {
153 | resp.Diagnostics.AddError(
154 | "Unable to Create Superset Database Connection",
155 | fmt.Sprintf("CreateDatabase failed: %s", err.Error()),
156 | )
157 | return
158 | }
159 |
160 | // Type assertion with error handling
161 | idFloat, ok := result["id"].(float64)
162 | if !ok {
163 | resp.Diagnostics.AddError(
164 | "Invalid Response",
165 | "The 'id' field in the response is not a float64",
166 | )
167 | return
168 | }
169 | plan.ID = types.Int64Value(int64(idFloat))
170 |
171 | resultData, ok := result["result"].(map[string]interface{})
172 | if !ok {
173 | resp.Diagnostics.AddError(
174 | "Invalid Response",
175 | "The response from the API does not contain the expected 'result' field",
176 | )
177 | return
178 | }
179 |
180 | // Handle type assertions with error handling
181 | if val, ok := resultData["database_name"].(string); ok {
182 | plan.ConnectionName = types.StringValue(val)
183 | } else {
184 | resp.Diagnostics.AddError(
185 | "Invalid Response",
186 | "The response from the API does not contain a valid 'database_name' field",
187 | )
188 | return
189 | }
190 | if val, ok := resultData["allow_ctas"].(bool); ok {
191 | plan.AllowCTAS = types.BoolValue(val)
192 | }
193 | if val, ok := resultData["allow_cvas"].(bool); ok {
194 | plan.AllowCVAS = types.BoolValue(val)
195 | }
196 | if val, ok := resultData["allow_dml"].(bool); ok {
197 | plan.AllowDML = types.BoolValue(val)
198 | }
199 | if val, ok := resultData["allow_run_async"].(bool); ok {
200 | plan.AllowRunAsync = types.BoolValue(val)
201 | }
202 | if val, ok := resultData["expose_in_sqllab"].(bool); ok {
203 | plan.ExposeInSQLLab = types.BoolValue(val)
204 | }
205 |
206 | diags = resp.State.Set(ctx, &plan)
207 | resp.Diagnostics.Append(diags...)
208 | if resp.Diagnostics.HasError() {
209 | tflog.Debug(ctx, "Exiting Create due to error in setting state", map[string]interface{}{
210 | "diagnostics": resp.Diagnostics,
211 | })
212 | return
213 | }
214 |
215 | tflog.Debug(ctx, fmt.Sprintf("Created database connection: ID=%d, ConnectionName=%s", plan.ID.ValueInt64(), plan.ConnectionName.ValueString()))
216 | }
217 |
218 | // Read refreshes the Terraform state with the latest data from Superset.
219 | func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
220 | tflog.Debug(ctx, "Starting Read method")
221 | var state databaseResourceModel
222 | diags := req.State.Get(ctx, &state)
223 | resp.Diagnostics.Append(diags...)
224 | if resp.Diagnostics.HasError() {
225 | tflog.Debug(ctx, "Exiting Read due to error in getting state", map[string]interface{}{
226 | "diagnostics": resp.Diagnostics,
227 | })
228 | return
229 | }
230 |
231 | db, err := r.client.GetDatabaseConnectionByID(state.ID.ValueInt64())
232 | if err != nil {
233 | resp.Diagnostics.AddError(
234 | "Error reading database connection",
235 | fmt.Sprintf("Could not read database ID %d: %s", state.ID.ValueInt64(), err.Error()),
236 | )
237 | return
238 | }
239 |
240 | result, ok := db["result"].(map[string]interface{})
241 | if !ok {
242 | resp.Diagnostics.AddError(
243 | "Invalid Response",
244 | "The response from the API does not contain the expected 'result' field",
245 | )
246 | return
247 | }
248 |
249 | if val, ok := result["database_name"].(string); ok {
250 | state.ConnectionName = types.StringValue(val)
251 | } else {
252 | resp.Diagnostics.AddError(
253 | "Invalid Response",
254 | "The response from the API does not contain a valid 'database_name' field",
255 | )
256 | return
257 | }
258 | if val, ok := result["allow_ctas"].(bool); ok {
259 | state.AllowCTAS = types.BoolValue(val)
260 | }
261 | if val, ok := result["allow_cvas"].(bool); ok {
262 | state.AllowCVAS = types.BoolValue(val)
263 | }
264 | if val, ok := result["allow_dml"].(bool); ok {
265 | state.AllowDML = types.BoolValue(val)
266 | }
267 | if val, ok := result["allow_run_async"].(bool); ok {
268 | state.AllowRunAsync = types.BoolValue(val)
269 | }
270 | if val, ok := result["expose_in_sqllab"].(bool); ok {
271 | state.ExposeInSQLLab = types.BoolValue(val)
272 | }
273 | if val, ok := result["backend"].(string); ok {
274 | state.DBEngine = types.StringValue(val)
275 | }
276 | if params, ok := result["parameters"].(map[string]interface{}); ok {
277 | if val, ok := params["host"].(string); ok {
278 | state.DBHost = types.StringValue(val)
279 | }
280 | if val, ok := params["username"].(string); ok {
281 | state.DBUser = types.StringValue(val)
282 | }
283 | if val, ok := params["port"].(float64); ok {
284 | state.DBPort = types.Int64Value(int64(val))
285 | }
286 | if val, ok := params["database"].(string); ok {
287 | state.DBName = types.StringValue(val)
288 | }
289 | 		// db_pass is never returned by the API, so the value already held in state is kept as-is.
295 | }
296 |
297 | diags = resp.State.Set(ctx, &state)
298 | resp.Diagnostics.Append(diags...)
299 | if resp.Diagnostics.HasError() {
300 | tflog.Debug(ctx, "Exiting Read due to error in setting state", map[string]interface{}{
301 | "diagnostics": resp.Diagnostics,
302 | })
303 | return
304 | }
305 | }
306 |
307 | // Update updates the resource and sets the updated Terraform state on success.
308 | func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
309 | tflog.Debug(ctx, "Starting Update method")
310 | var plan databaseResourceModel
311 | var state databaseResourceModel
312 |
313 | diags := req.Plan.Get(ctx, &plan)
314 | resp.Diagnostics.Append(diags...)
315 | if resp.Diagnostics.HasError() {
316 | tflog.Debug(ctx, "Exiting Update due to error in retrieving plan", map[string]interface{}{
317 | "diagnostics": resp.Diagnostics,
318 | })
319 | return
320 | }
321 |
322 | diags = req.State.Get(ctx, &state)
323 | resp.Diagnostics.Append(diags...)
324 | if resp.Diagnostics.HasError() {
325 | tflog.Debug(ctx, "Exiting Update due to error in retrieving state", map[string]interface{}{
326 | "diagnostics": resp.Diagnostics,
327 | })
328 | return
329 | }
330 |
331 | sqlalchemyURI := fmt.Sprintf("%s://%s:%s@%s:%d/%s", plan.DBEngine.ValueString(), plan.DBUser.ValueString(), plan.DBPass.ValueString(), plan.DBHost.ValueString(), plan.DBPort.ValueInt64(), plan.DBName.ValueString())
332 | extra := `{"client_encoding": "utf8"}`
333 | payload := map[string]interface{}{
334 | "allow_csv_upload": false,
335 | "allow_ctas": plan.AllowCTAS.ValueBool(),
336 | "allow_cvas": plan.AllowCVAS.ValueBool(),
337 | "allow_dml": plan.AllowDML.ValueBool(),
338 | "allow_multi_schema_metadata_fetch": true,
339 | "allow_run_async": plan.AllowRunAsync.ValueBool(),
340 | "cache_timeout": 0,
341 | "expose_in_sqllab": plan.ExposeInSQLLab.ValueBool(),
342 | "database_name": plan.ConnectionName.ValueString(),
343 | "sqlalchemy_uri": sqlalchemyURI,
344 | "extra": extra,
345 | }
346 |
347 | result, err := r.client.UpdateDatabase(state.ID.ValueInt64(), payload)
348 | if err != nil {
349 | resp.Diagnostics.AddError(
350 | "Unable to Update Superset Database Connection",
351 | fmt.Sprintf("UpdateDatabase failed: %s", err.Error()),
352 | )
353 | return
354 | }
355 |
356 | resultData, ok := result["result"].(map[string]interface{})
357 | if !ok {
358 | resp.Diagnostics.AddError(
359 | "Invalid Response",
360 | "The response from the API does not contain the expected 'result' field",
361 | )
362 | return
363 | }
364 |
365 | // Update state attributes with the values from the response
366 | if val, ok := resultData["database_name"].(string); ok {
367 | state.ConnectionName = types.StringValue(val)
368 | } else {
369 | resp.Diagnostics.AddError(
370 | "Invalid Response",
371 | "The response from the API does not contain a valid 'database_name' field",
372 | )
373 | return
374 | }
375 | if val, ok := resultData["allow_ctas"].(bool); ok {
376 | state.AllowCTAS = types.BoolValue(val)
377 | }
378 | if val, ok := resultData["allow_cvas"].(bool); ok {
379 | state.AllowCVAS = types.BoolValue(val)
380 | }
381 | if val, ok := resultData["allow_dml"].(bool); ok {
382 | state.AllowDML = types.BoolValue(val)
383 | }
384 | if val, ok := resultData["allow_run_async"].(bool); ok {
385 | state.AllowRunAsync = types.BoolValue(val)
386 | }
387 | if val, ok := resultData["expose_in_sqllab"].(bool); ok {
388 | state.ExposeInSQLLab = types.BoolValue(val)
389 | }
390 |
391 | state.DBEngine = types.StringValue(plan.DBEngine.ValueString())
392 | state.DBUser = types.StringValue(plan.DBUser.ValueString())
393 | state.DBPass = types.StringValue(plan.DBPass.ValueString())
394 | state.DBHost = types.StringValue(plan.DBHost.ValueString())
395 | state.DBPort = types.Int64Value(plan.DBPort.ValueInt64())
396 | state.DBName = types.StringValue(plan.DBName.ValueString())
397 |
398 | diags = resp.State.Set(ctx, &state)
399 | resp.Diagnostics.Append(diags...)
400 | if resp.Diagnostics.HasError() {
401 | tflog.Debug(ctx, "Exiting Update due to error in setting state", map[string]interface{}{
402 | "diagnostics": resp.Diagnostics,
403 | })
404 | return
405 | }
406 |
407 | tflog.Debug(ctx, fmt.Sprintf("Updated database connection: ID=%d, ConnectionName=%s", state.ID.ValueInt64(), state.ConnectionName.ValueString()))
408 | }
409 |
410 | // Delete deletes the resource and removes the Terraform state on success.
411 | func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
412 | tflog.Debug(ctx, "Starting Delete method")
413 | var state databaseResourceModel
414 | diags := req.State.Get(ctx, &state)
415 | resp.Diagnostics.Append(diags...)
416 | if resp.Diagnostics.HasError() {
417 | tflog.Debug(ctx, "Exiting Delete due to error in getting state", map[string]interface{}{
418 | "diagnostics": resp.Diagnostics,
419 | })
420 | return
421 | }
422 |
423 | err := r.client.DeleteDatabase(state.ID.ValueInt64())
424 | if err != nil {
425 | resp.Diagnostics.AddError(
426 | "Unable to Delete Superset Database Connection",
427 | fmt.Sprintf("DeleteDatabase failed: %s", err.Error()),
428 | )
429 | return
430 | }
431 |
432 | resp.State.RemoveResource(ctx)
433 | tflog.Debug(ctx, fmt.Sprintf("Deleted database connection: ID=%d", state.ID.ValueInt64()))
434 | }
435 |
436 | // ImportState imports an existing resource.
437 | func (r *databaseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
438 | tflog.Debug(ctx, "Starting ImportState method", map[string]interface{}{
439 | "import_id": req.ID,
440 | })
441 |
442 | // Convert import ID to int64 and set it to the state
443 | id, err := strconv.ParseInt(req.ID, 10, 64)
444 | if err != nil {
445 | resp.Diagnostics.AddError(
446 | "Invalid Import ID",
447 | fmt.Sprintf("The provided import ID '%s' is not a valid int64: %s", req.ID, err.Error()),
448 | )
449 | return
450 | }
451 |
452 | 	// Set the ID in the state before calling Read
453 | 	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), id)...)
454 |
455 | 	// Call Read to refresh the state with the latest data, then copy the refreshed state and diagnostics back
456 | 	readResp := resource.ReadResponse{State: resp.State, Diagnostics: resp.Diagnostics}
457 | 	r.Read(ctx, resource.ReadRequest{State: resp.State}, &readResp)
458 | 	resp.State = readResp.State
459 | 	resp.Diagnostics = readResp.Diagnostics
460 |
461 | tflog.Debug(ctx, "ImportState completed successfully", map[string]interface{}{
462 | "import_id": req.ID,
463 | })
464 | }
465 |
466 | // Configure adds the provider configured client to the resource.
467 | func (r *databaseResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
468 | if req.ProviderData == nil {
469 | return
470 | }
471 |
472 | client, ok := req.ProviderData.(*client.Client)
473 | if !ok {
474 | resp.Diagnostics.AddError(
475 | "Unexpected Resource Configure Type",
476 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
477 | )
478 | return
479 | }
480 |
481 | r.client = client
482 | }
483 |
--------------------------------------------------------------------------------
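A usage sketch for the database resource above. The attribute names come from its schema; the values are placeholders. The provider builds the SQLAlchemy URI as `db_engine://db_user:db_pass@db_host:db_port/db_name`, so every connection field is required and `db_pass` is marked sensitive.

```hcl
resource "superset_database" "analytics" {
  connection_name  = "analytics-postgres" # placeholder values throughout
  db_engine        = "postgresql"
  db_user          = "superset_ro"
  db_pass          = "change-me" # sensitive; supply via a variable in practice
  db_host          = "db.internal.example.com"
  db_port          = 5432
  db_name          = "analytics"
  allow_ctas       = false
  allow_cvas       = false
  allow_dml        = false
  allow_run_async  = true
  expose_in_sqllab = true
}
```

On import, `ImportState` parses the numeric database ID and immediately calls `Read` to populate the remaining attributes; only `db_pass` cannot be recovered from the API and must be set in configuration.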
/internal/provider/role_permissions_resource.go:
--------------------------------------------------------------------------------
1 | package provider
2 |
3 | import (
4 | 	"context"
5 | 	"fmt"
6 | 	"strconv"
7 | 	"time"
8 |
9 | 	"github.com/hashicorp/terraform-plugin-framework/path"
10 | 	"github.com/hashicorp/terraform-plugin-framework/resource"
11 | 	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
12 | 	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
13 | 	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
14 | 	"github.com/hashicorp/terraform-plugin-framework/types"
15 | 	"github.com/hashicorp/terraform-plugin-log/tflog"
16 |
17 | 	"terraform-provider-superset/internal/client"
18 | )
19 |
20 | // Ensure the implementation satisfies the expected interfaces.
21 | var (
22 | _ resource.Resource = &rolePermissionsResource{}
23 | _ resource.ResourceWithConfigure = &rolePermissionsResource{}
24 | _ resource.ResourceWithImportState = &rolePermissionsResource{}
25 | )
26 |
27 | // NewRolePermissionsResource is a helper function to simplify the provider implementation.
28 | func NewRolePermissionsResource() resource.Resource {
29 | return &rolePermissionsResource{}
30 | }
31 |
32 | // rolePermissionsResource is the resource implementation.
33 | type rolePermissionsResource struct {
34 | client *client.Client
35 | }
36 |
37 | // rolePermissionsResourceModel maps the resource schema data.
38 | type rolePermissionsResourceModel struct {
39 | ID types.String `tfsdk:"id"`
40 | RoleName types.String `tfsdk:"role_name"`
41 | ResourcePermissions []resourcePermissionModel `tfsdk:"resource_permissions"`
42 | LastUpdated types.String `tfsdk:"last_updated"`
43 | }
44 |
45 | type resourcePermissionModel struct {
46 | ID types.Int64 `tfsdk:"id"`
47 | Permission types.String `tfsdk:"permission"`
48 | ViewMenu types.String `tfsdk:"view_menu"`
49 | }
50 |
51 | // Metadata returns the resource type name.
52 | func (r *rolePermissionsResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
53 | resp.TypeName = req.ProviderTypeName + "_role_permissions"
54 | }
55 |
56 | // Schema defines the schema for the resource.
57 | func (r *rolePermissionsResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
58 | resp.Schema = schema.Schema{
59 | Description: "Manages the permissions associated with a role in Superset.",
60 | Attributes: map[string]schema.Attribute{
61 | "id": schema.StringAttribute{
62 | Description: "The unique identifier for the role permissions resource.",
63 | Computed: true,
64 | PlanModifiers: []planmodifier.String{
65 | stringplanmodifier.UseStateForUnknown(),
66 | },
67 | },
68 | "last_updated": schema.StringAttribute{
69 | Description: "The timestamp of the last update to the role permissions.",
70 | Computed: true,
71 | },
72 | "role_name": schema.StringAttribute{
73 | Description: "The name of the role to which the permissions are assigned.",
74 | Required: true,
75 | },
76 | "resource_permissions": schema.ListNestedAttribute{
77 | Description: "A list of permissions associated with the role.",
78 | Required: true,
79 | NestedObject: schema.NestedAttributeObject{
80 | Attributes: map[string]schema.Attribute{
81 | "id": schema.Int64Attribute{
82 | Description: "The unique identifier of the permission.",
83 | Computed: true,
84 | },
85 | "permission": schema.StringAttribute{
86 | Description: "The name of the permission.",
87 | Required: true,
88 | },
89 | "view_menu": schema.StringAttribute{
90 | Description: "The name of the view menu associated with the permission.",
91 | Required: true,
92 | },
93 | },
94 | },
95 | },
96 | },
97 | }
98 | }
99 |
100 | // Create creates the resource and sets the initial Terraform state.
101 | func (r *rolePermissionsResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
102 | tflog.Debug(ctx, "Starting Create method")
103 | // Retrieve values from plan
104 | var plan rolePermissionsResourceModel
105 | diags := req.Plan.Get(ctx, &plan)
106 | resp.Diagnostics.Append(diags...)
107 | if resp.Diagnostics.HasError() {
108 | tflog.Debug(ctx, "Exiting Create due to error in retrieving plan", map[string]interface{}{
109 | "diagnostics": resp.Diagnostics,
110 | })
111 | return
112 | }
113 |
114 | tflog.Debug(ctx, "Plan obtained", map[string]interface{}{
115 | "roleName": plan.RoleName.ValueString(),
116 | })
117 |
118 | // Get the role ID based on role name
119 | roleID, err := r.client.GetRoleIDByName(plan.RoleName.ValueString())
120 | if err != nil {
121 | resp.Diagnostics.AddError(
122 | "Error finding role",
123 | fmt.Sprintf("Could not find role '%s': %s", plan.RoleName.ValueString(), err),
124 | )
125 | return
126 | }
127 |
128 | tflog.Debug(ctx, "Role ID obtained", map[string]interface{}{
129 | "roleID": roleID,
130 | })
131 |
132 | // Prepare permission IDs from plan using a map to ensure unique IDs
133 | var resourcePermissions []resourcePermissionModel
134 | permissionIDs := map[int64]bool{}
135 | for _, perm := range plan.ResourcePermissions {
136 | permID, err := r.client.GetPermissionIDByNameAndView(perm.Permission.ValueString(), perm.ViewMenu.ValueString())
137 | if err != nil {
138 | resp.Diagnostics.AddError(
139 | "Error finding permission ID",
140 | fmt.Sprintf("Could not find permission ID for '%s' and view '%s': %s", perm.Permission.ValueString(), perm.ViewMenu.ValueString(), err),
141 | )
142 | return
143 | }
144 | permissionIDs[permID] = true
145 | resourcePermissions = append(resourcePermissions, resourcePermissionModel{
146 | ID: types.Int64Value(permID),
147 | Permission: perm.Permission,
148 | ViewMenu: perm.ViewMenu,
149 | })
150 | }
151 |
152 | tflog.Debug(ctx, "Permission IDs prepared", map[string]interface{}{
153 | "permissionIDs": permissionIDs,
154 | })
155 |
156 | // Convert map to slice for the API call
157 | var permIDList []int64
158 | for id := range permissionIDs {
159 | permIDList = append(permIDList, id)
160 | }
161 |
162 | tflog.Debug(ctx, "Permission ID list for API call", map[string]interface{}{
163 | "permIDList": permIDList,
164 | })
165 |
166 | // Update role permissions using the client
167 | if err := r.client.UpdateRolePermissions(roleID, permIDList); err != nil {
168 | resp.Diagnostics.AddError(
169 | "Error updating role permissions",
170 | "Failed to update role permissions: "+err.Error(),
171 | )
172 | return
173 | }
174 |
175 | tflog.Debug(ctx, "Role permissions updated")
176 |
177 | // Set the state with the updated data
178 | // sort.Slice(resourcePermissions, func(i, j int) bool {
179 | // return resourcePermissions[i].ID.ValueInt64() < resourcePermissions[j].ID.ValueInt64()
180 | // })
181 |
182 | result := rolePermissionsResourceModel{
183 | ID: types.StringValue(fmt.Sprintf("%d", roleID)),
184 | RoleName: plan.RoleName,
185 | ResourcePermissions: resourcePermissions,
186 | LastUpdated: types.StringValue(time.Now().Format(time.RFC3339)),
187 | }
188 |
189 | diags = resp.State.Set(ctx, &result)
190 | resp.Diagnostics.Append(diags...)
191 | if resp.Diagnostics.HasError() {
192 | tflog.Debug(ctx, "Exiting Create due to error in setting state", map[string]interface{}{
193 | "diagnostics": resp.Diagnostics,
194 | })
195 | return
196 | }
197 |
198 | tflog.Debug(ctx, "Create method completed successfully")
199 | }
200 |
201 | // Read refreshes the Terraform state with the latest data.
202 | func (r *rolePermissionsResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
203 | tflog.Debug(ctx, "Starting Read method")
204 |
205 | // Get current state
206 | var state rolePermissionsResourceModel
207 | diags := req.State.Get(ctx, &state)
208 | resp.Diagnostics.Append(diags...)
209 | if resp.Diagnostics.HasError() {
210 | return
211 | }
212 |
213 | tflog.Debug(ctx, "State obtained", map[string]interface{}{
214 | "roleName": state.RoleName.ValueString(),
215 | })
216 |
217 | // Get role ID
218 | roleID, err := r.client.GetRoleIDByName(state.RoleName.ValueString())
219 | if err != nil {
220 | resp.Diagnostics.AddError(
221 | "Error finding role",
222 | fmt.Sprintf("Could not find role '%s': %s", state.RoleName.ValueString(), err),
223 | )
224 | return
225 | }
226 |
227 | tflog.Debug(ctx, "Role ID obtained", map[string]interface{}{
228 | "roleID": roleID,
229 | })
230 |
231 | // Get permissions from Superset
232 | permissions, err := r.client.GetRolePermissions(roleID)
233 | if err != nil {
234 | resp.Diagnostics.AddError(
235 | "Error reading role permissions",
236 | fmt.Sprintf("Could not read permissions for role ID %d: %s", roleID, err),
237 | )
238 | return
239 | }
240 |
241 | tflog.Debug(ctx, "Permissions fetched from Superset", map[string]interface{}{
242 | "permissions": permissions,
243 | })
244 |
245 | // Map permissions to resource model
246 | var resourcePermissions []resourcePermissionModel
247 | for _, perm := range permissions {
248 | tflog.Debug(ctx, "Processing fetched permission", map[string]interface{}{
249 | "ID": perm.ID,
250 | "Permission": perm.PermissionName,
251 | "ViewMenu": perm.ViewMenuName,
252 | })
253 |
254 | // Create mapped permission
255 | mappedPermission := resourcePermissionModel{
256 | ID: types.Int64Value(perm.ID),
257 | Permission: types.StringValue(perm.PermissionName),
258 | ViewMenu: types.StringValue(perm.ViewMenuName),
259 | }
260 |
261 | // Verify mapping immediately after setting the values
262 | tflog.Debug(ctx, "Mapped Permission", map[string]interface{}{
263 | "ID": mappedPermission.ID.ValueInt64(),
264 | "Permission": mappedPermission.Permission.ValueString(),
265 | "ViewMenu": mappedPermission.ViewMenu.ValueString(),
266 | })
267 |
268 | resourcePermissions = append(resourcePermissions, mappedPermission)
269 | }
270 |
271 | // Debug full content of resourcePermissions by converting to a slice of maps
272 | var debugResourcePermissions []map[string]interface{}
273 | for _, rp := range resourcePermissions {
274 | debugResourcePermissions = append(debugResourcePermissions, map[string]interface{}{
275 | "ID": rp.ID.ValueInt64(),
276 | "Permission": rp.Permission.ValueString(),
277 | "ViewMenu": rp.ViewMenu.ValueString(),
278 | })
279 | }
280 |
281 | tflog.Debug(ctx, "Full content of resourcePermissions", map[string]interface{}{
282 | "resourcePermissions": debugResourcePermissions,
283 | })
284 |
285 | // Verify the final mapped permissions
286 | // sort.Slice(resourcePermissions, func(i, j int) bool {
287 | // return resourcePermissions[i].ID.ValueInt64() < resourcePermissions[j].ID.ValueInt64()
288 | // })
289 |
290 | for _, rp := range resourcePermissions {
291 | tflog.Debug(ctx, "Mapped Permission in List", map[string]interface{}{
292 | "ID": rp.ID.ValueInt64(),
293 | "Permission": rp.Permission.ValueString(),
294 | "ViewMenu": rp.ViewMenu.ValueString(),
295 | })
296 | }
297 |
298 | tflog.Debug(ctx, "Final Permissions mapped to resource model", map[string]interface{}{
299 | "resourcePermissions": debugResourcePermissions,
300 | })
301 |
302 | // Overwrite state with refreshed values
303 | state.ResourcePermissions = resourcePermissions
304 | state.LastUpdated = types.StringValue(time.Now().Format(time.RFC3339))
305 |
306 | diags = resp.State.Set(ctx, &state)
307 | resp.Diagnostics.Append(diags...)
308 | if resp.Diagnostics.HasError() {
309 | return
310 | }
311 |
312 | tflog.Debug(ctx, "Read method completed successfully")
313 | }
314 |
315 | // Update updates the resource and sets the updated Terraform state on success.
316 | func (r *rolePermissionsResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
317 | tflog.Debug(ctx, "Starting Update method")
318 | // Retrieve values from plan
319 | var plan rolePermissionsResourceModel
320 | diags := req.Plan.Get(ctx, &plan)
321 | resp.Diagnostics.Append(diags...)
322 | if resp.Diagnostics.HasError() {
323 | tflog.Debug(ctx, "Exiting Update due to error in retrieving plan", map[string]interface{}{
324 | "diagnostics": resp.Diagnostics,
325 | })
326 | return
327 | }
328 |
329 | tflog.Debug(ctx, "Plan obtained", map[string]interface{}{
330 | "roleName": plan.RoleName.ValueString(),
331 | })
332 |
333 | // Get the role ID based on role name
334 | roleID, err := r.client.GetRoleIDByName(plan.RoleName.ValueString())
335 | if err != nil {
336 | resp.Diagnostics.AddError(
337 | "Error finding role",
338 | fmt.Sprintf("Could not find role '%s': %s", plan.RoleName.ValueString(), err),
339 | )
340 | return
341 | }
342 |
343 | tflog.Debug(ctx, "Role ID obtained", map[string]interface{}{
344 | "roleID": roleID,
345 | })
346 |
347 | // Prepare permission IDs from plan using a map to ensure unique IDs
348 | var resourcePermissions []resourcePermissionModel
349 | permissionIDs := map[int64]bool{}
350 | for _, perm := range plan.ResourcePermissions {
351 | permID, err := r.client.GetPermissionIDByNameAndView(perm.Permission.ValueString(), perm.ViewMenu.ValueString())
352 | if err != nil {
353 | resp.Diagnostics.AddError(
354 | "Error finding permission ID",
355 | fmt.Sprintf("Could not find permission ID for '%s' and view '%s': %s", perm.Permission.ValueString(), perm.ViewMenu.ValueString(), err),
356 | )
357 | return
358 | }
359 | permissionIDs[permID] = true
360 | resourcePermissions = append(resourcePermissions, resourcePermissionModel{
361 | ID: types.Int64Value(permID),
362 | Permission: perm.Permission,
363 | ViewMenu: perm.ViewMenu,
364 | })
365 | }
366 |
367 | tflog.Debug(ctx, "Permission IDs prepared", map[string]interface{}{
368 | "permissionIDs": permissionIDs,
369 | })
370 |
371 | // Convert map to slice for the API call
372 | var permIDList []int64
373 | for id := range permissionIDs {
374 | permIDList = append(permIDList, id)
375 | }
376 |
377 | tflog.Debug(ctx, "Permission ID list for API call", map[string]interface{}{
378 | "permIDList": permIDList,
379 | })
380 |
381 | // Update role permissions using the client
382 | if err := r.client.UpdateRolePermissions(roleID, permIDList); err != nil {
383 | resp.Diagnostics.AddError(
384 | "Error updating role permissions",
385 | "Failed to update role permissions: "+err.Error(),
386 | )
387 | return
388 | }
389 |
390 | tflog.Debug(ctx, "Role permissions updated")
391 |
392 | // Set the state with the updated data
393 | // sort.Slice(resourcePermissions, func(i, j int) bool {
394 | // return resourcePermissions[i].ID.ValueInt64() < resourcePermissions[j].ID.ValueInt64()
395 | // })
396 |
397 | result := rolePermissionsResourceModel{
398 | ID: types.StringValue(fmt.Sprintf("%d", roleID)),
399 | RoleName: plan.RoleName,
400 | ResourcePermissions: resourcePermissions,
401 | LastUpdated: types.StringValue(time.Now().Format(time.RFC3339)),
402 | }
403 |
404 | diags = resp.State.Set(ctx, &result)
405 | resp.Diagnostics.Append(diags...)
406 | if resp.Diagnostics.HasError() {
407 | tflog.Debug(ctx, "Exiting Update due to error in setting state", map[string]interface{}{
408 | "diagnostics": resp.Diagnostics,
409 | })
410 | return
411 | }
412 |
413 | tflog.Debug(ctx, "Update method completed successfully")
414 | }
415 |
416 | // Delete deletes the resource and removes the Terraform state on success.
417 | func (r *rolePermissionsResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
418 | tflog.Debug(ctx, "Starting Delete method")
419 | var state rolePermissionsResourceModel
420 | diags := req.State.Get(ctx, &state)
421 | resp.Diagnostics.Append(diags...)
422 | if resp.Diagnostics.HasError() {
423 | tflog.Debug(ctx, "Exiting Delete due to error in getting state", map[string]interface{}{
424 | "diagnostics": resp.Diagnostics,
425 | })
426 | return
427 | }
428 |
429 | tflog.Debug(ctx, "State obtained", map[string]interface{}{
430 | "roleName": state.RoleName.ValueString(),
431 | })
432 |
433 | roleID, err := r.client.GetRoleIDByName(state.RoleName.ValueString())
434 | if err != nil {
435 | resp.Diagnostics.AddError(
436 | "Error finding role",
437 | fmt.Sprintf("Could not find role '%s': %s", state.RoleName.ValueString(), err),
438 | )
439 | return
440 | }
441 |
442 | tflog.Debug(ctx, "Role ID obtained", map[string]interface{}{
443 | "roleID": roleID,
444 | })
445 |
446 | err = r.client.ClearRolePermissions(roleID)
447 | if err != nil {
448 | resp.Diagnostics.AddError(
449 | "Error clearing role permissions",
450 | fmt.Sprintf("Could not clear permissions for role ID %d: %s", roleID, err),
451 | )
452 | return
453 | }
454 |
455 | tflog.Debug(ctx, "Role permissions cleared")
456 |
457 | resp.State.RemoveResource(ctx)
458 | tflog.Debug(ctx, "Delete method completed successfully")
459 | }
460 |
461 | // Configure adds the provider configured client to the resource.
462 | func (r *rolePermissionsResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
463 | if req.ProviderData == nil {
464 | return
465 | }
466 |
467 | client, ok := req.ProviderData.(*client.Client)
468 | if !ok {
469 | resp.Diagnostics.AddError(
470 | "Unexpected Resource Configure Type",
471 | fmt.Sprintf("Expected *client.Client, got: %T. Please report this issue to the provider developers.", req.ProviderData),
472 | )
473 | return
474 | }
475 |
476 | r.client = client
477 | }
478 |
479 | // ImportState imports the resource state.
480 | func (r *rolePermissionsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
481 | tflog.Debug(ctx, "Starting ImportState method", map[string]interface{}{
482 | "import_id": req.ID,
483 | })
484 |
485 | // Use the role ID from the import ID
486 | resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
487 |
488 | // Fetch the role name based on the ID
489 | roleID, err := strconv.ParseInt(req.ID, 10, 64)
490 | if err != nil {
491 | resp.Diagnostics.AddError("Error parsing role ID", fmt.Sprintf("Could not parse role ID '%s': %s", req.ID, err))
492 | return
493 | }
494 |
495 | role, err := r.client.GetRole(roleID)
496 | if err != nil {
497 | resp.Diagnostics.AddError("Error fetching role", fmt.Sprintf("Could not fetch role with ID '%d': %s", roleID, err))
498 | return
499 | }
500 |
501 | // Manually set the role name in the state
502 | resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("role_name"), role.Name)...)
503 | if resp.Diagnostics.HasError() {
504 | return
505 | }
506 |
507 | tflog.Debug(ctx, "ImportState completed successfully", map[string]interface{}{
508 | "import_id": req.ID,
509 | "role_name": role.Name,
510 | })
511 | }
512 |
--------------------------------------------------------------------------------
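A usage sketch for the role permissions resource above. Attribute names follow its schema; the permission and view menu strings are placeholders and must match entries that already exist in Superset, since Create resolves each pair to a permission ID before replacing the role's permission set.

```hcl
resource "superset_role_permissions" "analysts" {
  role_name = "Analysts"

  resource_permissions = [
    {
      permission = "can_read"
      view_menu  = "Dashboard"
    },
    {
      permission = "can_read"
      view_menu  = "Chart"
    },
  ]
}
```

Destroying the resource clears all permissions from the role rather than deleting the role itself, and importing uses the numeric role ID, from which `ImportState` also resolves `role_name`.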