From 3e2ca4c632bbfb9d138b33b3068f55bd38aa954a Mon Sep 17 00:00:00 2001
From: Dariusz Porowski <3431813+DariuszPorowski@users.noreply.github.com>
Date: Mon, 30 Dec 2024 11:23:06 -0800
Subject: [PATCH 1/2] refactor(fabric-items): use generic impl
Co-authored-by: Pablo Zaidenvoren <2192882+PabloZaiden@users.noreply.github.com>
---
.github/workflows/changelog-existence.yml | 4 +-
.github/workflows/codeql.yml | 2 +-
.github/workflows/dependency-review.yml | 2 +-
.github/workflows/ghpages.yml | 4 +-
.github/workflows/pr-assign-author.yml | 2 +-
.github/workflows/release.yml | 2 +-
.github/workflows/semantic-pr.yml | 2 +-
.github/workflows/tag.yml | 2 +-
.github/workflows/test.yml | 14 +-
docs/data-sources/environments.md | 46 ++
docs/data-sources/eventhouse.md | 39 +-
docs/data-sources/eventhouses.md | 11 +
docs/data-sources/kql_databases.md | 12 +
docs/data-sources/lakehouses.md | 22 +
docs/data-sources/warehouse.md | 6 +-
docs/data-sources/warehouses.md | 11 +
docs/resources/environment.md | 8 +-
docs/resources/eventhouse.md | 62 ++-
docs/resources/kql_database.md | 24 +-
docs/resources/lakehouse.md | 8 +-
docs/resources/spark_job_definition.md | 6 +-
docs/resources/warehouse.md | 14 +-
.../fabric_eventhouse/data-source.tf | 18 +
.../resources/fabric_eventhouse/outputs.tf | 8 +
.../resources/fabric_eventhouse/resource.tf | 31 +-
.../resources/fabric_kql_database/resource.tf | 8 +-
.../fabric_spark_job_definition/resource.tf | 6 +-
go.mod | 2 +-
go.sum | 4 +-
internal/common/errors.go | 26 +-
internal/pkg/fabricitem/data_item.go | 2 +-
.../pkg/fabricitem/data_item_definition.go | 11 +-
.../data_item_definition_properties.go | 39 +-
.../pkg/fabricitem/data_item_properties.go | 170 +++++++
internal/pkg/fabricitem/data_items.go | 2 +-
.../pkg/fabricitem/data_items_properties.go | 8 +-
internal/pkg/fabricitem/data_schema.go | 97 ++--
internal/pkg/fabricitem/models.go | 4 +-
internal/pkg/fabricitem/models_data_item.go | 22 +-
.../fabricitem/models_data_item_definition.go | 44 +-
.../models_data_item_definition_properties.go | 22 +-
.../models_data_item_properties.go} | 6 +-
internal/pkg/fabricitem/models_data_items.go | 10 +-
.../pkg/fabricitem/models_resource_item.go | 103 +++-
...ource_item_config_definition_properties.go | 19 +
.../models_resource_item_config_properties.go | 15 +
.../models_resource_item_definition.go | 55 +-
...els_resource_item_definition_properties.go | 124 +----
.../models_resource_item_properties.go | 13 +
internal/pkg/fabricitem/resource_item.go | 9 +-
...ource_item_config_definition_properties.go | 433 ++++++++++++++++
.../resource_item_config_properties.go | 341 +++++++++++++
.../fabricitem/resource_item_definition.go | 50 +-
.../resource_item_definition_properties.go | 79 +--
.../fabricitem/resource_item_properties.go | 324 ++++++++++++
internal/pkg/fabricitem/resource_schema.go | 151 ++++--
internal/pkg/utils/errors.go | 4 +-
internal/pkg/utils/errors_test.go | 4 +-
internal/provider/provider.go | 24 +-
.../dashboard/data_dashboards_test.go | 2 +-
.../services/datamart/data_datamarts_test.go | 2 +-
.../datapipeline/data_data_pipeline_test.go | 2 +-
.../datapipeline/data_data_pipelines_test.go | 2 +-
.../resource_data_pipeline_test.go | 2 +-
...ource_domain_workspace_assignments_test.go | 2 +-
.../services/environment/data_environment.go | 260 ++--------
.../environment/data_environment_test.go | 14 +-
.../services/environment/data_environments.go | 74 ++-
.../environment/data_environments_test.go | 14 +-
internal/services/environment/models.go | 39 --
.../models_resource_environment.go | 32 --
.../environment/resource_environment.go | 396 ++------------
.../environment/resource_environment_test.go | 14 +-
.../environment/schema_data_environment.go | 80 +++
.../schema_resource_environment.go | 80 +++
internal/services/eventhouse/base.go | 22 +-
.../services/eventhouse/data_eventhouse.go | 222 ++------
.../eventhouse/data_eventhouse_test.go | 35 +-
.../services/eventhouse/data_eventhouses.go | 71 ++-
.../eventhouse/data_eventhouses_test.go | 14 +-
internal/services/eventhouse/models.go | 69 ---
.../eventhouse/resource_eventhouse.go | 362 ++-----------
.../eventhouse/resource_eventhouse_test.go | 229 ++++++---
.../eventhouse/schema_data_eventhouse.go | 31 ++
.../eventhouse/schema_resource_eventhouse.go | 31 ++
.../eventstream/data_eventstream_test.go | 2 +-
.../eventstream/data_eventstreams_test.go | 2 +-
.../eventstream/resource_eventstream_test.go | 2 +-
internal/services/kqldatabase/base.go | 21 +-
.../services/kqldatabase/data_kql_database.go | 225 ++------
.../kqldatabase/data_kql_database_test.go | 43 +-
.../kqldatabase/data_kql_databases.go | 69 ++-
.../kqldatabase/data_kql_databases_test.go | 2 +-
internal/services/kqldatabase/models.go | 126 -----
.../models_resource_kql_database.go | 18 +
.../kqldatabase/resource_kql_database.go | 481 +++---------------
.../kqldatabase/resource_kql_database_test.go | 124 ++++-
.../kqldatabase/schema_data_kql_database.go | 38 ++
.../schema_resource_kql_database.go | 117 +++++
.../kqlqueryset/data_kql_queryset_test.go | 2 +-
.../kqlqueryset/data_kql_querysets_test.go | 2 +-
internal/services/lakehouse/data_lakehouse.go | 230 ++-------
.../lakehouse/data_lakehouse_table_test.go | 2 +-
.../lakehouse/data_lakehouse_tables_test.go | 2 +-
.../services/lakehouse/data_lakehouse_test.go | 2 +-
.../services/lakehouse/data_lakehouses.go | 74 ++-
.../lakehouse/data_lakehouses_test.go | 8 +-
internal/services/lakehouse/models.go | 33 --
.../lakehouse/models_data_lakehouse.go | 13 -
.../lakehouse/models_resource_lakehouse.go | 43 --
.../services/lakehouse/resource_lakehouse.go | 453 +++--------------
.../lakehouse/resource_lakehouse_test.go | 4 +-
.../lakehouse/schema_data_lakehouse.go | 50 ++
.../lakehouse/schema_resource_lakehouse.go | 64 +++
.../mlexperiment/data_ml_experiment_test.go | 2 +-
.../mlexperiment/data_ml_experiments_test.go | 2 +-
.../resource_ml_experiment_test.go | 2 +-
.../services/mlmodel/data_ml_model_test.go | 2 +-
.../services/mlmodel/data_ml_models_test.go | 2 +-
.../mlmodel/resource_ml_model_test.go | 2 +-
.../services/notebook/data_notebook_test.go | 2 +-
.../services/notebook/data_notebooks_test.go | 2 +-
.../notebook/resource_notebook_test.go | 2 +-
internal/services/report/data_report_test.go | 2 +-
internal/services/report/data_reports_test.go | 2 +-
.../services/report/resource_report_test.go | 2 +-
.../semanticmodel/data_semantic_model_test.go | 2 +-
.../data_semantic_models_test.go | 2 +-
.../resource_semantic_model_test.go | 2 +-
.../data_spark_environment_settings_test.go | 2 +-
.../data_spark_job_definition.go | 26 +-
.../data_spark_job_definition_test.go | 9 +-
.../data_spark_job_definitions.go | 24 +-
.../data_spark_job_definitions_test.go | 4 +-
.../resource_spark_job_definition.go | 21 +-
.../resource_spark_job_definition_test.go | 5 +-
.../schema_data_spark_job_definition.go | 19 +
.../schema_resource_spark_job_definition.go | 19 +
.../sqlendpoint/data_sql_endpoints_test.go | 2 +-
internal/services/warehouse/data_warehouse.go | 227 ++-------
.../services/warehouse/data_warehouse_test.go | 14 +-
.../services/warehouse/data_warehouses.go | 69 ++-
.../warehouse/data_warehouses_test.go | 14 +-
internal/services/warehouse/models.go | 36 --
.../warehouse/models_data_warehouse.go | 13 -
.../warehouse/models_resource_warehouse.go | 32 --
.../services/warehouse/resource_warehouse.go | 392 ++------------
.../warehouse/resource_warehouse_test.go | 14 +-
.../warehouse/schema_data_warehouse.go | 30 ++
.../warehouse/schema_resource_warehouse.go | 30 ++
.../data_workspace_role_assignments_test.go | 2 +-
.../services/workspace/data_workspace_test.go | 2 +-
internal/testhelp/fakes/fabric_environment.go | 16 +-
internal/testhelp/fakes/fabric_eventhouse.go | 72 ++-
internal/testhelp/fakes/fabric_kqldatabase.go | 77 ++-
internal/testhelp/fakes/fabric_lakehouse.go | 17 +-
.../fakes/fabric_sparkjobdefinition.go | 18 +-
internal/testhelp/fakes/fabric_warehouse.go | 15 +-
internal/testhelp/fakes/fake_interfaces.go | 7 +
internal/testhelp/fakes/fake_typedhandler.go | 53 +-
.../eventhouse/EventhouseProperties.json.tmpl | 1 +
.../kql_database/DatabaseProperties.json.tmpl | 6 +
.../fixtures/kql_database/DatabaseSchema.kql | 7 +
tools/scripts/Set-WellKnown.ps1 | 139 +++--
164 files changed, 4412 insertions(+), 3996 deletions(-)
create mode 100644 internal/pkg/fabricitem/data_item_properties.go
rename internal/{services/environment/models_data_environment.go => pkg/fabricitem/models_data_item_properties.go} (60%)
create mode 100644 internal/pkg/fabricitem/models_resource_item_config_definition_properties.go
create mode 100644 internal/pkg/fabricitem/models_resource_item_config_properties.go
create mode 100644 internal/pkg/fabricitem/models_resource_item_properties.go
create mode 100644 internal/pkg/fabricitem/resource_item_config_definition_properties.go
create mode 100644 internal/pkg/fabricitem/resource_item_config_properties.go
create mode 100644 internal/pkg/fabricitem/resource_item_properties.go
delete mode 100644 internal/services/environment/models_resource_environment.go
create mode 100644 internal/services/environment/schema_data_environment.go
create mode 100644 internal/services/environment/schema_resource_environment.go
create mode 100644 internal/services/eventhouse/schema_data_eventhouse.go
create mode 100644 internal/services/eventhouse/schema_resource_eventhouse.go
create mode 100644 internal/services/kqldatabase/models_resource_kql_database.go
create mode 100644 internal/services/kqldatabase/schema_data_kql_database.go
create mode 100644 internal/services/kqldatabase/schema_resource_kql_database.go
delete mode 100644 internal/services/lakehouse/models_data_lakehouse.go
create mode 100644 internal/services/lakehouse/schema_data_lakehouse.go
create mode 100644 internal/services/lakehouse/schema_resource_lakehouse.go
create mode 100644 internal/services/sparkjobdefinition/schema_data_spark_job_definition.go
create mode 100644 internal/services/sparkjobdefinition/schema_resource_spark_job_definition.go
delete mode 100644 internal/services/warehouse/models_data_warehouse.go
delete mode 100644 internal/services/warehouse/models_resource_warehouse.go
create mode 100644 internal/services/warehouse/schema_data_warehouse.go
create mode 100644 internal/services/warehouse/schema_resource_warehouse.go
create mode 100644 internal/testhelp/fixtures/eventhouse/EventhouseProperties.json.tmpl
create mode 100644 internal/testhelp/fixtures/kql_database/DatabaseProperties.json.tmpl
create mode 100644 internal/testhelp/fixtures/kql_database/DatabaseSchema.kql
diff --git a/.github/workflows/changelog-existence.yml b/.github/workflows/changelog-existence.yml
index 6af2e79c..d010e428 100644
--- a/.github/workflows/changelog-existence.yml
+++ b/.github/workflows/changelog-existence.yml
@@ -20,7 +20,7 @@ jobs:
changelog-existence:
name: ๐ Check Changelog
if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changelog') && github.actor != 'dependabot[bot]' }}
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: โคต๏ธ Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -121,7 +121,7 @@ jobs:
changelog-skip:
name: ๐ Check Changelog
if: ${{ contains(github.event.pull_request.labels.*.name, 'skip-changelog') || github.actor == 'dependabot[bot]' }}
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: ๐ Find comment
if: github.actor != 'dependabot[bot]'
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 9e465017..562fcd90 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -34,7 +34,7 @@ permissions:
jobs:
codeql:
name: ๐ฉบ CodeQL Analysis
- runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
+ runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-24.04' }}
timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
permissions:
# required for all workflows
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index c6471548..12893fad 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -13,7 +13,7 @@ permissions:
jobs:
main:
name: ๐ต๏ธ Check Dependency
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: โคต๏ธ Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
diff --git a/.github/workflows/ghpages.yml b/.github/workflows/ghpages.yml
index 85133dc3..86c44718 100644
--- a/.github/workflows/ghpages.yml
+++ b/.github/workflows/ghpages.yml
@@ -32,7 +32,7 @@ jobs:
# Build job
build:
name: ๐๏ธ Build
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: โคต๏ธ Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -70,7 +70,7 @@ jobs:
deploy:
name: ๐ Deploy
needs: build
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
diff --git a/.github/workflows/pr-assign-author.yml b/.github/workflows/pr-assign-author.yml
index 4b705007..480c10b3 100644
--- a/.github/workflows/pr-assign-author.yml
+++ b/.github/workflows/pr-assign-author.yml
@@ -13,7 +13,7 @@ permissions:
jobs:
assign-author:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 7391f9e9..2a630c53 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -12,7 +12,7 @@ permissions:
jobs:
release:
name: ๐ Release
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
permissions:
id-token: write
contents: write
diff --git a/.github/workflows/semantic-pr.yml b/.github/workflows/semantic-pr.yml
index c81234bc..0c977d54 100644
--- a/.github/workflows/semantic-pr.yml
+++ b/.github/workflows/semantic-pr.yml
@@ -20,7 +20,7 @@ permissions:
jobs:
main:
name: ๐ Check PR title
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: ๐ Run Semantic PR validation
uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 # v5.5.3
diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml
index 855475fe..d2daf08f 100644
--- a/.github/workflows/tag.yml
+++ b/.github/workflows/tag.yml
@@ -14,7 +14,7 @@ permissions:
jobs:
release-tag:
name: ๐ท๏ธ Set Release Tag
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
permissions:
contents: write
steps:
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index ffe42b04..bb3f6052 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -33,7 +33,7 @@ permissions:
jobs:
changes:
name: ๐ Check Changes
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
outputs:
src: ${{ steps.filter.outputs.src }}
steps:
@@ -56,7 +56,7 @@ jobs:
if: needs.changes.outputs.src == 'true'
environment:
name: development
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
@@ -182,7 +182,7 @@ jobs:
name: ๐๏ธ Check Build
needs: changes
if: needs.changes.outputs.src == 'true'
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
timeout-minutes: 30
permissions:
contents: read
@@ -258,7 +258,7 @@ jobs:
checkdocs:
name: ๐ Check Docs
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
permissions:
contents: read
steps:
@@ -305,7 +305,7 @@ jobs:
if: needs.changes.outputs.src == 'true'
environment:
name: development
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
timeout-minutes: 30
permissions:
contents: read
@@ -462,7 +462,7 @@ jobs:
- test
- changes
if: always() && needs.changes.outputs.src == 'true'
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
permissions:
contents: read
issues: write
@@ -514,7 +514,7 @@ jobs:
if: always()
name: ๐งช Check Tests
needs: test
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
steps:
- name: โ
OK
if: ${{ !(contains(needs.*.result, 'failure')) }}
diff --git a/docs/data-sources/environments.md b/docs/data-sources/environments.md
index 8fa82ba2..fae58862 100644
--- a/docs/data-sources/environments.md
+++ b/docs/data-sources/environments.md
@@ -56,4 +56,50 @@ Read-Only:
- `description` (String) The Environment description.
- `display_name` (String) The Environment display name.
- `id` (String) The Environment ID.
+- `properties` (Attributes) The Environment properties. (see [below for nested schema](#nestedatt--values--properties))
- `workspace_id` (String) The Workspace ID.
+
+
+
+### Nested Schema for `values.properties`
+
+Read-Only:
+
+- `publish_details` (Attributes) Environment publish operation details. (see [below for nested schema](#nestedatt--values--properties--publish_details))
+
+
+
+### Nested Schema for `values.properties.publish_details`
+
+Read-Only:
+
+- `component_publish_info` (Attributes) Environment component publish information. (see [below for nested schema](#nestedatt--values--properties--publish_details--component_publish_info))
+- `end_time` (String) End time of publish operation.
+- `start_time` (String) Start time of publish operation.
+- `state` (String) Publish state. Possible values: `Cancelled`, `Cancelling`, `Failed`, `Running`, `Success`, `Waiting`.
+- `target_version` (String) Target version to be published.
+
+
+
+### Nested Schema for `values.properties.publish_details.component_publish_info`
+
+Read-Only:
+
+- `spark_libraries` (Attributes) Spark libraries publish information. (see [below for nested schema](#nestedatt--values--properties--publish_details--component_publish_info--spark_libraries))
+- `spark_settings` (Attributes) Spark settings publish information. (see [below for nested schema](#nestedatt--values--properties--publish_details--component_publish_info--spark_settings))
+
+
+
+### Nested Schema for `values.properties.publish_details.component_publish_info.spark_libraries`
+
+Read-Only:
+
+- `state` (String) Publish state. Possible values: `Cancelled`, `Cancelling`, `Failed`, `Running`, `Success`, `Waiting`.
+
+
+
+### Nested Schema for `values.properties.publish_details.component_publish_info.spark_settings`
+
+Read-Only:
+
+- `state` (String) Publish state. Possible values: `Cancelled`, `Cancelling`, `Failed`, `Running`, `Success`, `Waiting`.
diff --git a/docs/data-sources/eventhouse.md b/docs/data-sources/eventhouse.md
index 11df9ab9..2e060250 100644
--- a/docs/data-sources/eventhouse.md
+++ b/docs/data-sources/eventhouse.md
@@ -4,7 +4,7 @@ page_title: "fabric_eventhouse Data Source - terraform-provider-fabric"
subcategory: ""
description: |-
Get a Fabric Eventhouse.
- Use this data source to fetch a Eventhouse https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse.
+ Use this data source to fetch an Eventhouse https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse.
-> This item supports Service Principal authentication.
---
@@ -12,7 +12,7 @@ description: |-
Get a Fabric Eventhouse.
-Use this data source to fetch a [Eventhouse](https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse).
+Use this data source to fetch an [Eventhouse](https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse).
-> This item supports Service Principal authentication.
@@ -29,6 +29,24 @@ data "fabric_eventhouse" "example_by_name" {
workspace_id = "00000000-0000-0000-0000-000000000000"
}
+# Get item details with definition
+# Examples uses `id` but `display_name` can be used as well
+data "fabric_eventhouse" "example_definition" {
+ id = "11111111-1111-1111-1111-111111111111"
+ workspace_id = "00000000-0000-0000-0000-000000000000"
+ output_definition = true
+}
+
+# Access the content of the definition with JSONPath expression
+output "example_definition_content_jsonpath" {
+ value = provider::fabric::content_decode(data.fabric_eventhouse.example_definition.definition["EventhouseProperties.json"].content, ".")
+}
+
+# Access the content of the definition as JSON object
+output "example_definition_content_object" {
+ value = provider::fabric::content_decode(data.fabric_eventhouse.example_definition.definition["EventhouseProperties.json"].content)
+}
+
# This is an invalid data source
# Do not specify `id` and `display_name` in the same data source block
# data "fabric_eventhouse" "example" {
@@ -49,11 +67,17 @@ data "fabric_eventhouse" "example_by_name" {
- `display_name` (String) The Eventhouse display name.
- `id` (String) The Eventhouse ID.
+- `output_definition` (Boolean) Output definition parts as gzip base64 content? Default: `false`
+
+!> Your terraform state file may grow a lot if you output definition content. Only use it when you must use data from the definition.
+
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
### Read-Only
+- `definition` (Attributes Map) Definition parts. Possible path keys: `EventhouseProperties.json`. (see [below for nested schema](#nestedatt--definition))
- `description` (String) The Eventhouse description.
+- `format` (String) The Eventhouse format. Possible values: `NotApplicable`.
- `properties` (Attributes) The Eventhouse properties. (see [below for nested schema](#nestedatt--properties))
@@ -64,12 +88,21 @@ Optional:
- `read` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+
+
+### Nested Schema for `definition`
+
+Read-Only:
+
+- `content` (String) Gzip base64 content of definition part.
+Use [`provider::fabric::content_decode`](../functions/content_decode.md) function to decode content.
+
### Nested Schema for `properties`
Read-Only:
-- `database_ids` (List of String) The IDs list of KQL Databases.
+- `database_ids` (List of String) List of all KQL Database children IDs.
- `ingestion_service_uri` (String) Ingestion service URI.
- `query_service_uri` (String) Query service URI.
diff --git a/docs/data-sources/eventhouses.md b/docs/data-sources/eventhouses.md
index e36dfa93..5a327bbf 100644
--- a/docs/data-sources/eventhouses.md
+++ b/docs/data-sources/eventhouses.md
@@ -56,4 +56,15 @@ Read-Only:
- `description` (String) The Eventhouse description.
- `display_name` (String) The Eventhouse display name.
- `id` (String) The Eventhouse ID.
+- `properties` (Attributes) The Eventhouse properties. (see [below for nested schema](#nestedatt--values--properties))
- `workspace_id` (String) The Workspace ID.
+
+
+
+### Nested Schema for `values.properties`
+
+Read-Only:
+
+- `database_ids` (List of String) List of all KQL Database children IDs.
+- `ingestion_service_uri` (String) Ingestion service URI.
+- `query_service_uri` (String) Query service URI.
diff --git a/docs/data-sources/kql_databases.md b/docs/data-sources/kql_databases.md
index 7c44fbf1..2ac7bb45 100644
--- a/docs/data-sources/kql_databases.md
+++ b/docs/data-sources/kql_databases.md
@@ -56,4 +56,16 @@ Read-Only:
- `description` (String) The KQL Database description.
- `display_name` (String) The KQL Database display name.
- `id` (String) The KQL Database ID.
+- `properties` (Attributes) The KQL Database properties. (see [below for nested schema](#nestedatt--values--properties))
- `workspace_id` (String) The Workspace ID.
+
+
+
+### Nested Schema for `values.properties`
+
+Read-Only:
+
+- `database_type` (String) The type of the database. Possible values: `ReadWrite`, `Shortcut`.
+- `eventhouse_id` (String) Parent Eventhouse ID.
+- `ingestion_service_uri` (String) Ingestion service URI.
+- `query_service_uri` (String) Query service URI.
diff --git a/docs/data-sources/lakehouses.md b/docs/data-sources/lakehouses.md
index 778e5a23..7d90c163 100644
--- a/docs/data-sources/lakehouses.md
+++ b/docs/data-sources/lakehouses.md
@@ -56,4 +56,26 @@ Read-Only:
- `description` (String) The Lakehouse description.
- `display_name` (String) The Lakehouse display name.
- `id` (String) The Lakehouse ID.
+- `properties` (Attributes) The Lakehouse properties. (see [below for nested schema](#nestedatt--values--properties))
- `workspace_id` (String) The Workspace ID.
+
+
+
+### Nested Schema for `values.properties`
+
+Read-Only:
+
+- `default_schema` (String) Default schema of the Lakehouse. This property is returned only for schema enabled Lakehouse.
+- `onelake_files_path` (String) OneLake path to the Lakehouse files directory.
+- `onelake_tables_path` (String) OneLake path to the Lakehouse tables directory.
+- `sql_endpoint_properties` (Attributes) An object containing the properties of the SQL endpoint. (see [below for nested schema](#nestedatt--values--properties--sql_endpoint_properties))
+
+
+
+### Nested Schema for `values.properties.sql_endpoint_properties`
+
+Read-Only:
+
+- `connection_string` (String) SQL endpoint connection string.
+- `id` (String) SQL endpoint ID.
+- `provisioning_status` (String) The SQL endpoint provisioning status.
diff --git a/docs/data-sources/warehouse.md b/docs/data-sources/warehouse.md
index 3f709c33..d270948f 100644
--- a/docs/data-sources/warehouse.md
+++ b/docs/data-sources/warehouse.md
@@ -70,6 +70,6 @@ Optional:
Read-Only:
-- `connection_string` (String) Connection String
-- `created_date` (String) Created Date
-- `last_updated_time` (String) Last Updated Time
+- `connection_string` (String) The SQL connection string connected to the workspace containing this warehouse.
+- `created_date` (String) The date and time the warehouse was created.
+- `last_updated_time` (String) The date and time the warehouse was last updated.
diff --git a/docs/data-sources/warehouses.md b/docs/data-sources/warehouses.md
index 7d3cf28c..6a72d531 100644
--- a/docs/data-sources/warehouses.md
+++ b/docs/data-sources/warehouses.md
@@ -56,4 +56,15 @@ Read-Only:
- `description` (String) The Warehouse description.
- `display_name` (String) The Warehouse display name.
- `id` (String) The Warehouse ID.
+- `properties` (Attributes) The Warehouse properties. (see [below for nested schema](#nestedatt--values--properties))
- `workspace_id` (String) The Workspace ID.
+
+
+
+### Nested Schema for `values.properties`
+
+Read-Only:
+
+- `connection_string` (String) The SQL connection string connected to the workspace containing this warehouse.
+- `created_date` (String) The date and time the warehouse was created.
+- `last_updated_time` (String) The date and time the warehouse was last updated.
diff --git a/docs/resources/environment.md b/docs/resources/environment.md
index 071b3a59..de1275fd 100644
--- a/docs/resources/environment.md
+++ b/docs/resources/environment.md
@@ -3,16 +3,16 @@
page_title: "fabric_environment Resource - terraform-provider-fabric"
subcategory: ""
description: |-
- This resource manages a Fabric Environment.
- See Environment https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment for more information.
+ Manage a Fabric Environment.
+ Use this resource to manage an Environment https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment.
-> This item supports Service Principal authentication.
---
# fabric_environment (Resource)
-This resource manages a Fabric Environment.
+Manage a Fabric Environment.
-See [Environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment) for more information.
+Use this resource to manage an [Environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment).
-> This item supports Service Principal authentication.
diff --git a/docs/resources/eventhouse.md b/docs/resources/eventhouse.md
index ef379bb6..4b2dae98 100644
--- a/docs/resources/eventhouse.md
+++ b/docs/resources/eventhouse.md
@@ -3,26 +3,55 @@
page_title: "fabric_eventhouse Resource - terraform-provider-fabric"
subcategory: ""
description: |-
- This resource manages a Fabric Eventhouse.
- See Eventhouse https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse for more information.
+ Manage a Fabric Eventhouse.
+ Use this resource to manage an Eventhouse https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse.
-> This item supports Service Principal authentication.
---
# fabric_eventhouse (Resource)
-This resource manages a Fabric Eventhouse.
+Manage a Fabric Eventhouse.
-See [Eventhouse](https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse) for more information.
+Use this resource to manage an [Eventhouse](https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse).
-> This item supports Service Principal authentication.
## Example Usage
```terraform
+# Example 1 - Item without definition
resource "fabric_eventhouse" "example" {
- display_name = "example"
+ display_name = "example1"
workspace_id = "00000000-0000-0000-0000-000000000000"
}
+
+# Example 2 - Item with definition bootstrapping only
+resource "fabric_eventhouse" "example_definition_bootstrap" {
+ display_name = "example2"
+ description = "example with definition bootstrapping"
+ workspace_id = "00000000-0000-0000-0000-000000000000"
+ definition_update_enabled = false # <-- Disable definition update
+ definition = {
+ "EventhouseProperties.json" = {
+ source = "${local.path}/EventhouseProperties.json.tmpl"
+ }
+ }
+}
+
+# Example 3 - Item with definition update when source or tokens changed
+resource "fabric_eventhouse" "example_definition_update" {
+ display_name = "example3"
+ description = "example with definition update when source or tokens changed"
+ workspace_id = "00000000-0000-0000-0000-000000000000"
+ definition = {
+ "EventhouseProperties.json" = {
+ source = "${local.path}/EventhouseProperties.json.tmpl"
+ tokens = {
+ "MyKey" = "MyValue"
+ }
+ }
+ }
+}
```
@@ -35,14 +64,35 @@ resource "fabric_eventhouse" "example" {
### Optional
+- `definition` (Attributes Map) Definition parts. Accepted path keys: `EventhouseProperties.json`. Read more about [Eventhouse definition part paths](https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/eventhouse-definition). (see [below for nested schema](#nestedatt--definition))
+- `definition_update_enabled` (Boolean) Update definition on change of source content. Default: `true`.
- `description` (String) The Eventhouse description.
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
### Read-Only
+- `format` (String) The Eventhouse format. Possible values: `NotApplicable`.
- `id` (String) The Eventhouse ID.
- `properties` (Attributes) The Eventhouse properties. (see [below for nested schema](#nestedatt--properties))
+
+
+### Nested Schema for `definition`
+
+Required:
+
+- `source` (String) Path to the file with source of the definition part.
+
+The source content may include placeholders for token substitution. Use the dot with the token name `{{ .TokenName }}`.
+
+Optional:
+
+- `tokens` (Map of String) A map of key/value pairs of tokens substitutes in the source.
+
+Read-Only:
+
+- `source_content_sha256` (String) SHA256 of source's content of definition part.
+
### Nested Schema for `timeouts`
@@ -60,7 +110,7 @@ Optional:
Read-Only:
-- `database_ids` (List of String) The IDs list of KQL Databases.
+- `database_ids` (List of String) List of all KQL Database children IDs.
- `ingestion_service_uri` (String) Ingestion service URI.
- `query_service_uri` (String) Query service URI.
diff --git a/docs/resources/kql_database.md b/docs/resources/kql_database.md
index 373e4415..a30e6f58 100644
--- a/docs/resources/kql_database.md
+++ b/docs/resources/kql_database.md
@@ -3,23 +3,23 @@
page_title: "fabric_kql_database Resource - terraform-provider-fabric"
subcategory: ""
description: |-
- This resource manages a Fabric KQL Database.
- See KQL Database https://learn.microsoft.com/fabric/real-time-intelligence/create-database for more information.
+ Manage a Fabric KQL Database.
+ Use this resource to manage a KQL Database https://learn.microsoft.com/fabric/real-time-intelligence/create-database.
-> This item supports Service Principal authentication.
---
# fabric_kql_database (Resource)
-This resource manages a Fabric KQL Database.
+Manage a Fabric KQL Database.
-See [KQL Database](https://learn.microsoft.com/fabric/real-time-intelligence/create-database) for more information.
+Use this resource to manage a [KQL Database](https://learn.microsoft.com/fabric/real-time-intelligence/create-database).
-> This item supports Service Principal authentication.
## Example Usage
```terraform
-# Create a ReadWrite KQL database example
+# Example 1 - Create a ReadWrite KQL database
resource "fabric_kql_database" "example1" {
display_name = "example1"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -30,7 +30,7 @@ resource "fabric_kql_database" "example1" {
}
}
-# Create a Shortcut KQL database to source Azure Data Explorer cluster example
+# Example 2 - Create a Shortcut KQL database to source Azure Data Explorer cluster
resource "fabric_kql_database" "example2" {
display_name = "example2"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -43,7 +43,7 @@ resource "fabric_kql_database" "example2" {
}
}
-# Create a Shortcut KQL database to source Azure Data Explorer cluster with invitation token example
+# Example 3 - Create a Shortcut KQL database to source Azure Data Explorer cluster with invitation token
resource "fabric_kql_database" "example3" {
display_name = "example3"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -55,7 +55,7 @@ resource "fabric_kql_database" "example3" {
}
}
-# Create a Shortcut KQL database to source KQL database example
+# Example 4 - Create a Shortcut KQL database to source KQL database
resource "fabric_kql_database" "example4" {
display_name = "example4"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -73,15 +73,15 @@ resource "fabric_kql_database" "example4" {
### Required
-- `configuration` (Attributes) The KQL Database creation configuration.
-
-Any changes to this configuration will result in recreation of the KQL Database. (see [below for nested schema](#nestedatt--configuration))
-
- `display_name` (String) The KQL Database display name.
- `workspace_id` (String) The Workspace ID.
### Optional
+- `configuration` (Attributes) The KQL Database creation configuration.
+
+Any changes to this configuration will result in recreation of the KQL Database. (see [below for nested schema](#nestedatt--configuration))
+
- `description` (String) The KQL Database description.
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
diff --git a/docs/resources/lakehouse.md b/docs/resources/lakehouse.md
index ea71c2e2..1625b9a2 100644
--- a/docs/resources/lakehouse.md
+++ b/docs/resources/lakehouse.md
@@ -3,16 +3,16 @@
page_title: "fabric_lakehouse Resource - terraform-provider-fabric"
subcategory: ""
description: |-
- This resource manages a Fabric Lakehouse.
- See Lakehouse https://learn.microsoft.com/training/modules/get-started-lakehouses for more information.
+ Manage a Fabric Lakehouse.
+ Use this resource to manage a Lakehouse https://learn.microsoft.com/training/modules/get-started-lakehouses.
-> This item supports Service Principal authentication.
---
# fabric_lakehouse (Resource)
-This resource manages a Fabric Lakehouse.
+Manage a Fabric Lakehouse.
-See [Lakehouse](https://learn.microsoft.com/training/modules/get-started-lakehouses) for more information.
+Use this resource to manage a [Lakehouse](https://learn.microsoft.com/training/modules/get-started-lakehouses).
-> This item supports Service Principal authentication.
diff --git a/docs/resources/spark_job_definition.md b/docs/resources/spark_job_definition.md
index c08ab096..047c6665 100644
--- a/docs/resources/spark_job_definition.md
+++ b/docs/resources/spark_job_definition.md
@@ -19,13 +19,13 @@ Use this resource to manage a [Spark Job Definition](https://learn.microsoft.com
## Example Usage
```terraform
-# Example 1 - Spark Job Definition without definition
+# Example 1 - Item without definition
resource "fabric_spark_job_definition" "example" {
display_name = "example1"
workspace_id = "00000000-0000-0000-0000-000000000000"
}
-# Example 2 - Spark Job Definition with definition bootstrapping only
+# Example 2 - Item with definition bootstrapping only
resource "fabric_spark_job_definition" "example_definition_bootstrap" {
display_name = "example2"
description = "example with definition bootstrapping"
@@ -38,7 +38,7 @@ resource "fabric_spark_job_definition" "example_definition_bootstrap" {
}
}
-# Example 3 - Spark Job Definition with definition update when source or tokens changed
+# Example 3 - Item with definition update when source or tokens changed
resource "fabric_spark_job_definition" "example_definition_update" {
display_name = "example3"
description = "example with definition update when source or tokens changed"
diff --git a/docs/resources/warehouse.md b/docs/resources/warehouse.md
index c783a169..90a79530 100644
--- a/docs/resources/warehouse.md
+++ b/docs/resources/warehouse.md
@@ -3,16 +3,16 @@
page_title: "fabric_warehouse Resource - terraform-provider-fabric"
subcategory: ""
description: |-
- This resource manages a Fabric Warehouse.
- See Warehouse https://learn.microsoft.com/fabric/data-warehouse/data-warehousing for more information.
+ Manage a Fabric Warehouse.
+ Use this resource to manage a Warehouse https://learn.microsoft.com/fabric/data-warehouse/data-warehousing.
-> This item does not support Service Principal. Please use a User context authentication.
---
# fabric_warehouse (Resource)
-This resource manages a Fabric Warehouse.
+Manage a Fabric Warehouse.
-See [Warehouse](https://learn.microsoft.com/fabric/data-warehouse/data-warehousing) for more information.
+Use this resource to manage a [Warehouse](https://learn.microsoft.com/fabric/data-warehouse/data-warehousing).
-> This item does not support Service Principal. Please use a User context authentication.
@@ -60,9 +60,9 @@ Optional:
Read-Only:
-- `connection_string` (String) Connection String
-- `created_date` (String) Created Date
-- `last_updated_time` (String) Last Updated Time
+- `connection_string` (String) The SQL connection string connected to the workspace containing this warehouse.
+- `created_date` (String) The date and time the warehouse was created.
+- `last_updated_time` (String) The date and time the warehouse was last updated.
## Import
diff --git a/examples/data-sources/fabric_eventhouse/data-source.tf b/examples/data-sources/fabric_eventhouse/data-source.tf
index 9621d743..dd29f59b 100644
--- a/examples/data-sources/fabric_eventhouse/data-source.tf
+++ b/examples/data-sources/fabric_eventhouse/data-source.tf
@@ -8,6 +8,24 @@ data "fabric_eventhouse" "example_by_name" {
workspace_id = "00000000-0000-0000-0000-000000000000"
}
+# Get item details with definition
+# Example uses `id`, but `display_name` can be used as well
+data "fabric_eventhouse" "example_definition" {
+ id = "11111111-1111-1111-1111-111111111111"
+ workspace_id = "00000000-0000-0000-0000-000000000000"
+ output_definition = true
+}
+
+# Access the content of the definition with JSONPath expression
+output "example_definition_content_jsonpath" {
+ value = provider::fabric::content_decode(data.fabric_eventhouse.example_definition.definition["EventhouseProperties.json"].content, ".")
+}
+
+# Access the content of the definition as JSON object
+output "example_definition_content_object" {
+ value = provider::fabric::content_decode(data.fabric_eventhouse.example_definition.definition["EventhouseProperties.json"].content)
+}
+
# This is an invalid data source
# Do not specify `id` and `display_name` in the same data source block
# data "fabric_eventhouse" "example" {
diff --git a/examples/resources/fabric_eventhouse/outputs.tf b/examples/resources/fabric_eventhouse/outputs.tf
index d0574aa1..06bbf83b 100644
--- a/examples/resources/fabric_eventhouse/outputs.tf
+++ b/examples/resources/fabric_eventhouse/outputs.tf
@@ -1,3 +1,11 @@
output "example" {
value = resource.fabric_eventhouse.example
}
+
+output "example_definition_bootstrap" {
+  value = resource.fabric_eventhouse.example_definition_bootstrap
+}
+
+output "example_definition_update" {
+  value = resource.fabric_eventhouse.example_definition_update
+}
diff --git a/examples/resources/fabric_eventhouse/resource.tf b/examples/resources/fabric_eventhouse/resource.tf
index ee14006f..d417cff4 100644
--- a/examples/resources/fabric_eventhouse/resource.tf
+++ b/examples/resources/fabric_eventhouse/resource.tf
@@ -1,4 +1,33 @@
+# Example 1 - Item without definition
resource "fabric_eventhouse" "example" {
- display_name = "example"
+ display_name = "example1"
workspace_id = "00000000-0000-0000-0000-000000000000"
}
+
+# Example 2 - Item with definition bootstrapping only
+resource "fabric_eventhouse" "example_definition_bootstrap" {
+ display_name = "example2"
+ description = "example with definition bootstrapping"
+ workspace_id = "00000000-0000-0000-0000-000000000000"
+ definition_update_enabled = false # <-- Disable definition update
+ definition = {
+ "EventhouseProperties.json" = {
+ source = "${local.path}/EventhouseProperties.json.tmpl"
+ }
+ }
+}
+
+# Example 3 - Item with definition update when source or tokens changed
+resource "fabric_eventhouse" "example_definition_update" {
+ display_name = "example3"
+ description = "example with definition update when source or tokens changed"
+ workspace_id = "00000000-0000-0000-0000-000000000000"
+ definition = {
+ "EventhouseProperties.json" = {
+ source = "${local.path}/EventhouseProperties.json.tmpl"
+ tokens = {
+ "MyKey" = "MyValue"
+ }
+ }
+ }
+}
diff --git a/examples/resources/fabric_kql_database/resource.tf b/examples/resources/fabric_kql_database/resource.tf
index fc7eb6f1..39c6477a 100644
--- a/examples/resources/fabric_kql_database/resource.tf
+++ b/examples/resources/fabric_kql_database/resource.tf
@@ -1,4 +1,4 @@
-# Create a ReadWrite KQL database example
+# Example 1 - Create a ReadWrite KQL database
resource "fabric_kql_database" "example1" {
display_name = "example1"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -9,7 +9,7 @@ resource "fabric_kql_database" "example1" {
}
}
-# Create a Shortcut KQL database to source Azure Data Explorer cluster example
+# Example 2 - Create a Shortcut KQL database to source Azure Data Explorer cluster
resource "fabric_kql_database" "example2" {
display_name = "example2"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -22,7 +22,7 @@ resource "fabric_kql_database" "example2" {
}
}
-# Create a Shortcut KQL database to source Azure Data Explorer cluster with invitation token example
+# Example 3 - Create a Shortcut KQL database to source Azure Data Explorer cluster with invitation token
resource "fabric_kql_database" "example3" {
display_name = "example3"
workspace_id = "00000000-0000-0000-0000-000000000000"
@@ -34,7 +34,7 @@ resource "fabric_kql_database" "example3" {
}
}
-# Create a Shortcut KQL database to source KQL database example
+# Example 4 - Create a Shortcut KQL database to source KQL database
resource "fabric_kql_database" "example4" {
display_name = "example4"
workspace_id = "00000000-0000-0000-0000-000000000000"
diff --git a/examples/resources/fabric_spark_job_definition/resource.tf b/examples/resources/fabric_spark_job_definition/resource.tf
index 619d28b3..e71733e3 100644
--- a/examples/resources/fabric_spark_job_definition/resource.tf
+++ b/examples/resources/fabric_spark_job_definition/resource.tf
@@ -1,10 +1,10 @@
-# Example 1 - Spark Job Definition without definition
+# Example 1 - Item without definition
resource "fabric_spark_job_definition" "example" {
display_name = "example1"
workspace_id = "00000000-0000-0000-0000-000000000000"
}
-# Example 2 - Spark Job Definition with definition bootstrapping only
+# Example 2 - Item with definition bootstrapping only
resource "fabric_spark_job_definition" "example_definition_bootstrap" {
display_name = "example2"
description = "example with definition bootstrapping"
@@ -17,7 +17,7 @@ resource "fabric_spark_job_definition" "example_definition_bootstrap" {
}
}
-# Example 3 - Spark Job Definition with definition update when source or tokens changed
+# Example 3 - Item with definition update when source or tokens changed
resource "fabric_spark_job_definition" "example_definition_update" {
display_name = "example3"
description = "example with definition update when source or tokens changed"
diff --git a/go.mod b/go.mod
index e88cdebf..410f4adc 100644
--- a/go.mod
+++ b/go.mod
@@ -76,7 +76,7 @@ require (
github.com/zclconf/go-cty v1.15.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/mod v0.21.0 // indirect
- golang.org/x/net v0.29.0 // indirect
+ golang.org/x/net v0.33.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
diff --git a/go.sum b/go.sum
index 7672fb33..c6849b71 100644
--- a/go.sum
+++ b/go.sum
@@ -222,8 +222,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
-golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
+golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
+golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
diff --git a/internal/common/errors.go b/internal/common/errors.go
index e9820969..b983fe15 100644
--- a/internal/common/errors.go
+++ b/internal/common/errors.go
@@ -15,33 +15,33 @@ const (
ErrorDataSourceConfigType string = "Unexpected Data Source Configure Type"
ErrorResourceConfigType string = "Unexpected Resource Configure Type"
ErrorModelConversion string = "Data Model Conversion Error"
- ErrorCreateHeader string = "create operation"
+ ErrorCreateHeader string = "Create operation"
ErrorCreateDetails string = "Could not create resource"
- ErrorReadHeader string = "read operation"
+ ErrorReadHeader string = "Read operation"
ErrorReadDetails string = "Could not read resource"
- ErrorUpdateHeader string = "update operation"
+ ErrorUpdateHeader string = "Update operation"
ErrorUpdateDetails string = "Could not update resource"
- ErrorDeleteHeader string = "delete operation"
+ ErrorDeleteHeader string = "Delete operation"
ErrorDeleteDetails string = "Could not delete resource"
- ErrorListHeader string = "list operation"
+ ErrorListHeader string = "List operation"
ErrorListDetails string = "Could not list resource"
- ErrorImportHeader string = "import operation"
+ ErrorImportHeader string = "Import operation"
ErrorImportDetails string = "Could not import resource"
ErrorImportIdentifierHeader string = "Invalid import identifier"
ErrorImportIdentifierDetails string = "Expected identifier must be in the format: %s"
ErrorInvalidURL string = "must be a valid URL."
ErrorFabricClientType string = "Expected *fabric.Client, got: %T. Please report this issue to the provider developers."
ErrorGenericUnexpected string = "Unexpected error occurred"
- ErrorBase64DecodeHeader string = "base64 decode operation"
- ErrorBase64EncodeHeader string = "base64 encode operation"
- ErrorBase64GzipEncodeHeader string = "base64 gzip encode operation"
- ErrorJSONNormalizeHeader string = "json normalize operation"
- ErrorFileReadHeader string = "file read operation"
+ ErrorBase64DecodeHeader string = "Base64 decode operation"
+ ErrorBase64EncodeHeader string = "Base64 encode operation"
+ ErrorBase64GzipEncodeHeader string = "Base64 Gzip encode operation"
+ ErrorJSONNormalizeHeader string = "JSON normalize operation"
+ ErrorFileReadHeader string = "File read operation"
ErrorTmplParseHeader string = "template parse operation"
)
// Warnings.
const (
- WarningItemDefinitionUpdateHeader = "Item definition update"
- WarningItemDefinitionUpdateDetails = "%s definition update will overwrite the existing definition."
+ WarningItemDefinitionUpdateHeader = "Fabric Item definition update"
+ WarningItemDefinitionUpdateDetails = "%s definition update operation will overwrite the existing definition on the Fabric side."
)
diff --git a/internal/pkg/fabricitem/data_item.go b/internal/pkg/fabricitem/data_item.go
index ef7ad641..5e6d6d4e 100644
--- a/internal/pkg/fabricitem/data_item.go
+++ b/internal/pkg/fabricitem/data_item.go
@@ -45,7 +45,7 @@ func (d *DataSourceFabricItem) Metadata(_ context.Context, req datasource.Metada
}
func (d *DataSourceFabricItem) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = GetDataSourceFabricItemSchema(ctx, *d)
+ resp.Schema = getDataSourceFabricItemSchema(ctx, *d)
}
func (d *DataSourceFabricItem) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
diff --git a/internal/pkg/fabricitem/data_item_definition.go b/internal/pkg/fabricitem/data_item_definition.go
index ceb63cce..41dd22d6 100644
--- a/internal/pkg/fabricitem/data_item_definition.go
+++ b/internal/pkg/fabricitem/data_item_definition.go
@@ -49,7 +49,7 @@ func (d *DataSourceFabricItemDefinition) Metadata(_ context.Context, req datasou
}
func (d *DataSourceFabricItemDefinition) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = GetDataSourceFabricItemDefinitionSchema(ctx, *d)
+ resp.Schema = getDataSourceFabricItemDefinitionSchema(ctx, *d)
}
func (d *DataSourceFabricItemDefinition) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
@@ -211,5 +211,12 @@ func (d *DataSourceFabricItemDefinition) getDefinition(ctx context.Context, mode
return diags
}
- return model.setDefinition(ctx, *respGet.Definition)
+ definition, diags := getDataSourceDefinitionModel(ctx, *respGet.Definition)
+ if diags.HasError() {
+ return diags
+ }
+
+ model.setDefinition(definition)
+
+ return nil
}
diff --git a/internal/pkg/fabricitem/data_item_definition_properties.go b/internal/pkg/fabricitem/data_item_definition_properties.go
index a74acded..7ca644d2 100644
--- a/internal/pkg/fabricitem/data_item_definition_properties.go
+++ b/internal/pkg/fabricitem/data_item_definition_properties.go
@@ -31,10 +31,10 @@ var (
type DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct {
DataSourceFabricItemDefinition
- PropertiesSchema schema.SingleNestedAttribute
- PropertiesSetter func(ctx context.Context, from *Titemprop, to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
- ItemGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
- ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], errNotFound fabcore.ResponseError, fabricItem *FabricItemProperties[Titemprop]) error
+ PropertiesAttributes map[string]schema.Attribute
+ PropertiesSetter func(ctx context.Context, from *Titemprop, to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
+ ItemGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
+ ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], errNotFound fabcore.ResponseError, fabricItem *FabricItemProperties[Titemprop]) error
}
func NewDataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop any](config DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) datasource.DataSource {
@@ -46,10 +46,10 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Metadata(
}
func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { //revive:disable-line:confusing-naming
- resp.Schema = GetDataSourceFabricItemDefinitionPropertiesSchema1(ctx, *d)
+ resp.Schema = getDataSourceFabricItemDefinitionPropertiesSchema(ctx, *d)
}
-func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
+func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ConfigValidators(_ context.Context) []datasource.ConfigValidator { //revive:disable-line:confusing-naming
if d.IsDisplayNameUnique {
return []datasource.ConfigValidator{
datasourcevalidator.Conflicting(
@@ -148,7 +148,7 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Read(ctx
}
}
-func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByID(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
+func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByID(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { //revive:disable-line:confusing-naming
tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", d.Name, model.ID.ValueString()))
var fabricItem FabricItemProperties[Titemprop]
@@ -160,15 +160,10 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByID(c
model.set(fabricItem)
- diags := d.PropertiesSetter(ctx, fabricItem.Properties, model)
- if diags.HasError() {
- return diags
- }
-
- return nil
+ return d.PropertiesSetter(ctx, fabricItem.Properties, model)
}
-func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByDisplayName(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
+func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByDisplayName(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { //revive:disable-line:confusing-naming
tflog.Trace(ctx, fmt.Sprintf("getting %s by Display Name: %s", d.Name, model.DisplayName.ValueString()))
errNotFoundCode := fabcore.ErrCommon.EntityNotFound.Error()
@@ -192,12 +187,7 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getByDisp
model.set(fabricItem)
- diags := d.PropertiesSetter(ctx, fabricItem.Properties, model)
- if diags.HasError() {
- return diags
- }
-
- return nil
+ return d.PropertiesSetter(ctx, fabricItem.Properties, model)
}
func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getDefinition(ctx context.Context, model *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
@@ -214,5 +204,12 @@ func (d *DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) getDefini
return diags
}
- return model.setDefinition(ctx, *respGet.Definition)
+ definition, diags := getDataSourceDefinitionModel(ctx, *respGet.Definition)
+ if diags.HasError() {
+ return diags
+ }
+
+ model.setDefinition(definition)
+
+ return nil
}
diff --git a/internal/pkg/fabricitem/data_item_properties.go b/internal/pkg/fabricitem/data_item_properties.go
new file mode 100644
index 00000000..122aeb7e
--- /dev/null
+++ b/internal/pkg/fabricitem/data_item_properties.go
@@ -0,0 +1,170 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+ pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
+)
+
+// Ensure the implementation satisfies the expected interfaces.
+var (
+ _ datasource.DataSourceWithConfigValidators = (*DataSourceFabricItemProperties[struct{}, struct{}])(nil)
+ _ datasource.DataSourceWithConfigure = (*DataSourceFabricItemProperties[struct{}, struct{}])(nil)
+)
+
+type DataSourceFabricItemProperties[Ttfprop, Titemprop any] struct {
+ DataSourceFabricItem
+ PropertiesAttributes map[string]schema.Attribute
+ PropertiesSetter func(ctx context.Context, from *Titemprop, to *DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
+ ItemGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
+ ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop], errNotFound fabcore.ResponseError, fabricItem *FabricItemProperties[Titemprop]) error
+}
+
+func NewDataSourceFabricItemProperties[Ttfprop, Titemprop any](config DataSourceFabricItemProperties[Ttfprop, Titemprop]) datasource.DataSource {
+ return &config
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { //revive:disable-line:confusing-naming
+ resp.TypeName = req.ProviderTypeName + "_" + d.TFName
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { //revive:disable-line:confusing-naming
+ resp.Schema = getDataSourceFabricItemPropertiesSchema(ctx, *d)
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) ConfigValidators(_ context.Context) []datasource.ConfigValidator { //revive:disable-line:confusing-naming
+ if d.IsDisplayNameUnique {
+ return []datasource.ConfigValidator{
+ datasourcevalidator.Conflicting(
+ path.MatchRoot("id"),
+ path.MatchRoot("display_name"),
+ ),
+ datasourcevalidator.ExactlyOneOf(
+ path.MatchRoot("id"),
+ path.MatchRoot("display_name"),
+ ),
+ }
+ }
+
+ return []datasource.ConfigValidator{}
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { //revive:disable-line:confusing-naming
+ if req.ProviderData == nil {
+ return
+ }
+
+ pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
+ if !ok {
+ resp.Diagnostics.AddError(
+ common.ErrorDataSourceConfigType,
+ fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
+ )
+
+ return
+ }
+
+ d.pConfigData = pConfigData
+ d.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { //revive:disable-line:confusing-naming
+ tflog.Debug(ctx, "READ", map[string]any{
+ "action": "start",
+ })
+ tflog.Trace(ctx, "READ", map[string]any{
+ "config": req.Config,
+ })
+
+ var data DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop]
+
+ if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
+ if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx, cancel := context.WithTimeout(ctx, timeout)
+ defer cancel()
+
+ if data.ID.ValueString() != "" {
+ diags = d.getByID(ctx, &data)
+ } else {
+ diags = d.getByDisplayName(ctx, &data)
+ }
+
+ if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
+
+ tflog.Debug(ctx, "READ", map[string]any{
+ "action": "end",
+ })
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) getByID(ctx context.Context, model *DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { //revive:disable-line:confusing-naming
+ tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", d.Name, model.ID.ValueString()))
+
+ var fabricItem FabricItemProperties[Titemprop]
+
+ err := d.ItemGetter(ctx, *d.pConfigData.FabricClient, *model, &fabricItem)
+ if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
+ return diags
+ }
+
+ model.set(fabricItem)
+
+ return d.PropertiesSetter(ctx, fabricItem.Properties, model)
+}
+
+func (d *DataSourceFabricItemProperties[Ttfprop, Titemprop]) getByDisplayName(ctx context.Context, model *DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { //revive:disable-line:confusing-naming
+ tflog.Trace(ctx, fmt.Sprintf("getting %s by Display Name: %s", d.Name, model.DisplayName.ValueString()))
+
+ errNotFoundCode := fabcore.ErrCommon.EntityNotFound.Error()
+ errNotFoundMsg := fmt.Sprintf("Unable to find %s with 'display_name': %s in the Workspace ID: %s", d.Name, model.DisplayName.ValueString(), model.WorkspaceID.ValueString())
+
+ errNotFound := fabcore.ResponseError{
+ ErrorCode: errNotFoundCode,
+ StatusCode: http.StatusNotFound,
+ ErrorResponse: &fabcore.ErrorResponse{
+ ErrorCode: &errNotFoundCode,
+ Message: &errNotFoundMsg,
+ },
+ }
+
+ var fabricItem FabricItemProperties[Titemprop]
+
+ err := d.ItemListGetter(ctx, *d.pConfigData.FabricClient, *model, errNotFound, &fabricItem)
+ if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
+ return diags
+ }
+
+ model.set(fabricItem)
+
+ return d.PropertiesSetter(ctx, fabricItem.Properties, model)
+}
diff --git a/internal/pkg/fabricitem/data_items.go b/internal/pkg/fabricitem/data_items.go
index 016aae18..7cf9029d 100644
--- a/internal/pkg/fabricitem/data_items.go
+++ b/internal/pkg/fabricitem/data_items.go
@@ -57,7 +57,7 @@ func (d *DataSourceFabricItems) Schema(ctx context.Context, _ datasource.SchemaR
"values": schema.ListNestedAttribute{
Computed: true,
MarkdownDescription: fmt.Sprintf("The list of %s.", d.Names),
- CustomType: supertypes.NewListNestedObjectTypeOf[baseFabricItemModel](ctx),
+ CustomType: supertypes.NewListNestedObjectTypeOf[fabricItemModel](ctx),
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"workspace_id": schema.StringAttribute{
diff --git a/internal/pkg/fabricitem/data_items_properties.go b/internal/pkg/fabricitem/data_items_properties.go
index 918c0605..4f756011 100644
--- a/internal/pkg/fabricitem/data_items_properties.go
+++ b/internal/pkg/fabricitem/data_items_properties.go
@@ -28,9 +28,9 @@ var (
type DataSourceFabricItemsProperties[Ttfprop, Titemprop any] struct {
DataSourceFabricItems
- PropertiesSchema schema.SingleNestedAttribute
- PropertiesSetter func(ctx context.Context, from *Titemprop, to *FabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
- ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop], fabricItems *[]FabricItemProperties[Titemprop]) error
+ PropertiesAttributes map[string]schema.Attribute
+ PropertiesSetter func(ctx context.Context, from *Titemprop, to *FabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
+ ItemListGetter func(ctx context.Context, fabricClient fabric.Client, model DataSourceFabricItemsPropertiesModel[Ttfprop, Titemprop], fabricItems *[]FabricItemProperties[Titemprop]) error
}
func NewDataSourceFabricItemsProperties[Ttfprop, Titemprop any](config DataSourceFabricItemsProperties[Ttfprop, Titemprop]) datasource.DataSource {
@@ -63,7 +63,7 @@ func (d *DataSourceFabricItemsProperties[Ttfprop, Titemprop]) Schema(ctx context
},
}
- attributes["properties"] = d.PropertiesSchema
+ attributes["properties"] = getDataSourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, d.Name, d.PropertiesAttributes)
resp.Schema = schema.Schema{
MarkdownDescription: d.MarkdownDescription,
diff --git a/internal/pkg/fabricitem/data_schema.go b/internal/pkg/fabricitem/data_schema.go
index 7cdbb7b1..e2f11a2a 100644
--- a/internal/pkg/fabricitem/data_schema.go
+++ b/internal/pkg/fabricitem/data_schema.go
@@ -15,7 +15,7 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
)
-func GetDataSourceFabricItemSchema(ctx context.Context, d DataSourceFabricItem) schema.Schema {
+func getDataSourceFabricItemSchema(ctx context.Context, d DataSourceFabricItem) schema.Schema {
attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
return schema.Schema{
@@ -24,7 +24,7 @@ func GetDataSourceFabricItemSchema(ctx context.Context, d DataSourceFabricItem)
}
}
-func GetDataSourceFabricItemDefinitionSchema(ctx context.Context, d DataSourceFabricItemDefinition) schema.Schema {
+func getDataSourceFabricItemDefinitionSchema(ctx context.Context, d DataSourceFabricItemDefinition) schema.Schema {
attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) {
@@ -37,9 +37,9 @@ func GetDataSourceFabricItemDefinitionSchema(ctx context.Context, d DataSourceFa
}
}
-func GetDataSourceFabricItemPropertiesSchema(ctx context.Context, d DataSourceFabricItem, properties schema.SingleNestedAttribute) schema.Schema {
+func getDataSourceFabricItemPropertiesSchema[Ttfprop, Titemprop any](ctx context.Context, d DataSourceFabricItemProperties[Ttfprop, Titemprop]) schema.Schema {
attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
- attributes["properties"] = properties
+ attributes["properties"] = getDataSourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, d.Name, d.PropertiesAttributes)
return schema.Schema{
MarkdownDescription: d.MarkdownDescription,
@@ -47,23 +47,9 @@ func GetDataSourceFabricItemPropertiesSchema(ctx context.Context, d DataSourceFa
}
}
-func GetDataSourceFabricItemDefinitionPropertiesSchema(ctx context.Context, d DataSourceFabricItemDefinition, properties schema.SingleNestedAttribute) schema.Schema {
+func getDataSourceFabricItemDefinitionPropertiesSchema[Ttfprop, Titemprop any](ctx context.Context, d DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) schema.Schema {
attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
- attributes["properties"] = properties
-
- for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) {
- attributes[key] = value
- }
-
- return schema.Schema{
- MarkdownDescription: d.MarkdownDescription,
- Attributes: attributes,
- }
-}
-
-func GetDataSourceFabricItemDefinitionPropertiesSchema1[Ttfprop, Titemprop any](ctx context.Context, d DataSourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) schema.Schema {
- attributes := getDataSourceFabricItemBaseAttributes(ctx, d.Name, d.IsDisplayNameUnique)
- attributes["properties"] = d.PropertiesSchema
+ attributes["properties"] = getDataSourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, d.Name, d.PropertiesAttributes)
for key, value := range getDataSourceFabricItemDefinitionAttributes(ctx, d.Name, d.FormatTypes, d.DefinitionPathKeys) {
attributes[key] = value
@@ -90,30 +76,31 @@ func getDataSourceFabricItemBaseAttributes(ctx context.Context, itemName string,
"timeouts": timeouts.Attributes(ctx),
}
+ // id attribute
+ attrID := schema.StringAttribute{}
+ attrID.MarkdownDescription = fmt.Sprintf("The %s ID.", itemName)
+ attrID.CustomType = customtypes.UUIDType{}
+
if isDisplayNameUnique {
- attributes["id"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s ID.", itemName),
- Optional: true,
- Computed: true,
- CustomType: customtypes.UUIDType{},
- }
- attributes["display_name"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s display name.", itemName),
- Optional: true,
- Computed: true,
- }
+ attrID.Optional = true
+ attrID.Computed = true
} else {
- attributes["id"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s ID.", itemName),
- Required: true,
- CustomType: customtypes.UUIDType{},
- }
- attributes["display_name"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s display name.", itemName),
- Computed: true,
- }
+ attrID.Required = true
+ }
+
+ attributes["id"] = attrID
+
+ // display_name attribute
+ attrDisplayName := schema.StringAttribute{}
+ attrDisplayName.MarkdownDescription = fmt.Sprintf("The %s display name.", itemName)
+ attrDisplayName.Computed = true
+
+ if isDisplayNameUnique {
+ attrDisplayName.Optional = true
}
+ attributes["display_name"] = attrDisplayName
+
return attributes
}
@@ -140,16 +127,9 @@ func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, name strin
Computed: true,
}
- definitionMarkdownDescription := "Definition parts."
-
- if len(definitionPathKeys) > 0 {
- definitionMarkdownDescription = definitionMarkdownDescription + " Possible path keys: " + utils.ConvertStringSlicesToString(definitionPathKeys, true, false) + "."
- }
-
- attributes["definition"] = schema.MapNestedAttribute{
- MarkdownDescription: definitionMarkdownDescription,
- Computed: true,
- CustomType: supertypes.NewMapNestedObjectTypeOf[DataSourceFabricItemDefinitionPartModel](ctx),
+ attrDefinition := schema.MapNestedAttribute{
+ Computed: true,
+ CustomType: supertypes.NewMapNestedObjectTypeOf[dataSourceFabricItemDefinitionPartModel](ctx),
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"content": schema.StringAttribute{
@@ -161,5 +141,22 @@ func getDataSourceFabricItemDefinitionAttributes(ctx context.Context, name strin
},
}
+ if len(definitionPathKeys) > 0 {
+ attrDefinition.MarkdownDescription = "Definition parts. Possible path keys: " + utils.ConvertStringSlicesToString(definitionPathKeys, true, false) + "."
+ } else {
+ attrDefinition.MarkdownDescription = "Definition parts."
+ }
+
+ attributes["definition"] = attrDefinition
+
return attributes
}
+
+func getDataSourceFabricItemPropertiesNestedAttr[Ttfprop any](ctx context.Context, name string, attributes map[string]schema.Attribute) schema.SingleNestedAttribute {
+ return schema.SingleNestedAttribute{
+ MarkdownDescription: "The " + name + " properties.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[Ttfprop](ctx),
+ Attributes: attributes,
+ }
+}
diff --git a/internal/pkg/fabricitem/models.go b/internal/pkg/fabricitem/models.go
index e49db23f..af1cffbd 100644
--- a/internal/pkg/fabricitem/models.go
+++ b/internal/pkg/fabricitem/models.go
@@ -13,14 +13,14 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
)
-type baseFabricItemModel struct {
+type fabricItemModel struct {
WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
ID customtypes.UUID `tfsdk:"id"`
DisplayName types.String `tfsdk:"display_name"`
Description types.String `tfsdk:"description"`
}
-func (to *baseFabricItemModel) set(from fabcore.Item) {
+func (to *fabricItemModel) set(from fabcore.Item) {
to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
to.ID = customtypes.NewUUIDPointerValue(from.ID)
to.DisplayName = types.StringPointerValue(from.DisplayName)
diff --git a/internal/pkg/fabricitem/models_data_item.go b/internal/pkg/fabricitem/models_data_item.go
index 7c500ed6..cdbd1391 100644
--- a/internal/pkg/fabricitem/models_data_item.go
+++ b/internal/pkg/fabricitem/models_data_item.go
@@ -5,29 +5,9 @@ package fabricitem
import (
"github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/transforms"
)
type dataSourceFabricItemModel struct {
- baseFabricItemModel
+ fabricItemModel
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
-
-type DataSourceFabricItemDefinitionPartModel struct {
- Content types.String `tfsdk:"content"`
-}
-
-func (to *DataSourceFabricItemDefinitionPartModel) Set(from string) diag.Diagnostics {
- content := from
-
- if diags := transforms.PayloadToGzip(&content); diags.HasError() {
- return diags
- }
-
- to.Content = types.StringPointerValue(&content)
-
- return nil
-}
diff --git a/internal/pkg/fabricitem/models_data_item_definition.go b/internal/pkg/fabricitem/models_data_item_definition.go
index f56ae96d..953ba3a7 100644
--- a/internal/pkg/fabricitem/models_data_item_definition.go
+++ b/internal/pkg/fabricitem/models_data_item_definition.go
@@ -11,28 +11,56 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/transforms"
)
type dataSourceFabricItemDefinitionModel struct {
- baseFabricItemModel
+ fabricItemModel
Format types.String `tfsdk:"format"`
OutputDefinition types.Bool `tfsdk:"output_definition"`
- Definition supertypes.MapNestedObjectValueOf[DataSourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
+ Definition supertypes.MapNestedObjectValueOf[dataSourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
-func (to *dataSourceFabricItemDefinitionModel) setDefinition(ctx context.Context, from fabcore.ItemDefinition) diag.Diagnostics {
- defParts := make(map[string]*DataSourceFabricItemDefinitionPartModel, len(from.Parts))
+func (to *dataSourceFabricItemDefinitionModel) setDefinition(v supertypes.MapNestedObjectValueOf[dataSourceFabricItemDefinitionPartModel]) {
+ to.Definition = v
+}
+
+func getDataSourceDefinitionModel(ctx context.Context, from fabcore.ItemDefinition) (supertypes.MapNestedObjectValueOf[dataSourceFabricItemDefinitionPartModel], diag.Diagnostics) {
+ defParts := make(map[string]*dataSourceFabricItemDefinitionPartModel, len(from.Parts))
+
+ result := supertypes.NewMapNestedObjectValueOfNull[dataSourceFabricItemDefinitionPartModel](ctx)
for _, part := range from.Parts {
- newPart := &DataSourceFabricItemDefinitionPartModel{}
+ newPart := &dataSourceFabricItemDefinitionPartModel{}
- if diags := newPart.Set(*part.Payload); diags.HasError() {
- return diags
+ if diags := newPart.set(*part.Payload); diags.HasError() {
+ return result, diags
}
defParts[*part.Path] = newPart
}
- return to.Definition.Set(ctx, defParts)
+ if diags := result.Set(ctx, defParts); diags.HasError() {
+ return result, diags
+ }
+
+ return result, nil
+}
+
+type dataSourceFabricItemDefinitionPartModel struct {
+ Content types.String `tfsdk:"content"`
+}
+
+func (to *dataSourceFabricItemDefinitionPartModel) set(from string) diag.Diagnostics {
+ content := from
+
+ if diags := transforms.PayloadToGzip(&content); diags.HasError() {
+ return diags
+ }
+
+ to.Content = types.StringPointerValue(&content)
+
+ return nil
}
diff --git a/internal/pkg/fabricitem/models_data_item_definition_properties.go b/internal/pkg/fabricitem/models_data_item_definition_properties.go
index ea0faf44..32da42e9 100644
--- a/internal/pkg/fabricitem/models_data_item_definition_properties.go
+++ b/internal/pkg/fabricitem/models_data_item_definition_properties.go
@@ -4,35 +4,19 @@
package fabricitem
import (
- "context"
-
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
- fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
)
type DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop any] struct {
FabricItemPropertiesModel[Ttfprop, Titemprop]
Format types.String `tfsdk:"format"`
OutputDefinition types.Bool `tfsdk:"output_definition"`
- Definition supertypes.MapNestedObjectValueOf[DataSourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
+ Definition supertypes.MapNestedObjectValueOf[dataSourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
-func (to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) setDefinition(ctx context.Context, from fabcore.ItemDefinition) diag.Diagnostics {
- defParts := make(map[string]*DataSourceFabricItemDefinitionPartModel, len(from.Parts))
-
- for _, part := range from.Parts {
- newPart := &DataSourceFabricItemDefinitionPartModel{}
-
- if diags := newPart.Set(*part.Payload); diags.HasError() {
- return diags
- }
-
- defParts[*part.Path] = newPart
- }
-
- return to.Definition.Set(ctx, defParts)
+func (to *DataSourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) setDefinition(v supertypes.MapNestedObjectValueOf[dataSourceFabricItemDefinitionPartModel]) {
+ to.Definition = v
}
diff --git a/internal/services/environment/models_data_environment.go b/internal/pkg/fabricitem/models_data_item_properties.go
similarity index 60%
rename from internal/services/environment/models_data_environment.go
rename to internal/pkg/fabricitem/models_data_item_properties.go
index 07b5ade0..93ddb514 100644
--- a/internal/services/environment/models_data_environment.go
+++ b/internal/pkg/fabricitem/models_data_item_properties.go
@@ -1,13 +1,13 @@
// Copyright (c) Microsoft Corporation
// SPDX-License-Identifier: MPL-2.0
-package environment
+package fabricitem
import (
"github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
)
-type dataSourceEnvironmentModel struct {
- baseEnvironmentPropertiesModel
+type DataSourceFabricItemPropertiesModel[Ttfprop, Titemprop any] struct {
+ FabricItemPropertiesModel[Ttfprop, Titemprop]
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
diff --git a/internal/pkg/fabricitem/models_data_items.go b/internal/pkg/fabricitem/models_data_items.go
index 5cc098cb..2fadab7e 100644
--- a/internal/pkg/fabricitem/models_data_items.go
+++ b/internal/pkg/fabricitem/models_data_items.go
@@ -15,16 +15,16 @@ import (
)
type dataSourceFabricItemsModel struct {
- WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
- Values supertypes.ListNestedObjectValueOf[baseFabricItemModel] `tfsdk:"values"`
- Timeouts timeouts.Value `tfsdk:"timeouts"`
+ WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
+ Values supertypes.ListNestedObjectValueOf[fabricItemModel] `tfsdk:"values"`
+ Timeouts timeouts.Value `tfsdk:"timeouts"`
}
func (to *dataSourceFabricItemsModel) setValues(ctx context.Context, from []fabcore.Item) diag.Diagnostics {
- slice := make([]*baseFabricItemModel, 0, len(from))
+ slice := make([]*fabricItemModel, 0, len(from))
for _, entity := range from {
- var entityModel baseFabricItemModel
+ var entityModel fabricItemModel
entityModel.set(entity)
slice = append(slice, &entityModel)
}
diff --git a/internal/pkg/fabricitem/models_resource_item.go b/internal/pkg/fabricitem/models_resource_item.go
index 5252685f..a6486fa1 100644
--- a/internal/pkg/fabricitem/models_resource_item.go
+++ b/internal/pkg/fabricitem/models_resource_item.go
@@ -4,13 +4,18 @@
package fabricitem
import (
- azto "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
+ "context"
+ "reflect"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/types"
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
)
type resourceFabricItemModel struct {
- baseFabricItemModel
+ fabricItemModel
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
@@ -18,17 +23,97 @@ type requestCreateFabricItem struct {
fabcore.CreateItemRequest
}
-func (to *requestCreateFabricItem) set(from resourceFabricItemModel, itemType fabcore.ItemType) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
- to.Type = azto.Ptr(itemType)
+type DBPointer[T any] interface {
+ *T
+ fabcore.CreateItemRequest
+}
+
+func (to *requestCreateFabricItem) setDisplayName(v types.String) {
+ to.DisplayName = v.ValueStringPointer()
+}
+
+func (to *requestCreateFabricItem) setDescription(v types.String) {
+ to.Description = v.ValueStringPointer()
+}
+
+func (to *requestCreateFabricItem) setType(v fabcore.ItemType) {
+ to.Type = &v
+}
+
+func (to *requestCreateFabricItem) setDefinition(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], format types.String, definitionUpdateEnabled types.Bool) diag.Diagnostics {
+ if !definition.IsNull() && !definition.IsUnknown() {
+ var def fabricItemDefinition
+
+ def.setFormat(format)
+
+ if diags := def.setParts(ctx, definition, "", []string{}, definitionUpdateEnabled, false); diags.HasError() {
+ return diags
+ }
+
+ to.Definition = &def.ItemDefinition
+ }
+
+ return nil
+}
+
+func (to *requestCreateFabricItem) setCreationPayload(v any) {
+ if v != nil {
+ to.CreationPayload = v
+ }
+}
+
+func getCreationPayload[Ttfconfig, Titemconfig any](ctx context.Context, configuration supertypes.SingleNestedObjectValueOf[Ttfconfig], creationPayloadSetter func(ctx context.Context, from Ttfconfig) (*Titemconfig, diag.Diagnostics)) (*Titemconfig, diag.Diagnostics) {
+ if !configuration.IsNull() && !configuration.IsUnknown() {
+ config, diags := configuration.Get(ctx)
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ creationPayload, diags := creationPayloadSetter(ctx, *config)
+ if diags.HasError() {
+ return nil, diags
+ }
+
+ return creationPayload, nil
+ }
+
+ return nil, nil
}
type requestUpdateFabricItem struct {
fabcore.UpdateItemRequest
}
-func (to *requestUpdateFabricItem) set(from resourceFabricItemModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
+func (to *requestUpdateFabricItem) setDisplayName(v types.String) {
+ to.DisplayName = v.ValueStringPointer()
+}
+
+func (to *requestUpdateFabricItem) setDescription(v types.String) {
+ to.Description = v.ValueStringPointer()
+}
+
+func fabricItemCheckUpdate(planDisplayName, planDescription, stateDisplayName, stateDescription types.String, reqUpdatePlan *requestUpdateFabricItem) bool {
+ var reqUpdateState requestUpdateFabricItem
+
+ reqUpdatePlan.setDisplayName(planDisplayName)
+ reqUpdatePlan.setDescription(planDescription)
+
+ reqUpdateState.setDisplayName(stateDisplayName)
+ reqUpdateState.setDescription(stateDescription)
+
+ return !reflect.DeepEqual(reqUpdatePlan.UpdateItemRequest, reqUpdateState.UpdateItemRequest)
+}
+
+func fabricItemCheckUpdateDefinition(ctx context.Context, planDefinition, stateDefinition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], planFormat types.String, planDefinitionUpdateEnabled types.Bool, definitionEmpty string, definitionPaths []string, reqUpdate *requestUpdateFabricItemDefinition) (bool, diag.Diagnostics) {
+ if !planDefinition.Equal(stateDefinition) && planDefinitionUpdateEnabled.ValueBool() {
+ if diags := reqUpdate.setDefinition(ctx, planDefinition, planFormat, planDefinitionUpdateEnabled, definitionEmpty, definitionPaths); diags.HasError() {
+ return false, diags
+ }
+
+ if len(reqUpdate.Definition.Parts) > 0 && !planDefinition.Equal(stateDefinition) {
+ return true, nil
+ }
+ }
+
+ return false, nil
}
diff --git a/internal/pkg/fabricitem/models_resource_item_config_definition_properties.go b/internal/pkg/fabricitem/models_resource_item_config_definition_properties.go
new file mode 100644
index 00000000..3e071da3
--- /dev/null
+++ b/internal/pkg/fabricitem/models_resource_item_config_definition_properties.go
@@ -0,0 +1,19 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+type ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig any] struct {
+ FabricItemPropertiesModel[Ttfprop, Titemprop]
+ Configuration supertypes.SingleNestedObjectValueOf[Ttfconfig] `tfsdk:"configuration"`
+ Format types.String `tfsdk:"format"`
+ DefinitionUpdateEnabled types.Bool `tfsdk:"definition_update_enabled"`
+ Definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
+ Timeouts timeouts.Value `tfsdk:"timeouts"`
+}
diff --git a/internal/pkg/fabricitem/models_resource_item_config_properties.go b/internal/pkg/fabricitem/models_resource_item_config_properties.go
new file mode 100644
index 00000000..c9ab62a1
--- /dev/null
+++ b/internal/pkg/fabricitem/models_resource_item_config_properties.go
@@ -0,0 +1,15 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+)
+
+type ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig any] struct {
+ FabricItemPropertiesModel[Ttfprop, Titemprop]
+ Configuration supertypes.SingleNestedObjectValueOf[Ttfconfig] `tfsdk:"configuration"`
+ Timeouts timeouts.Value `tfsdk:"timeouts"`
+}
diff --git a/internal/pkg/fabricitem/models_resource_item_definition.go b/internal/pkg/fabricitem/models_resource_item_definition.go
index 2cfc7427..e78b27ea 100644
--- a/internal/pkg/fabricitem/models_resource_item_definition.go
+++ b/internal/pkg/fabricitem/models_resource_item_definition.go
@@ -18,14 +18,14 @@ import (
)
type resourceFabricItemDefinitionModel struct {
- baseFabricItemModel
+ fabricItemModel
Format types.String `tfsdk:"format"`
DefinitionUpdateEnabled types.Bool `tfsdk:"definition_update_enabled"`
- Definition supertypes.MapNestedObjectValueOf[ResourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
+ Definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
-type ResourceFabricItemDefinitionPartModel struct {
+type resourceFabricItemDefinitionPartModel struct {
Source types.String `tfsdk:"source"`
Tokens supertypes.MapValueOf[string] `tfsdk:"tokens"`
SourceContentSha256 types.String `tfsdk:"source_content_sha256"`
@@ -35,14 +35,16 @@ type fabricItemDefinition struct {
fabcore.ItemDefinition
}
-func (to *fabricItemDefinition) set(ctx context.Context, from resourceFabricItemDefinitionModel, update bool, definitionEmpty string, definitionPaths []string) diag.Diagnostics { //revive:disable-line:flag-parameter
- if from.Format.ValueString() != DefinitionFormatNotApplicable {
- to.Format = from.Format.ValueStringPointer()
+func (to *fabricItemDefinition) setFormat(v types.String) {
+ if v.ValueString() != DefinitionFormatNotApplicable && v.ValueString() != "" {
+ to.Format = v.ValueStringPointer()
}
+}
+func (to *fabricItemDefinition) setParts(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], definitionEmpty string, definitionPaths []string, definitionUpdateEnabled types.Bool, update bool) diag.Diagnostics { //revive:disable-line:flag-parameter
to.Parts = []fabcore.ItemDefinitionPart{}
- defParts, diags := from.Definition.Get(ctx)
+ defParts, diags := definition.Get(ctx)
if diags.HasError() {
return diags
}
@@ -69,7 +71,7 @@ func (to *fabricItemDefinition) set(ctx context.Context, from resourceFabricItem
}
for defPartKey, defPartValue := range defParts {
- if !update || (update && from.DefinitionUpdateEnabled.ValueBool()) {
+ if !update || (update && definitionUpdateEnabled.ValueBool()) {
payloadB64, _, diags := transforms.SourceFileToPayload(ctx, defPartValue.Source, defPartValue.Tokens)
if diags.HasError() {
return diags
@@ -86,45 +88,16 @@ func (to *fabricItemDefinition) set(ctx context.Context, from resourceFabricItem
return nil
}
-type requestCreateFabricItemDefinition struct {
- fabcore.CreateItemRequest
-}
-
-func (to *requestCreateFabricItemDefinition) set(ctx context.Context, from resourceFabricItemDefinitionModel, itemType fabcore.ItemType) diag.Diagnostics {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
- to.Type = azto.Ptr(itemType)
-
- if !from.Definition.IsNull() && !from.Definition.IsUnknown() {
- var def fabricItemDefinition
-
- if diags := def.set(ctx, from, false, "", []string{}); diags.HasError() {
- return diags
- }
-
- to.Definition = &def.ItemDefinition
- }
-
- return nil
-}
-
type requestUpdateFabricItemDefinition struct {
- fabcore.UpdateItemRequest
-}
-
-func (to *requestUpdateFabricItemDefinition) set(from resourceFabricItemDefinitionModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
-
-type requestUpdateFabricItemDefinitionDefinition struct {
fabcore.UpdateItemDefinitionRequest
}
-func (to *requestUpdateFabricItemDefinitionDefinition) set(ctx context.Context, from resourceFabricItemDefinitionModel, definitionEmpty string, definitionPaths []string) diag.Diagnostics {
+func (to *requestUpdateFabricItemDefinition) setDefinition(ctx context.Context, definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel], format types.String, definitionUpdateEnabled types.Bool, definitionEmpty string, definitionPaths []string) diag.Diagnostics {
var def fabricItemDefinition
- if diags := def.set(ctx, from, true, definitionEmpty, definitionPaths); diags.HasError() {
+ def.setFormat(format)
+
+ if diags := def.setParts(ctx, definition, definitionEmpty, definitionPaths, definitionUpdateEnabled, true); diags.HasError() {
return diags
}
diff --git a/internal/pkg/fabricitem/models_resource_item_definition_properties.go b/internal/pkg/fabricitem/models_resource_item_definition_properties.go
index 37abc206..4259df39 100644
--- a/internal/pkg/fabricitem/models_resource_item_definition_properties.go
+++ b/internal/pkg/fabricitem/models_resource_item_definition_properties.go
@@ -4,135 +4,15 @@
package fabricitem
import (
- "context"
-
- azto "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
- fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
-
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/transforms"
)
type ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop any] struct {
- // FabricItemPropertiesModel[Titemprop, Titemprop]
- baseFabricItemModel
- Properties supertypes.SingleNestedObjectValueOf[Ttfprop] `tfsdk:"properties"`
+ FabricItemPropertiesModel[Ttfprop, Titemprop]
Format types.String `tfsdk:"format"`
DefinitionUpdateEnabled types.Bool `tfsdk:"definition_update_enabled"`
- Definition supertypes.MapNestedObjectValueOf[ResourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
+ Definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel] `tfsdk:"definition"`
Timeouts timeouts.Value `tfsdk:"timeouts"`
}
-
-func (to *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) set(from FabricItemProperties[Titemprop]) { //revive:disable-line:confusing-naming
- to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
- to.ID = customtypes.NewUUIDPointerValue(from.ID)
- to.DisplayName = types.StringPointerValue(from.DisplayName)
- to.Description = types.StringPointerValue(from.Description)
-}
-
-type FabricItemDefinitionProperties[Ttfprop, Titemprop any] struct { //revive:disable-line:exported
- fabcore.ItemDefinition
-}
-
-func (to *FabricItemDefinitionProperties[Ttfprop, Titemprop]) set(ctx context.Context, from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], update bool, definitionEmpty string, definitionPaths []string) diag.Diagnostics { //revive:disable-line:flag-parameter,confusing-naming
- if from.Format.ValueString() != DefinitionFormatNotApplicable {
- to.Format = from.Format.ValueStringPointer()
- }
-
- to.Parts = []fabcore.ItemDefinitionPart{}
-
- defParts, diags := from.Definition.Get(ctx)
- if diags.HasError() {
- return diags
- }
-
- if (len(defParts) == 0) && len(definitionPaths) > 0 && update {
- content := definitionEmpty
-
- if err := transforms.Base64Encode(&content); err != nil {
- diags.AddError(
- common.ErrorBase64EncodeHeader,
- err.Error(),
- )
-
- return diags
- }
-
- to.Parts = append(to.Parts, fabcore.ItemDefinitionPart{
- Path: azto.Ptr(definitionPaths[0]),
- Payload: &content,
- PayloadType: azto.Ptr(fabcore.PayloadTypeInlineBase64),
- })
-
- return nil
- }
-
- for defPartKey, defPartValue := range defParts {
- if !update || (update && from.DefinitionUpdateEnabled.ValueBool()) {
- payloadB64, _, diags := transforms.SourceFileToPayload(ctx, defPartValue.Source, defPartValue.Tokens)
- if diags.HasError() {
- return diags
- }
-
- to.Parts = append(to.Parts, fabcore.ItemDefinitionPart{
- Path: azto.Ptr(defPartKey),
- Payload: payloadB64,
- PayloadType: azto.Ptr(fabcore.PayloadTypeInlineBase64),
- })
- }
- }
-
- return nil
-}
-
-type requestCreateFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct {
- fabcore.CreateItemRequest
-}
-
-func (to *requestCreateFabricItemDefinitionProperties[Ttfprop, Titemprop]) set(ctx context.Context, from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], itemType fabcore.ItemType) diag.Diagnostics { //revive:disable-line:confusing-naming
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
- to.Type = azto.Ptr(itemType)
-
- if !from.Definition.IsNull() && !from.Definition.IsUnknown() {
- var def FabricItemDefinitionProperties[Ttfprop, Titemprop]
-
- if diags := def.set(ctx, from, false, "", []string{}); diags.HasError() {
- return diags
- }
-
- to.Definition = &def.ItemDefinition
- }
-
- return nil
-}
-
-type requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct {
- fabcore.UpdateItemRequest
-}
-
-func (to *requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop]) set(from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) { //revive:disable-line:confusing-naming
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
-
-type requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop any] struct {
- fabcore.UpdateItemDefinitionRequest
-}
-
-func (to *requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop]) set(ctx context.Context, from ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], definitionEmpty string, definitionPaths []string) diag.Diagnostics { //revive:disable-line:confusing-naming
- var def FabricItemDefinitionProperties[Ttfprop, Titemprop]
-
- if diags := def.set(ctx, from, true, definitionEmpty, definitionPaths); diags.HasError() {
- return diags
- }
-
- to.Definition = &def.ItemDefinition
-
- return nil
-}
diff --git a/internal/pkg/fabricitem/models_resource_item_properties.go b/internal/pkg/fabricitem/models_resource_item_properties.go
new file mode 100644
index 00000000..415f9553
--- /dev/null
+++ b/internal/pkg/fabricitem/models_resource_item_properties.go
@@ -0,0 +1,13 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+)
+
+type ResourceFabricItemPropertiesModel[Ttfprop, Titemprop any] struct {
+ FabricItemPropertiesModel[Ttfprop, Titemprop]
+ Timeouts timeouts.Value `tfsdk:"timeouts"`
+}
diff --git a/internal/pkg/fabricitem/resource_item.go b/internal/pkg/fabricitem/resource_item.go
index 09384fad..72951173 100644
--- a/internal/pkg/fabricitem/resource_item.go
+++ b/internal/pkg/fabricitem/resource_item.go
@@ -48,7 +48,7 @@ func (r *ResourceFabricItem) Metadata(_ context.Context, req resource.MetadataRe
}
func (r *ResourceFabricItem) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = GetResourceFabricItemSchema(ctx, *r)
+ resp.Schema = getResourceFabricItemSchema(ctx, *r)
}
func (r *ResourceFabricItem) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
@@ -95,7 +95,9 @@ func (r *ResourceFabricItem) Create(ctx context.Context, req resource.CreateRequ
var reqCreate requestCreateFabricItem
- reqCreate.set(plan, r.Type)
+ reqCreate.setDisplayName(plan.DisplayName)
+ reqCreate.setDescription(plan.Description)
+ reqCreate.setType(r.Type)
respCreate, err := r.client.CreateItem(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateItemRequest, nil)
if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
@@ -187,7 +189,8 @@ func (r *ResourceFabricItem) Update(ctx context.Context, req resource.UpdateRequ
var reqUpdate requestUpdateFabricItem
- reqUpdate.set(plan)
+ reqUpdate.setDisplayName(plan.DisplayName)
+ reqUpdate.setDescription(plan.Description)
respUpdate, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateItemRequest, nil)
if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
diff --git a/internal/pkg/fabricitem/resource_item_config_definition_properties.go b/internal/pkg/fabricitem/resource_item_config_definition_properties.go
new file mode 100644
index 00000000..f4032f52
--- /dev/null
+++ b/internal/pkg/fabricitem/resource_item_config_definition_properties.go
@@ -0,0 +1,433 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework-validators/resourcevalidator"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+ pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
+)
+
// Ensure the implementation satisfies the expected interfaces.
// These compile-time assertions instantiate the generic with empty structs so
// any missing framework method surfaces as a build error, not a runtime one.
var (
	_ resource.ResourceWithModifyPlan       = (*ResourceFabricItemConfigDefinitionProperties[struct{}, struct{}, struct{}, struct{}])(nil)
	_ resource.ResourceWithConfigValidators = (*ResourceFabricItemConfigDefinitionProperties[struct{}, struct{}, struct{}, struct{}])(nil)
	_ resource.ResourceWithConfigure        = (*ResourceFabricItemConfigDefinitionProperties[struct{}, struct{}, struct{}, struct{}])(nil)
	_ resource.ResourceWithImportState      = (*ResourceFabricItemConfigDefinitionProperties[struct{}, struct{}, struct{}, struct{}])(nil)
)
+
// ResourceFabricItemConfigDefinitionProperties is a generic Terraform resource
// for Fabric items that support a creation-time configuration, an updatable
// definition, and read-only properties.
//
// Type parameters: Ttfprop/Titemprop are the Terraform-side and API-side
// property types; Ttfconfig/Titemconfig are the Terraform-side configuration
// and API-side creation-payload types.
type ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig any] struct {
	ResourceFabricItemDefinition
	// IsConfigRequired presumably marks the "configuration" attribute as
	// required in the schema — not referenced in this file; confirm in the
	// schema builder.
	IsConfigRequired bool
	// ConfigAttributes is the nested attribute set of "configuration".
	ConfigAttributes map[string]schema.Attribute
	// PropertiesAttributes is the nested attribute set of "properties".
	PropertiesAttributes map[string]schema.Attribute
	// PropertiesSetter copies API-side properties into the Terraform model.
	PropertiesSetter func(ctx context.Context, from *Titemprop, to *ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) diag.Diagnostics
	// CreationPayloadSetter converts the Terraform configuration into the
	// API creation payload used on Create.
	CreationPayloadSetter func(ctx context.Context, from Ttfconfig) (*Titemconfig, diag.Diagnostics)
	// ItemGetter fetches the Fabric item (including properties) for the model.
	ItemGetter func(ctx context.Context, fabricClient fabric.Client, model ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig], fabricItem *FabricItemProperties[Titemprop]) error
}
+
+func NewResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig any](config ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) resource.Resource {
+ return &config
+}
+
+func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { //revive:disable-line:confusing-naming
+ resp.TypeName = req.ProviderTypeName + "_" + r.TFName
+}
+
// ModifyPlan warns the user during planning when the item definition would be
// pushed to the service on apply, so definition overwrites are never silent.
func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "MODIFY PLAN", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "MODIFY PLAN", map[string]any{
		"config": req.Config,
		"plan":   req.Plan,
		"state":  req.State,
	})

	// Only meaningful on update: both prior state and a planned state exist
	// (i.e. neither create nor destroy).
	if !req.State.Raw.IsNull() && !req.Plan.Raw.IsNull() {
		var plan, state ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

		resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
		resp.Diagnostics.Append(req.State.Get(ctx, &state)...)

		if resp.Diagnostics.HasError() {
			return
		}

		// The populated request is discarded here; only the boolean result
		// decides whether to emit the warning.
		var reqUpdateDefinition requestUpdateFabricItemDefinition

		doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
		if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
			return
		}

		if doUpdateDefinition {
			resp.Diagnostics.AddWarning(
				common.WarningItemDefinitionUpdateHeader,
				fmt.Sprintf(common.WarningItemDefinitionUpdateDetails, r.Name),
			)
		}
	}

	tflog.Debug(ctx, "MODIFY PLAN", map[string]any{
		"action": "end",
	})
}
+
// Schema delegates to the shared schema builder for config+definition+properties resources.
func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { //revive:disable-line:confusing-naming
	resp.Schema = getResourceFabricItemConfigDefinitionPropertiesSchema(ctx, *r)
}
+
+func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) ConfigValidators(_ context.Context) []resource.ConfigValidator { //revive:disable-line:confusing-naming
+ return []resource.ConfigValidator{
+ resourcevalidator.Conflicting(
+ path.MatchRoot("configuration"),
+ path.MatchRoot("definition"),
+ ),
+ resourcevalidator.ExactlyOneOf(
+ path.MatchRoot("configuration"),
+ path.MatchRoot("definition"),
+ ),
+ }
+}
+
+func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { //revive:disable-line:confusing-naming
+ if req.ProviderData == nil {
+ return
+ }
+
+ pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
+ if !ok {
+ resp.Diagnostics.AddError(
+ common.ErrorResourceConfigType,
+ fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
+ )
+
+ return
+ }
+
+ r.pConfigData = pConfigData
+ r.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
+}
+
// Create provisions the Fabric item from the plan. Display name, description,
// type, optional definition, and optional creation payload (built from the
// "configuration" attribute) are all sent in a single CreateItem call; the
// resulting item is then re-read to populate computed attributes.
func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "CREATE", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "CREATE", map[string]any{
		"config": req.Config,
		"plan":   req.Plan,
	})

	var plan ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

	if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
		return
	}

	timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	var reqCreate requestCreateFabricItem

	reqCreate.setDisplayName(plan.DisplayName)
	reqCreate.setDescription(plan.Description)
	reqCreate.setType(r.Type)

	// Definition and configuration are mutually exclusive (see
	// ConfigValidators); whichever one is set in the plan populates the request.
	if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled)...); resp.Diagnostics.HasError() {
		return
	}

	creationPayload, diags := getCreationPayload(ctx, plan.Configuration, r.CreationPayloadSetter)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	reqCreate.setCreationPayload(creationPayload)

	respCreate, err := r.client.CreateItem(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateItemRequest, nil)
	if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
		return
	}

	// Adopt the server-assigned identifiers before the follow-up read.
	plan.ID = customtypes.NewUUIDPointerValue(respCreate.ID)
	plan.WorkspaceID = customtypes.NewUUIDPointerValue(respCreate.WorkspaceID)

	if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)

	tflog.Debug(ctx, "CREATE", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
// Read refreshes the item from the service. If the item no longer exists it is
// removed from state (not treated as an error) so Terraform plans a re-create.
func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "READ", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "READ", map[string]any{
		"state": req.State,
	})

	var state ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

	if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
		return
	}

	timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	// EntityNotFound is downgraded: drop the resource from state and return
	// whatever diagnostics remain after IsErrNotFound filtered the error.
	diags = r.get(ctx, &state)
	if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
		resp.State.RemoveResource(ctx)

		resp.Diagnostics.Append(diags...)

		return
	}

	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, state)...)

	tflog.Debug(ctx, "READ", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
// Update applies two independent changes: (1) display name/description via
// UpdateItem, only when they actually differ from state; (2) the item
// definition via UpdateItemDefinition, only when the definition check says so.
func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "UPDATE", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "UPDATE", map[string]any{
		"config": req.Config,
		"plan":   req.Plan,
		"state":  req.State,
	})

	var plan, state ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

	resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
	resp.Diagnostics.Append(req.State.Get(ctx, &state)...)

	if resp.Diagnostics.HasError() {
		return
	}

	timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	var reqUpdatePlan requestUpdateFabricItem

	if fabricItemCheckUpdate(plan.DisplayName, plan.Description, state.DisplayName, state.Description, &reqUpdatePlan) {
		tflog.Trace(ctx, fmt.Sprintf("updating %s (WorkspaceID: %s ItemID: %s)", r.Name, plan.WorkspaceID.ValueString(), plan.ID.ValueString()))

		_, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdatePlan.UpdateItemRequest, nil)
		if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
			return
		}

		if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
			return
		}

		// Persist intermediate state now: if the definition update below
		// fails, the name/description change already applied on the service
		// is still recorded.
		resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
	}

	var reqUpdateDefinition requestUpdateFabricItemDefinition

	doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	if doUpdateDefinition {
		tflog.Trace(ctx, fmt.Sprintf("updating %s definition", r.Name))

		_, err := r.client.UpdateItemDefinition(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdateDefinition.UpdateItemDefinitionRequest, nil)
		if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
			return
		}
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)

	tflog.Debug(ctx, "UPDATE", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
+func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { //revive:disable-line:confusing-naming
+ tflog.Debug(ctx, "DELETE", map[string]any{
+ "action": "start",
+ })
+ tflog.Trace(ctx, "DELETE", map[string]any{
+ "state": req.State,
+ })
+
+ var state ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]
+
+ if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
+ if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx, cancel := context.WithTimeout(ctx, timeout)
+ defer cancel()
+
+ _, err := r.client.DeleteItem(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
+ if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ tflog.Debug(ctx, "DELETE", map[string]any{
+ "action": "end",
+ })
+}
+
// ImportState accepts an ID of the form "WorkspaceID/ItemID", seeds a minimal
// state model (configuration/definition attributes default to their schema
// null values via GetAttribute on the empty response state), and performs a
// full read to populate the rest.
//
// NOTE(review): the "format" attribute is not read back here, so Format keeps
// its zero value after import — confirm this matches the schema's defaults.
func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "IMPORT", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "IMPORT", map[string]any{
		"id": req.ID,
	})

	workspaceID, fabricItemID, found := strings.Cut(req.ID, "/")
	if !found {
		resp.Diagnostics.AddError(
			common.ErrorImportIdentifierHeader,
			fmt.Sprintf(
				common.ErrorImportIdentifierDetails,
				fmt.Sprintf("WorkspaceID/%sID", string(r.Type)),
			),
		)

		return
	}

	uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
	resp.Diagnostics.Append(diags...)

	uuidFabricItemID, diags := customtypes.NewUUIDValueMust(fabricItemID)
	resp.Diagnostics.Append(diags...)

	if resp.Diagnostics.HasError() {
		return
	}

	var configuration supertypes.SingleNestedObjectValueOf[Ttfconfig]
	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("configuration"), &configuration)...); resp.Diagnostics.HasError() {
		return
	}

	var timeout timeouts.Value
	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
		return
	}

	var definitionUpdateEnabled types.Bool
	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("definition_update_enabled"), &definitionUpdateEnabled)...); resp.Diagnostics.HasError() {
		return
	}

	var definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel]
	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("definition"), &definition)...); resp.Diagnostics.HasError() {
		return
	}

	state := ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]{
		FabricItemPropertiesModel: FabricItemPropertiesModel[Ttfprop, Titemprop]{
			ID:          uuidFabricItemID,
			WorkspaceID: uuidWorkspaceID,
		},
		Configuration:           configuration,
		DefinitionUpdateEnabled: definitionUpdateEnabled,
		Definition:              definition,
		Timeouts:                timeout,
	}

	if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, state)...)

	tflog.Debug(ctx, "IMPORT", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
+func (r *ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) get(ctx context.Context, model *ResourceFabricItemConfigDefinitionPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) diag.Diagnostics { //revive:disable-line:confusing-naming
+ tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", r.Name, model.ID.ValueString()))
+
+ var fabricItem FabricItemProperties[Titemprop]
+
+ err := r.ItemGetter(ctx, *r.pConfigData.FabricClient, *model, &fabricItem)
+ if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
+ return diags
+ }
+
+ model.set(fabricItem)
+
+ return r.PropertiesSetter(ctx, fabricItem.Properties, model)
+}
diff --git a/internal/pkg/fabricitem/resource_item_config_properties.go b/internal/pkg/fabricitem/resource_item_config_properties.go
new file mode 100644
index 00000000..33184302
--- /dev/null
+++ b/internal/pkg/fabricitem/resource_item_config_properties.go
@@ -0,0 +1,341 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+ pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
+)
+
// Ensure the implementation satisfies the expected interfaces.
// Compile-time checks with empty-struct instantiations; this resource has no
// definition, so ModifyPlan/ConfigValidators are intentionally absent.
var (
	_ resource.ResourceWithConfigure   = (*ResourceFabricItemConfigProperties[struct{}, struct{}, struct{}, struct{}])(nil)
	_ resource.ResourceWithImportState = (*ResourceFabricItemConfigProperties[struct{}, struct{}, struct{}, struct{}])(nil)
)
+
// ResourceFabricItemConfigProperties is a generic Terraform resource for
// Fabric items that support a creation-time configuration and read-only
// properties, but no definition.
//
// Type parameters: Ttfprop/Titemprop are the Terraform-side and API-side
// property types; Ttfconfig/Titemconfig are the Terraform-side configuration
// and API-side creation-payload types.
type ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig any] struct {
	ResourceFabricItem
	// IsConfigRequired presumably marks the "configuration" attribute as
	// required in the schema — not referenced in this file; confirm in the
	// schema builder.
	IsConfigRequired bool
	// ConfigAttributes is the nested attribute set of "configuration".
	ConfigAttributes map[string]schema.Attribute
	// PropertiesAttributes is the nested attribute set of "properties".
	PropertiesAttributes map[string]schema.Attribute
	// PropertiesSetter copies API-side properties into the Terraform model.
	PropertiesSetter func(ctx context.Context, from *Titemprop, to *ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) diag.Diagnostics
	// CreationPayloadSetter converts the Terraform configuration into the
	// API creation payload used on Create.
	CreationPayloadSetter func(ctx context.Context, from Ttfconfig) (*Titemconfig, diag.Diagnostics)
	// ItemGetter fetches the Fabric item (including properties) for the model.
	ItemGetter func(ctx context.Context, fabricClient fabric.Client, model ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig], fabricItem *FabricItemProperties[Titemprop]) error
}
+
+func NewResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig any](config ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) resource.Resource {
+ return &config
+}
+
+func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { //revive:disable-line:confusing-naming
+ resp.TypeName = req.ProviderTypeName + "_" + r.TFName
+}
+
// Schema delegates to the shared schema builder for config+properties resources.
func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { //revive:disable-line:confusing-naming
	resp.Schema = getResourceFabricItemConfigPropertiesSchema(ctx, *r)
}
+
+func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { //revive:disable-line:confusing-naming
+ if req.ProviderData == nil {
+ return
+ }
+
+ pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
+ if !ok {
+ resp.Diagnostics.AddError(
+ common.ErrorResourceConfigType,
+ fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
+ )
+
+ return
+ }
+
+ r.pConfigData = pConfigData
+ r.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
+}
+
// Create provisions the Fabric item from the plan: display name, description,
// type, and the creation payload built from "configuration" are sent in a
// single CreateItem call, then the item is re-read to fill computed attributes.
func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "CREATE", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "CREATE", map[string]any{
		"config": req.Config,
		"plan":   req.Plan,
	})

	var plan ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

	if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
		return
	}

	timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	var reqCreate requestCreateFabricItem

	reqCreate.setDisplayName(plan.DisplayName)
	reqCreate.setDescription(plan.Description)
	reqCreate.setType(r.Type)

	creationPayload, diags := getCreationPayload(ctx, plan.Configuration, r.CreationPayloadSetter)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	reqCreate.setCreationPayload(creationPayload)

	respCreate, err := r.client.CreateItem(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateItemRequest, nil)
	if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
		return
	}

	// Adopt the server-assigned identifiers before the follow-up read.
	plan.ID = customtypes.NewUUIDPointerValue(respCreate.ID)
	plan.WorkspaceID = customtypes.NewUUIDPointerValue(respCreate.WorkspaceID)

	if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)

	tflog.Debug(ctx, "CREATE", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
// Read refreshes the item from the service; a missing item is dropped from
// state (not an error) so Terraform plans a re-create.
func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "READ", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "READ", map[string]any{
		"state": req.State,
	})

	var state ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

	if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
		return
	}

	timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	// EntityNotFound is downgraded: remove from state and return any
	// remaining diagnostics after IsErrNotFound filtered the error.
	diags = r.get(ctx, &state)
	if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
		resp.State.RemoveResource(ctx)

		resp.Diagnostics.Append(diags...)

		return
	}

	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, state)...)

	tflog.Debug(ctx, "READ", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
// Update sends display name/description changes via UpdateItem, then re-reads
// the item to refresh computed attributes. Unlike the definition-based
// resource, the update request is sent unconditionally (no diff check here).
func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "UPDATE", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "UPDATE", map[string]any{
		"config": req.Config,
		"plan":   req.Plan,
		"state":  req.State,
	})

	var plan, state ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]

	resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
	resp.Diagnostics.Append(req.State.Get(ctx, &state)...)

	if resp.Diagnostics.HasError() {
		return
	}

	timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
		return
	}

	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	var reqUpdate requestUpdateFabricItem

	reqUpdate.setDisplayName(plan.DisplayName)
	reqUpdate.setDescription(plan.Description)

	_, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateItemRequest, nil)
	if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
		return
	}

	if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)

	tflog.Debug(ctx, "UPDATE", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
+func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { //revive:disable-line:confusing-naming
+ tflog.Debug(ctx, "DELETE", map[string]any{
+ "action": "start",
+ })
+ tflog.Trace(ctx, "DELETE", map[string]any{
+ "state": req.State,
+ })
+
+ var state ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]
+
+ if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
+ if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ ctx, cancel := context.WithTimeout(ctx, timeout)
+ defer cancel()
+
+ _, err := r.client.DeleteItem(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
+ if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
+ return
+ }
+
+ tflog.Debug(ctx, "DELETE", map[string]any{
+ "action": "end",
+ })
+}
+
// ImportState accepts an ID of the form "WorkspaceID/ItemID", seeds a minimal
// state model (the "configuration" attribute defaults to its schema null value
// via GetAttribute on the empty response state), then performs a full read.
func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { //revive:disable-line:confusing-naming
	tflog.Debug(ctx, "IMPORT", map[string]any{
		"action": "start",
	})
	tflog.Trace(ctx, "IMPORT", map[string]any{
		"id": req.ID,
	})

	workspaceID, fabricItemID, found := strings.Cut(req.ID, "/")
	if !found {
		resp.Diagnostics.AddError(
			common.ErrorImportIdentifierHeader,
			fmt.Sprintf(
				common.ErrorImportIdentifierDetails,
				fmt.Sprintf("WorkspaceID/%sID", string(r.Type)),
			),
		)

		return
	}

	uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
	resp.Diagnostics.Append(diags...)

	uuidFabricItemID, diags := customtypes.NewUUIDValueMust(fabricItemID)
	resp.Diagnostics.Append(diags...)

	if resp.Diagnostics.HasError() {
		return
	}

	var configuration supertypes.SingleNestedObjectValueOf[Ttfconfig]
	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("configuration"), &configuration)...); resp.Diagnostics.HasError() {
		return
	}

	var timeout timeouts.Value
	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
		return
	}

	state := ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]{
		FabricItemPropertiesModel: FabricItemPropertiesModel[Ttfprop, Titemprop]{
			ID:          uuidFabricItemID,
			WorkspaceID: uuidWorkspaceID,
		},
		Configuration: configuration,
		Timeouts:      timeout,
	}

	if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, state)...)

	tflog.Debug(ctx, "IMPORT", map[string]any{
		"action": "end",
	})

	if resp.Diagnostics.HasError() {
		return
	}
}
+
+func (r *ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) get(ctx context.Context, model *ResourceFabricItemConfigPropertiesModel[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) diag.Diagnostics { //revive:disable-line:confusing-naming
+ tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", r.Name, model.ID.ValueString()))
+
+ var fabricItem FabricItemProperties[Titemprop]
+
+ err := r.ItemGetter(ctx, *r.pConfigData.FabricClient, *model, &fabricItem)
+ if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
+ return diags
+ }
+
+ model.set(fabricItem)
+
+ return r.PropertiesSetter(ctx, fabricItem.Properties, model)
+}
diff --git a/internal/pkg/fabricitem/resource_item_definition.go b/internal/pkg/fabricitem/resource_item_definition.go
index 97c4672e..32c0e292 100644
--- a/internal/pkg/fabricitem/resource_item_definition.go
+++ b/internal/pkg/fabricitem/resource_item_definition.go
@@ -6,7 +6,6 @@ package fabricitem
import (
"context"
"fmt"
- "reflect"
"strings"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
@@ -79,9 +78,9 @@ func (r *ResourceFabricItemDefinition) ModifyPlan(ctx context.Context, req resou
return
}
- var reqUpdate requestUpdateFabricItemDefinitionDefinition
+ var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := r.checkUpdateDefinition(ctx, plan, state, &reqUpdate)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -100,7 +99,7 @@ func (r *ResourceFabricItemDefinition) ModifyPlan(ctx context.Context, req resou
}
func (r *ResourceFabricItemDefinition) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = GetResourceFabricItemDefinitionSchema(ctx, *r)
+ resp.Schema = getResourceFabricItemDefinitionSchema(ctx, *r)
}
func (r *ResourceFabricItemDefinition) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
@@ -145,9 +144,13 @@ func (r *ResourceFabricItemDefinition) Create(ctx context.Context, req resource.
ctx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
- var reqCreate requestCreateFabricItemDefinition
+ var reqCreate requestCreateFabricItem
- if resp.Diagnostics.Append(reqCreate.set(ctx, plan, r.Type)...); resp.Diagnostics.HasError() {
+ reqCreate.setDisplayName(plan.DisplayName)
+ reqCreate.setDescription(plan.Description)
+ reqCreate.setType(r.Type)
+
+ if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled)...); resp.Diagnostics.HasError() {
return
}
@@ -242,9 +245,9 @@ func (r *ResourceFabricItemDefinition) Update(ctx context.Context, req resource.
ctx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
- var reqUpdatePlan requestUpdateFabricItemDefinition
+ var reqUpdatePlan requestUpdateFabricItem
- if r.checkUpdateItem(plan, state, &reqUpdatePlan) {
+ if fabricItemCheckUpdate(plan.DisplayName, plan.Description, state.DisplayName, state.Description, &reqUpdatePlan) {
tflog.Trace(ctx, fmt.Sprintf("updating %s (WorkspaceID: %s ItemID: %s)", r.Name, plan.WorkspaceID.ValueString(), plan.ID.ValueString()))
respUpdate, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdatePlan.UpdateItemRequest, nil)
@@ -257,9 +260,9 @@ func (r *ResourceFabricItemDefinition) Update(ctx context.Context, req resource.
resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
}
- var reqUpdateDefinition requestUpdateFabricItemDefinitionDefinition
+ var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := r.checkUpdateDefinition(ctx, plan, state, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -357,13 +360,13 @@ func (r *ResourceFabricItemDefinition) ImportState(ctx context.Context, req reso
return
}
- var definition supertypes.MapNestedObjectValueOf[ResourceFabricItemDefinitionPartModel]
+ var definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel]
if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("definition"), &definition)...); resp.Diagnostics.HasError() {
return
}
state := resourceFabricItemDefinitionModel{
- baseFabricItemModel: baseFabricItemModel{
+ fabricItemModel: fabricItemModel{
ID: uuidFabricItemID,
WorkspaceID: uuidWorkspaceID,
},
@@ -399,26 +402,3 @@ func (r *ResourceFabricItemDefinition) get(ctx context.Context, model *resourceF
return nil
}
-
-func (r *ResourceFabricItemDefinition) checkUpdateItem(plan, state resourceFabricItemDefinitionModel, reqUpdatePlan *requestUpdateFabricItemDefinition) bool {
- var reqUpdateState requestUpdateFabricItemDefinition
-
- reqUpdatePlan.set(plan)
- reqUpdateState.set(state)
-
- return !reflect.DeepEqual(reqUpdatePlan.UpdateItemRequest, reqUpdateState.UpdateItemRequest)
-}
-
-func (r *ResourceFabricItemDefinition) checkUpdateDefinition(ctx context.Context, plan, state resourceFabricItemDefinitionModel, reqUpdate *requestUpdateFabricItemDefinitionDefinition) (bool, diag.Diagnostics) {
- if !plan.Definition.Equal(state.Definition) && plan.DefinitionUpdateEnabled.ValueBool() {
- if diags := reqUpdate.set(ctx, plan, r.DefinitionEmpty, r.DefinitionPathKeys); diags.HasError() {
- return false, diags
- }
-
- if len(reqUpdate.Definition.Parts) > 0 && !plan.Definition.Equal(state.Definition) {
- return true, nil
- }
- }
-
- return false, nil
-}
diff --git a/internal/pkg/fabricitem/resource_item_definition_properties.go b/internal/pkg/fabricitem/resource_item_definition_properties.go
index affc75b0..d16e8520 100644
--- a/internal/pkg/fabricitem/resource_item_definition_properties.go
+++ b/internal/pkg/fabricitem/resource_item_definition_properties.go
@@ -6,7 +6,6 @@ package fabricitem
import (
"context"
"fmt"
- "reflect"
"strings"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
@@ -35,9 +34,9 @@ var (
type ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop any] struct {
ResourceFabricItemDefinition
- PropertiesSchema schema.SingleNestedAttribute
- PropertiesSetter func(ctx context.Context, from *Titemprop, to *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
- ItemGetter func(ctx context.Context, fabricClient fabric.Client, model ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
+ PropertiesAttributes map[string]schema.Attribute
+ PropertiesSetter func(ctx context.Context, from *Titemprop, to *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
+ ItemGetter func(ctx context.Context, fabricClient fabric.Client, model ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
}
func NewResourceFabricItemDefinitionProperties[Ttfprop, Titemprop any](config ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) resource.Resource {
@@ -48,7 +47,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Metadata(_
resp.TypeName = req.ProviderTypeName + "_" + r.TFName
}
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) {
+func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { //revive:disable-line:confusing-naming
tflog.Debug(ctx, "MODIFY PLAN", map[string]any{
"action": "start",
})
@@ -68,9 +67,9 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ModifyPlan(
return
}
- var reqUpdate requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop]
+ var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := r.checkUpdateDefinition(ctx, plan, state, &reqUpdate)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -89,7 +88,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ModifyPlan(
}
func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { //revive:disable-line:confusing-naming
- resp.Schema = GetResourceFabricItemDefinitionPropertiesSchema1(ctx, *r)
+ resp.Schema = getResourceFabricItemDefinitionPropertiesSchema(ctx, *r)
}
func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { //revive:disable-line:confusing-naming
@@ -111,7 +110,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Configure(_
r.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
}
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { //revive:disable-line:confusing-naming
tflog.Debug(ctx, "CREATE", map[string]any{
"action": "start",
})
@@ -134,9 +133,13 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Create(ctx
ctx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
- var reqCreate requestCreateFabricItemDefinitionProperties[Ttfprop, Titemprop]
+ var reqCreate requestCreateFabricItem
- if resp.Diagnostics.Append(reqCreate.set(ctx, plan, r.Type)...); resp.Diagnostics.HasError() {
+ reqCreate.setDisplayName(plan.DisplayName)
+ reqCreate.setDescription(plan.Description)
+ reqCreate.setType(r.Type)
+
+ if resp.Diagnostics.Append(reqCreate.setDefinition(ctx, plan.Definition, plan.Format, plan.DefinitionUpdateEnabled)...); resp.Diagnostics.HasError() {
return
}
@@ -145,8 +148,8 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Create(ctx
return
}
- plan.ID = customtypes.NewUUIDValue(*respCreate.ID)
- plan.WorkspaceID = customtypes.NewUUIDValue(*respCreate.WorkspaceID)
+ plan.ID = customtypes.NewUUIDPointerValue(respCreate.ID)
+ plan.WorkspaceID = customtypes.NewUUIDPointerValue(respCreate.WorkspaceID)
if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
return
@@ -209,7 +212,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Read(ctx co
}
}
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { //revive:disable-line:confusing-naming
tflog.Debug(ctx, "UPDATE", map[string]any{
"action": "start",
})
@@ -236,9 +239,9 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx
ctx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()
- var reqUpdatePlan requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop]
+ var reqUpdatePlan requestUpdateFabricItem
- if r.checkUpdateItem(plan, state, &reqUpdatePlan) {
+ if fabricItemCheckUpdate(plan.DisplayName, plan.Description, state.DisplayName, state.Description, &reqUpdatePlan) {
tflog.Trace(ctx, fmt.Sprintf("updating %s (WorkspaceID: %s ItemID: %s)", r.Name, plan.WorkspaceID.ValueString(), plan.ID.ValueString()))
_, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdatePlan.UpdateItemRequest, nil)
@@ -253,9 +256,9 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx
resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
}
- var reqUpdateDefinition requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop]
+ var reqUpdateDefinition requestUpdateFabricItemDefinition
- doUpdateDefinition, diags := r.checkUpdateDefinition(ctx, plan, state, &reqUpdateDefinition)
+ doUpdateDefinition, diags := fabricItemCheckUpdateDefinition(ctx, plan.Definition, state.Definition, plan.Format, plan.DefinitionUpdateEnabled, r.DefinitionEmpty, r.DefinitionPathKeys, &reqUpdateDefinition)
if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
return
}
@@ -280,7 +283,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Update(ctx
}
}
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { //revive:disable-line:confusing-naming
tflog.Debug(ctx, "DELETE", map[string]any{
"action": "start",
})
@@ -312,7 +315,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) Delete(ctx
})
}
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { //revive:disable-line:confusing-naming
tflog.Debug(ctx, "IMPORT", map[string]any{
"action": "start",
})
@@ -353,13 +356,13 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ImportState
return
}
- var definition supertypes.MapNestedObjectValueOf[ResourceFabricItemDefinitionPartModel]
+ var definition supertypes.MapNestedObjectValueOf[resourceFabricItemDefinitionPartModel]
if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("definition"), &definition)...); resp.Diagnostics.HasError() {
return
}
state := ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]{
- baseFabricItemModel: baseFabricItemModel{
+ FabricItemPropertiesModel: FabricItemPropertiesModel[Ttfprop, Titemprop]{
ID: uuidFabricItemID,
WorkspaceID: uuidWorkspaceID,
},
@@ -383,7 +386,7 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) ImportState
}
}
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) get(ctx context.Context, model *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics {
+func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) get(ctx context.Context, model *ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { //revive:disable-line:confusing-naming
tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", r.Name, model.ID.ValueString()))
var fabricItem FabricItemProperties[Titemprop]
@@ -395,33 +398,5 @@ func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) get(ctx con
model.set(fabricItem)
- diags := r.PropertiesSetter(ctx, fabricItem.Properties, model)
- if diags.HasError() {
- return diags
- }
-
- return nil
-}
-
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) checkUpdateItem(plan, state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], reqUpdatePlan *requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop]) bool {
- var reqUpdateState requestUpdateFabricItemDefinitionProperties[Ttfprop, Titemprop]
-
- reqUpdatePlan.set(plan)
- reqUpdateState.set(state)
-
- return !reflect.DeepEqual(reqUpdatePlan.UpdateItemRequest, reqUpdateState.UpdateItemRequest)
-}
-
-func (r *ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) checkUpdateDefinition(ctx context.Context, plan, state ResourceFabricItemDefinitionPropertiesModel[Ttfprop, Titemprop], reqUpdate *requestUpdateFabricItemDefinitionPropertiesDefinition[Ttfprop, Titemprop]) (bool, diag.Diagnostics) {
- if !plan.Definition.Equal(state.Definition) && plan.DefinitionUpdateEnabled.ValueBool() {
- if diags := reqUpdate.set(ctx, plan, r.DefinitionEmpty, r.DefinitionPathKeys); diags.HasError() {
- return false, diags
- }
-
- if len(reqUpdate.Definition.Parts) > 0 && !plan.Definition.Equal(state.Definition) {
- return true, nil
- }
- }
-
- return false, nil
+ return r.PropertiesSetter(ctx, fabricItem.Properties, model)
}
diff --git a/internal/pkg/fabricitem/resource_item_properties.go b/internal/pkg/fabricitem/resource_item_properties.go
new file mode 100644
index 00000000..ece1bff2
--- /dev/null
+++ b/internal/pkg/fabricitem/resource_item_properties.go
@@ -0,0 +1,324 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package fabricitem
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/common"
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+ pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
+)
+
+// Ensure the implementation satisfies the expected interfaces.
+var (
+ _ resource.ResourceWithConfigure = (*ResourceFabricItemProperties[struct{}, struct{}])(nil)
+ _ resource.ResourceWithImportState = (*ResourceFabricItemProperties[struct{}, struct{}])(nil)
+)
+
+// ResourceFabricItemProperties is a generic Terraform resource implementation for
+// Fabric items that expose a computed "properties" block but no definition payload.
+// Ttfprop is the Terraform-side properties model type; Titemprop is the Fabric API
+// properties type mapped into it.
+type ResourceFabricItemProperties[Ttfprop, Titemprop any] struct {
+	ResourceFabricItem
+	// PropertiesAttributes holds the schema attributes rendered under "properties".
+	PropertiesAttributes map[string]schema.Attribute
+	// PropertiesSetter copies the API properties (from) into the Terraform model (to).
+	PropertiesSetter func(ctx context.Context, from *Titemprop, to *ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics
+	// ItemGetter fetches the Fabric item (with properties) for the given model.
+	ItemGetter func(ctx context.Context, fabricClient fabric.Client, model ResourceFabricItemPropertiesModel[Ttfprop, Titemprop], fabricItem *FabricItemProperties[Titemprop]) error
+}
+
+// NewResourceFabricItemProperties wraps a configured generic item-with-properties
+// resource so it satisfies the plugin framework's resource.Resource interface.
+func NewResourceFabricItemProperties[Ttfprop, Titemprop any](config ResourceFabricItemProperties[Ttfprop, Titemprop]) resource.Resource {
+	return &config
+}
+
+// Metadata sets the resource type name as "<provider>_<TFName>".
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { //revive:disable-line:confusing-naming
+	resp.TypeName = req.ProviderTypeName + "_" + r.TFName
+}
+
+// Schema builds the resource schema from the shared base attributes plus the
+// configured properties attributes.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { //revive:disable-line:confusing-naming
+	resp.Schema = getResourceFabricItemPropertiesSchema(ctx, *r)
+}
+
+// Configure captures provider-level configuration and creates the Fabric core
+// items client used by all CRUD operations.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { //revive:disable-line:confusing-naming
+	// ProviderData is nil during early framework calls (e.g. validation); not an error.
+	if req.ProviderData == nil {
+		return
+	}
+
+	pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
+	if !ok {
+		resp.Diagnostics.AddError(
+			common.ErrorResourceConfigType,
+			fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
+		)
+
+		return
+	}
+
+	r.pConfigData = pConfigData
+	r.client = fabcore.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
+}
+
+// Create provisions a new Fabric item from the plan (display name, description,
+// item type), then re-reads it via get to populate the computed properties
+// before persisting state.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { //revive:disable-line:confusing-naming
+	tflog.Debug(ctx, "CREATE", map[string]any{
+		"action": "start",
+	})
+	tflog.Trace(ctx, "CREATE", map[string]any{
+		"config": req.Config,
+		"plan":   req.Plan,
+	})
+
+	var plan ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]
+
+	if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
+	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	ctx, cancel := context.WithTimeout(ctx, timeout)
+	defer cancel()
+
+	var reqCreate requestCreateFabricItem
+
+	reqCreate.setDisplayName(plan.DisplayName)
+	reqCreate.setDescription(plan.Description)
+	reqCreate.setType(r.Type)
+
+	respCreate, err := r.client.CreateItem(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateItemRequest, nil)
+	if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Use the pointer-safe constructors: avoids dereferencing a nil ID/WorkspaceID
+	// from the API response and matches the other generic item resources.
+	plan.ID = customtypes.NewUUIDPointerValue(respCreate.ID)
+	plan.WorkspaceID = customtypes.NewUUIDPointerValue(respCreate.WorkspaceID)
+
+	if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
+
+	tflog.Debug(ctx, "CREATE", map[string]any{
+		"action": "end",
+	})
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+// Read refreshes state from the Fabric API. If the item no longer exists, the
+// resource is removed from state (so Terraform plans a re-create) instead of
+// raising an error.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { //revive:disable-line:confusing-naming
+	tflog.Debug(ctx, "READ", map[string]any{
+		"action": "start",
+	})
+	tflog.Trace(ctx, "READ", map[string]any{
+		"state": req.State,
+	})
+
+	var state ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]
+
+	if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
+	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	ctx, cancel := context.WithTimeout(ctx, timeout)
+	defer cancel()
+
+	// A not-found error is translated to state removal; any remaining
+	// diagnostics are still surfaced to the user.
+	diags = r.get(ctx, &state)
+	if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
+		resp.State.RemoveResource(ctx)
+
+		resp.Diagnostics.Append(diags...)
+
+		return
+	}
+
+	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
+
+	tflog.Debug(ctx, "READ", map[string]any{
+		"action": "end",
+	})
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+// Update pushes display-name/description changes to the Fabric API, then
+// re-reads the item to refresh computed properties before persisting state.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { //revive:disable-line:confusing-naming
+	tflog.Debug(ctx, "UPDATE", map[string]any{
+		"action": "start",
+	})
+	tflog.Trace(ctx, "UPDATE", map[string]any{
+		"config": req.Config,
+		"plan":   req.Plan,
+		"state":  req.State,
+	})
+
+	var plan, state ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]
+
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
+	resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
+	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	ctx, cancel := context.WithTimeout(ctx, timeout)
+	defer cancel()
+
+	var reqUpdate requestUpdateFabricItem
+
+	// Only call the API when display name or description actually changed,
+	// consistent with the other generic item resources; otherwise skip
+	// straight to refreshing computed properties.
+	if fabricItemCheckUpdate(plan.DisplayName, plan.Description, state.DisplayName, state.Description, &reqUpdate) {
+		tflog.Trace(ctx, fmt.Sprintf("updating %s (WorkspaceID: %s ItemID: %s)", r.Name, plan.WorkspaceID.ValueString(), plan.ID.ValueString()))
+
+		_, err := r.client.UpdateItem(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateItemRequest, nil)
+		if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
+			return
+		}
+	}
+
+	if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
+
+	tflog.Debug(ctx, "UPDATE", map[string]any{
+		"action": "end",
+	})
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+// Delete removes the Fabric item identified by the state's workspace/item IDs.
+// On success the framework drops the resource from state automatically.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { //revive:disable-line:confusing-naming
+	tflog.Debug(ctx, "DELETE", map[string]any{
+		"action": "start",
+	})
+	tflog.Trace(ctx, "DELETE", map[string]any{
+		"state": req.State,
+	})
+
+	var state ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]
+
+	if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
+	if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	ctx, cancel := context.WithTimeout(ctx, timeout)
+	defer cancel()
+
+	_, err := r.client.DeleteItem(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
+	if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	tflog.Debug(ctx, "DELETE", map[string]any{
+		"action": "end",
+	})
+}
+
+// ImportState parses an import ID of the form "WorkspaceID/<Type>ID", seeds a
+// minimal state (IDs + timeouts), then performs a full read to hydrate the
+// remaining attributes.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { //revive:disable-line:confusing-naming
+	tflog.Debug(ctx, "IMPORT", map[string]any{
+		"action": "start",
+	})
+	tflog.Trace(ctx, "IMPORT", map[string]any{
+		"id": req.ID,
+	})
+
+	// Expect exactly one "/" separating workspace ID and item ID.
+	workspaceID, fabricItemID, found := strings.Cut(req.ID, "/")
+	if !found {
+		resp.Diagnostics.AddError(
+			common.ErrorImportIdentifierHeader,
+			fmt.Sprintf(
+				common.ErrorImportIdentifierDetails,
+				fmt.Sprintf("WorkspaceID/%sID", string(r.Type)),
+			),
+		)
+
+		return
+	}
+
+	// Validate both halves as UUIDs before touching the API.
+	uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
+	resp.Diagnostics.Append(diags...)
+
+	uuidFabricItemID, diags := customtypes.NewUUIDValueMust(fabricItemID)
+	resp.Diagnostics.Append(diags...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var timeout timeouts.Value
+	if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	state := ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]{
+		FabricItemPropertiesModel: FabricItemPropertiesModel[Ttfprop, Titemprop]{
+			ID:          uuidFabricItemID,
+			WorkspaceID: uuidWorkspaceID,
+		},
+		Timeouts: timeout,
+	}
+
+	if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
+		return
+	}
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
+
+	tflog.Debug(ctx, "IMPORT", map[string]any{
+		"action": "end",
+	})
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+// get refreshes the model from the Fabric API: it fetches the item through the
+// configured ItemGetter, copies the base fields via model.set, then maps the
+// API properties into the Terraform model with PropertiesSetter.
+func (r *ResourceFabricItemProperties[Ttfprop, Titemprop]) get(ctx context.Context, model *ResourceFabricItemPropertiesModel[Ttfprop, Titemprop]) diag.Diagnostics { //revive:disable-line:confusing-naming
+	tflog.Trace(ctx, fmt.Sprintf("getting %s by ID: %s", r.Name, model.ID.ValueString()))
+
+	var fabricItem FabricItemProperties[Titemprop]
+
+	err := r.ItemGetter(ctx, *r.pConfigData.FabricClient, *model, &fabricItem)
+	if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
+		return diags
+	}
+
+	model.set(fabricItem)
+
+	return r.PropertiesSetter(ctx, fabricItem.Properties, model)
+}
diff --git a/internal/pkg/fabricitem/resource_schema.go b/internal/pkg/fabricitem/resource_schema.go
index 8d867243..698c3473 100644
--- a/internal/pkg/fabricitem/resource_schema.go
+++ b/internal/pkg/fabricitem/resource_schema.go
@@ -10,11 +10,14 @@ import (
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework-validators/boolvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
@@ -25,7 +28,7 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
)
-func GetResourceFabricItemSchema(ctx context.Context, r ResourceFabricItem) schema.Schema {
+func getResourceFabricItemSchema(ctx context.Context, r ResourceFabricItem) schema.Schema {
attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
return schema.Schema{
@@ -34,10 +37,10 @@ func GetResourceFabricItemSchema(ctx context.Context, r ResourceFabricItem) sche
}
}
-func GetResourceFabricItemDefinitionSchema(ctx context.Context, r ResourceFabricItemDefinition) schema.Schema {
+func getResourceFabricItemDefinitionSchema(ctx context.Context, r ResourceFabricItemDefinition) schema.Schema {
attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
- for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired) {
+ for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired, false) {
attributes[key] = value
}
@@ -47,21 +50,21 @@ func GetResourceFabricItemDefinitionSchema(ctx context.Context, r ResourceFabric
}
}
-func GetResourceFabricItemPropertiesSchema(ctx context.Context, itemName, markdownDescription string, displayNameMaxLength, descriptionMaxLength int, nameRenameAllowed bool, properties schema.SingleNestedAttribute) schema.Schema {
- attributes := getResourceFabricItemBaseAttributes(ctx, itemName, displayNameMaxLength, descriptionMaxLength, nameRenameAllowed)
- attributes["properties"] = properties
+func getResourceFabricItemPropertiesSchema[Ttfprop, Titemprop any](ctx context.Context, r ResourceFabricItemProperties[Ttfprop, Titemprop]) schema.Schema {
+ attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
+ attributes["properties"] = getResourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, r.Name, r.PropertiesAttributes)
return schema.Schema{
- MarkdownDescription: markdownDescription,
+ MarkdownDescription: r.MarkdownDescription,
Attributes: attributes,
}
}
-func GetResourceFabricItemDefinitionPropertiesSchema(ctx context.Context, r ResourceFabricItemDefinition, properties schema.SingleNestedAttribute) schema.Schema {
+func getResourceFabricItemDefinitionPropertiesSchema[Ttfprop, Titemprop any](ctx context.Context, r ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) schema.Schema {
attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
- attributes["properties"] = properties
+ attributes["properties"] = getResourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, r.Name, r.PropertiesAttributes)
- for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired) {
+ for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired, false) {
attributes[key] = value
}
@@ -71,11 +74,31 @@ func GetResourceFabricItemDefinitionPropertiesSchema(ctx context.Context, r Reso
}
}
-func GetResourceFabricItemDefinitionPropertiesSchema1[Ttfprop, Titemprop any](ctx context.Context, r ResourceFabricItemDefinitionProperties[Ttfprop, Titemprop]) schema.Schema {
+// getResourceFabricItemConfigPropertiesSchema builds the resource schema for a
+// Fabric item that has both a creation-time "configuration" block and computed
+// "properties", composed on top of the shared base attributes.
+func getResourceFabricItemConfigPropertiesSchema[Ttfprop, Titemprop, Ttfconfig, Titemconfig any](ctx context.Context, r ResourceFabricItemConfigProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) schema.Schema {
+	attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
+	attributes["configuration"] = getResourceFabricItemConfigNestedAttr[Ttfconfig](ctx, r.Name, r.IsConfigRequired, r.ConfigAttributes)
+	attributes["properties"] = getResourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, r.Name, r.PropertiesAttributes)
+
+	return schema.Schema{
+		MarkdownDescription: r.MarkdownDescription,
+		Attributes:          attributes,
+	}
+}
+
+func getResourceFabricItemConfigDefinitionPropertiesSchema[Ttfprop, Titemprop, Ttfconfig, Titemconfig any](ctx context.Context, r ResourceFabricItemConfigDefinitionProperties[Ttfprop, Titemprop, Ttfconfig, Titemconfig]) schema.Schema {
attributes := getResourceFabricItemBaseAttributes(ctx, r.Name, r.DisplayNameMaxLength, r.DescriptionMaxLength, r.NameRenameAllowed)
- attributes["properties"] = r.PropertiesSchema
+ attrConfiguration := getResourceFabricItemConfigNestedAttr[Ttfconfig](ctx, r.Name, r.IsConfigRequired, r.ConfigAttributes)
+ attrConfiguration.Validators = []validator.Object{
+ objectvalidator.ConflictsWith(
+ path.MatchRoot("definition"),
+ path.MatchRoot("definition_update_enabled"),
+ path.MatchRoot("format"),
+ ),
+ }
+ attributes["configuration"] = attrConfiguration
+ attributes["properties"] = getResourceFabricItemPropertiesNestedAttr[Ttfprop](ctx, r.Name, r.PropertiesAttributes)
- for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired) {
+ for key, value := range getResourceFabricItemDefinitionAttributes(ctx, r.Name, r.FormatTypeDefault, r.FormatTypes, r.DefinitionPathDocsURL, r.DefinitionPathKeys, r.DefinitionPathKeysValidator, r.DefinitionRequired, true) {
attributes[key] = value
}
@@ -85,13 +108,31 @@ func GetResourceFabricItemDefinitionPropertiesSchema1[Ttfprop, Titemprop any](ct
}
}
-func GetResourceFabricItemPropertiesCreationSchema(ctx context.Context, itemName, markdownDescription string, displayNameMaxLength, descriptionMaxLength int, nameRenameAllowed bool, properties, configuration schema.SingleNestedAttribute) schema.Schema {
- attributes := getResourceFabricItemBaseAttributes(ctx, itemName, displayNameMaxLength, descriptionMaxLength, nameRenameAllowed)
- attributes["properties"] = properties
- attributes["configuration"] = configuration
+func getResourceFabricItemConfigNestedAttr[Ttfconfig any](ctx context.Context, name string, isRequired bool, attributes map[string]schema.Attribute) schema.SingleNestedAttribute { //revive:disable-line:flag-parameter
+ result := schema.SingleNestedAttribute{
+ MarkdownDescription: "The " + name + " creation configuration.\n\n" +
+ "Any changes to this configuration will result in recreation of the " + name + ".",
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[Ttfconfig](ctx),
+ PlanModifiers: []planmodifier.Object{
+ objectplanmodifier.RequiresReplace(),
+ },
+ Attributes: attributes,
+ }
- return schema.Schema{
- MarkdownDescription: markdownDescription,
+ if isRequired {
+ result.Required = true
+ } else {
+ result.Optional = true
+ }
+
+ return result
+}
+
+func getResourceFabricItemPropertiesNestedAttr[Ttfprop any](ctx context.Context, name string, attributes map[string]schema.Attribute) schema.SingleNestedAttribute {
+ return schema.SingleNestedAttribute{
+ MarkdownDescription: "The " + name + " properties.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[Ttfprop](ctx),
Attributes: attributes,
}
}
@@ -147,48 +188,62 @@ func getResourceFabricItemBaseAttributes(ctx context.Context, name string, displ
}
// Helper function to get Fabric Item definition attributes.
-func getResourceFabricItemDefinitionAttributes(ctx context.Context, name, formatTypeDefault string, formatTypes []string, definitionPathDocsURL string, definitionPathKeys []string, definitionPathKeysValidator []validator.Map, definitionRequired bool) map[string]schema.Attribute { //revive:disable-line:flag-parameter
+func getResourceFabricItemDefinitionAttributes(ctx context.Context, name, formatTypeDefault string, formatTypes []string, definitionPathDocsURL string, definitionPathKeys []string, definitionPathKeysValidator []validator.Map, definitionRequired, alongConfiguration bool) map[string]schema.Attribute { //revive:disable-line:flag-parameter,argument-limit
attributes := make(map[string]schema.Attribute)
- attributes["definition_update_enabled"] = schema.BoolAttribute{
- MarkdownDescription: "Update definition on change of source content. Default: `true`.",
- Optional: true,
- Computed: true,
- Default: booldefault.StaticBool(true),
+ attrDefinitionUpdateEnabled := schema.BoolAttribute{}
+
+ attrDefinitionUpdateEnabled.MarkdownDescription = "Update definition on change of source content. Default: `true`."
+ attrDefinitionUpdateEnabled.Optional = true
+ attrDefinitionUpdateEnabled.Computed = true
+ attrDefinitionUpdateEnabled.Default = booldefault.StaticBool(true)
+
+ if alongConfiguration {
+ attrDefinitionUpdateEnabled.Validators = []validator.Bool{
+ boolvalidator.ConflictsWith(path.MatchRoot("configuration")),
+ }
}
+ attributes["definition_update_enabled"] = attrDefinitionUpdateEnabled
+
+ attrFormat := schema.StringAttribute{}
+ attrFormat.Computed = true
+
if len(formatTypes) > 0 {
- attributes["format"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s format. Possible values: %s.", name, utils.ConvertStringSlicesToString(formatTypes, true, false)),
- Computed: true,
- Default: stringdefault.StaticString(formatTypeDefault),
- }
+ attrFormat.MarkdownDescription = fmt.Sprintf("The %s format. Possible values: %s.", name, utils.ConvertStringSlicesToString(formatTypes, true, false))
+ attrFormat.Default = stringdefault.StaticString(formatTypeDefault)
} else {
- attributes["format"] = schema.StringAttribute{
- MarkdownDescription: fmt.Sprintf("The %s format. Possible values: `%s`", name, DefinitionFormatNotApplicable),
- Computed: true,
- Default: stringdefault.StaticString(DefinitionFormatNotApplicable),
+		attrFormat.MarkdownDescription = fmt.Sprintf("The %s format. Possible values: `%s`.", name, DefinitionFormatNotApplicable)
+ attrFormat.Default = stringdefault.StaticString(DefinitionFormatNotApplicable)
+ }
+
+ if alongConfiguration {
+ attrFormat.Validators = []validator.String{
+ stringvalidator.ConflictsWith(path.MatchRoot("configuration")),
}
}
+ attributes["format"] = attrFormat
+
+ attrDefinition := schema.MapNestedAttribute{}
+ attrDefinition.MarkdownDescription = fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(definitionPathKeys, true, false), name, definitionPathDocsURL)
+ attrDefinition.CustomType = supertypes.NewMapNestedObjectTypeOf[resourceFabricItemDefinitionPartModel](ctx)
+ attrDefinition.Validators = definitionPathKeysValidator
+ attrDefinition.NestedObject = getResourceFabricItemDefinitionPartSchema(ctx)
+
if definitionRequired {
- attributes["definition"] = schema.MapNestedAttribute{
- MarkdownDescription: fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(definitionPathKeys, true, false), name, definitionPathDocsURL),
- Required: true,
- CustomType: supertypes.NewMapNestedObjectTypeOf[ResourceFabricItemDefinitionPartModel](ctx),
- Validators: definitionPathKeysValidator,
- NestedObject: getResourceFabricItemDefinitionPartSchema(ctx),
- }
+ attrDefinition.Required = true
} else {
- attributes["definition"] = schema.MapNestedAttribute{
- MarkdownDescription: fmt.Sprintf("Definition parts. Accepted path keys: %s. Read more about [%s definition part paths](%s).", utils.ConvertStringSlicesToString(definitionPathKeys, true, false), name, definitionPathDocsURL),
- Optional: true,
- CustomType: supertypes.NewMapNestedObjectTypeOf[ResourceFabricItemDefinitionPartModel](ctx),
- Validators: definitionPathKeysValidator,
- NestedObject: getResourceFabricItemDefinitionPartSchema(ctx),
- }
+ attrDefinition.Optional = true
}
+ if alongConfiguration {
+ definitionPathKeysValidator = append(definitionPathKeysValidator, mapvalidator.ConflictsWith(path.MatchRoot("configuration")))
+ attrDefinition.Validators = definitionPathKeysValidator
+ }
+
+ attributes["definition"] = attrDefinition
+
return attributes
}
diff --git a/internal/pkg/utils/errors.go b/internal/pkg/utils/errors.go
index 203cb8cb..11d94e58 100644
--- a/internal/pkg/utils/errors.go
+++ b/internal/pkg/utils/errors.go
@@ -161,9 +161,9 @@ func GetDiagsFromError(ctx context.Context, err error, operation Operation, errI
}
if diagErrDetail == "" {
- diagErrDetail = fmt.Sprintf("%s\n\nErrorCode: %s\nRequestID: %s", errMessage, errCode, errRequestID)
+ diagErrDetail = fmt.Sprintf("%s\n\nError Code: %s\nRequest ID: %s", errMessage, errCode, errRequestID)
} else {
- diagErrDetail = fmt.Sprintf("%s: %s\n\nErrorCode: %s\nRequestID: %s", diagErrDetail, errMessage, errCode, errRequestID)
+ diagErrDetail = fmt.Sprintf("%s: %s\n\nError Code: %s\nRequest ID: %s", diagErrDetail, errMessage, errCode, errRequestID)
}
case errors.As(err, &errAuthFailed):
var errAuthResp authErrorResponse
diff --git a/internal/pkg/utils/errors_test.go b/internal/pkg/utils/errors_test.go
index 7c39ddf1..dc711f16 100644
--- a/internal/pkg/utils/errors_test.go
+++ b/internal/pkg/utils/errors_test.go
@@ -138,7 +138,7 @@ func TestUnit_GetDiagsFromError(t *testing.T) {
assert.Len(t, diags, 1)
assert.Equal(t, common.ErrorReadHeader, diags[0].Summary())
- assert.Equal(t, fmt.Sprintf("%s: %s\n\nErrorCode: %s\nRequestID: %s", common.ErrorReadDetails, "Message", "ErrorCode", requestID), diags[0].Detail())
+ assert.Equal(t, fmt.Sprintf("%s: %s\n\nError Code: %s\nRequest ID: %s", common.ErrorReadDetails, "Message", "ErrorCode", requestID), diags[0].Detail())
})
t.Run("azcore.ResponseError", func(t *testing.T) {
@@ -185,7 +185,7 @@ func TestUnit_GetDiagsFromError(t *testing.T) {
assert.Len(t, diags, 1)
assert.Equal(t, common.ErrorReadHeader, diags[0].Summary())
- assert.Equal(t, fmt.Sprintf("%s: %s\n\nErrorCode: %s\nRequestID: %s", common.ErrorReadDetails, "Message / MessageMoreDetails", "ErrorCode / ErrorCodeMoreDetails", requestID), diags[0].Detail())
+ assert.Equal(t, fmt.Sprintf("%s: %s\n\nError Code: %s\nRequest ID: %s", common.ErrorReadDetails, "Message / MessageMoreDetails", "ErrorCode / ErrorCodeMoreDetails", requestID), diags[0].Detail())
})
t.Run("azidentity.AuthenticationFailedError", func(t *testing.T) {
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
index e9c89e05..12b191b4 100644
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -358,11 +358,11 @@ func (p *FabricProvider) Resources(ctx context.Context) []func() resource.Resour
domain.NewResourceDomain,
domain.NewResourceDomainRoleAssignments,
domain.NewResourceDomainWorkspaceAssignments,
- environment.NewResourceEnvironment,
- eventhouse.NewResourceEventhouse,
+ func() resource.Resource { return environment.NewResourceEnvironment(ctx) },
+ func() resource.Resource { return eventhouse.NewResourceEventhouse(ctx) },
eventstream.NewResourceEventstream,
kqldatabase.NewResourceKQLDatabase,
- lakehouse.NewResourceLakehouse,
+ func() resource.Resource { return lakehouse.NewResourceLakehouse(ctx) },
mlexperiment.NewResourceMLExperiment,
mlmodel.NewResourceMLModel,
notebook.NewResourceNotebook,
@@ -371,7 +371,7 @@ func (p *FabricProvider) Resources(ctx context.Context) []func() resource.Resour
spark.NewResourceSparkCustomPool,
spark.NewResourceSparkEnvironmentSettings,
spark.NewResourceSparkWorkspaceSettings,
- func() resource.Resource { return sparkjobdefinition.NewResourceSparkJobDefinition(ctx) },
+ sparkjobdefinition.NewResourceSparkJobDefinition,
warehouse.NewResourceWarehouse,
workspace.NewResourceWorkspace,
workspace.NewResourceWorkspaceRoleAssignment,
@@ -390,18 +390,18 @@ func (p *FabricProvider) DataSources(ctx context.Context) []func() datasource.Da
domain.NewDataSourceDomain,
domain.NewDataSourceDomains,
domain.NewDataSourceDomainWorkspaceAssignments,
- environment.NewDataSourceEnvironment,
- environment.NewDataSourceEnvironments,
- eventhouse.NewDataSourceEventhouse,
- eventhouse.NewDataSourceEventhouses,
+ func() datasource.DataSource { return environment.NewDataSourceEnvironment(ctx) },
+ func() datasource.DataSource { return environment.NewDataSourceEnvironments(ctx) },
+ func() datasource.DataSource { return eventhouse.NewDataSourceEventhouse(ctx) },
+ func() datasource.DataSource { return eventhouse.NewDataSourceEventhouses(ctx) },
eventstream.NewDataSourceEventstream,
eventstream.NewDataSourceEventstreams,
kqldatabase.NewDataSourceKQLDatabase,
kqldatabase.NewDataSourceKQLDatabases,
kqlqueryset.NewDataSourceKQLQueryset,
kqlqueryset.NewDataSourceKQLQuerysets,
- lakehouse.NewDataSourceLakehouse,
- lakehouse.NewDataSourceLakehouses,
+ func() datasource.DataSource { return lakehouse.NewDataSourceLakehouse(ctx) },
+ func() datasource.DataSource { return lakehouse.NewDataSourceLakehouses(ctx) },
lakehouse.NewDataSourceLakehouseTable,
lakehouse.NewDataSourceLakehouseTables,
mirroredwarehouse.NewDataSourceMirroredWarehouses,
@@ -419,8 +419,8 @@ func (p *FabricProvider) DataSources(ctx context.Context) []func() datasource.Da
spark.NewDataSourceSparkCustomPool,
spark.NewDataSourceSparkEnvironmentSettings,
spark.NewDataSourceSparkWorkspaceSettings,
- func() datasource.DataSource { return sparkjobdefinition.NewDataSourceSparkJobDefinition(ctx) },
- func() datasource.DataSource { return sparkjobdefinition.NewDataSourceSparkJobDefinitions(ctx) },
+ sparkjobdefinition.NewDataSourceSparkJobDefinition,
+ sparkjobdefinition.NewDataSourceSparkJobDefinitions,
sqlendpoint.NewDataSourceSQLEndpoints,
warehouse.NewDataSourceWarehouse,
warehouse.NewDataSourceWarehouses,
diff --git a/internal/services/dashboard/data_dashboards_test.go b/internal/services/dashboard/data_dashboards_test.go
index 9dfe3e42..7e81702a 100644
--- a/internal/services/dashboard/data_dashboards_test.go
+++ b/internal/services/dashboard/data_dashboards_test.go
@@ -79,7 +79,7 @@ func TestAcc_DashboardsDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/datamart/data_datamarts_test.go b/internal/services/datamart/data_datamarts_test.go
index c2bdc0ab..ef9aacf3 100644
--- a/internal/services/datamart/data_datamarts_test.go
+++ b/internal/services/datamart/data_datamarts_test.go
@@ -79,7 +79,7 @@ func TestAcc_DatamartsDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/datapipeline/data_data_pipeline_test.go b/internal/services/datapipeline/data_data_pipeline_test.go
index 5a431229..738b9309 100644
--- a/internal/services/datapipeline/data_data_pipeline_test.go
+++ b/internal/services/datapipeline/data_data_pipeline_test.go
@@ -154,7 +154,7 @@ func TestAcc_DataPipelineDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["DataPipeline"].(map[string]any)
diff --git a/internal/services/datapipeline/data_data_pipelines_test.go b/internal/services/datapipeline/data_data_pipelines_test.go
index 3a773019..0d7abfc9 100644
--- a/internal/services/datapipeline/data_data_pipelines_test.go
+++ b/internal/services/datapipeline/data_data_pipelines_test.go
@@ -75,7 +75,7 @@ func TestUnit_DataPipelinesDataSource(t *testing.T) {
}
func TestAcc_DataPipelinesDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/datapipeline/resource_data_pipeline_test.go b/internal/services/datapipeline/resource_data_pipeline_test.go
index 0df01a15..4823b9f1 100644
--- a/internal/services/datapipeline/resource_data_pipeline_test.go
+++ b/internal/services/datapipeline/resource_data_pipeline_test.go
@@ -215,7 +215,7 @@ func TestAcc_DataPipelineResource_CRUD(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
diff --git a/internal/services/domain/resource_domain_workspace_assignments_test.go b/internal/services/domain/resource_domain_workspace_assignments_test.go
index 20ae26a9..408cf338 100644
--- a/internal/services/domain/resource_domain_workspace_assignments_test.go
+++ b/internal/services/domain/resource_domain_workspace_assignments_test.go
@@ -88,7 +88,7 @@ func TestAcc_DomainWorkspaceAssignmentsResource_CRUD(t *testing.T) {
domainResourceFQN := testhelp.ResourceFQN("fabric", itemTFName, "test")
- entity := testhelp.WellKnown()["Workspace"].(map[string]any)
+ entity := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
entityID := entity["id"].(string)
resource.Test(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/environment/data_environment.go b/internal/services/environment/data_environment.go
index 4f89f70c..b56d8243 100644
--- a/internal/services/environment/data_environment.go
+++ b/internal/services/environment/data_environment.go
@@ -5,238 +5,88 @@ package environment
import (
"context"
- "fmt"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
- "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fabenvironment "github.com/microsoft/fabric-sdk-go/fabric/environment"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSourceWithConfigValidators = (*dataSourceEnvironment)(nil)
- _ datasource.DataSourceWithConfigure = (*dataSourceEnvironment)(nil)
-)
-
-type dataSourceEnvironment struct {
- pConfigData *pconfig.ProviderData
- client *fabenvironment.ItemsClient
-}
-
-func NewDataSourceEnvironment() datasource.DataSource {
- return &dataSourceEnvironment{}
-}
-
-func (d *dataSourceEnvironment) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (d *dataSourceEnvironment) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- markdownDescription := "Get a Fabric " + ItemName + ".\n\n" +
- "Use this data source to fetch an [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport
-
- publishStatePossibleValuesMarkdown := "Publish state. Possible values: " + utils.ConvertStringSlicesToString(fabenvironment.PossibleEnvironmentPublishStateValues(), true, true) + "."
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "publish_details": schema.SingleNestedAttribute{
- MarkdownDescription: "Environment publish operation details.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentPublishDetailsModel](ctx),
- Attributes: map[string]schema.Attribute{
- "state": schema.StringAttribute{
- MarkdownDescription: publishStatePossibleValuesMarkdown,
- Computed: true,
- },
- "target_version": schema.StringAttribute{
- MarkdownDescription: "Target version to be published.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- "start_time": schema.StringAttribute{
- MarkdownDescription: "Start time of publish operation.",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- "end_time": schema.StringAttribute{
- MarkdownDescription: "End time of publish operation.",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- "component_publish_info": schema.SingleNestedAttribute{
- MarkdownDescription: "Environment component publish information.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentComponentPublishInfoModel](ctx),
- Attributes: map[string]schema.Attribute{
- "spark_libraries": schema.SingleNestedAttribute{
- MarkdownDescription: "Spark libraries publish information.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkLibrariesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "state": schema.StringAttribute{
- MarkdownDescription: publishStatePossibleValuesMarkdown,
- Computed: true,
- },
- },
- },
- "spark_settings": schema.SingleNestedAttribute{
- MarkdownDescription: "Spark settings publish information.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkSettingsModel](ctx),
- Attributes: map[string]schema.Attribute{
- "state": schema.StringAttribute{
- MarkdownDescription: publishStatePossibleValuesMarkdown,
- Computed: true,
- },
- },
- },
- },
- },
- },
- },
- },
- }
-
- itemConfig := fabricitem.DataSourceFabricItem{
- Type: ItemType,
- Name: ItemName,
- TFName: ItemTFName,
- MarkdownDescription: markdownDescription,
- IsDisplayNameUnique: true,
- }
+func NewDataSourceEnvironment(ctx context.Context) datasource.DataSource {
+ propertiesSetter := func(ctx context.Context, from *fabenvironment.PublishInfo, to *fabricitem.DataSourceFabricItemPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[environmentPropertiesModel](ctx)
- resp.Schema = fabricitem.GetDataSourceFabricItemPropertiesSchema(ctx, itemConfig, properties)
-}
-
-func (d *dataSourceEnvironment) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
- return []datasource.ConfigValidator{
- datasourcevalidator.Conflicting(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- datasourcevalidator.ExactlyOneOf(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- }
-}
-
-func (d *dataSourceEnvironment) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorDataSourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- d.pConfigData = pConfigData
- d.client = fabenvironment.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
+ if from != nil {
+ propertiesModel := &environmentPropertiesModel{}
-func (d *dataSourceEnvironment) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "config": req.Config,
- })
-
- var data dataSourceEnvironmentModel
-
- if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
+ if diags := propertiesModel.set(ctx, from); diags.HasError() {
+ return diags
+ }
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- if data.ID.ValueString() != "" {
- diags = d.getByID(ctx, &data)
- } else {
- diags = d.getByDisplayName(ctx, &data)
- }
+ to.Properties = properties
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo], fabricItem *fabricitem.FabricItemProperties[fabenvironment.PublishInfo]) error {
+ client := fabenvironment.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ respGet, err := client.GetEnvironment(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
-func (d *dataSourceEnvironment) getByID(ctx context.Context, model *dataSourceEnvironmentModel) diag.Diagnostics {
- tflog.Trace(ctx, "getting Environment by 'id'")
+ fabricItem.Set(respGet.Environment)
- respGet, err := d.client.GetEnvironment(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
- return diags
+ return nil
}
- model.set(respGet.Environment)
-
- return model.setProperties(ctx, respGet.Environment)
-}
-
-func (d *dataSourceEnvironment) getByDisplayName(ctx context.Context, model *dataSourceEnvironmentModel) diag.Diagnostics {
- tflog.Trace(ctx, "getting Environment by 'display_name'")
-
- var diags diag.Diagnostics
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo], errNotFound fabcore.ResponseError, fabricItem *fabricitem.FabricItemProperties[fabenvironment.PublishInfo]) error {
+ client := fabenvironment.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- pager := d.client.NewListEnvironmentsPager(model.WorkspaceID.ValueString(), nil)
- for pager.More() {
- page, err := pager.NextPage(ctx)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationList, nil); diags.HasError() {
- return diags
- }
+ pager := client.NewListEnvironmentsPager(model.WorkspaceID.ValueString(), nil)
+ for pager.More() {
+ page, err := pager.NextPage(ctx)
+ if err != nil {
+ return err
+ }
- for _, entity := range page.Value {
- if *entity.DisplayName == model.DisplayName.ValueString() {
- model.set(entity)
+ for _, entity := range page.Value {
+ if *entity.DisplayName == model.DisplayName.ValueString() {
+ fabricItem.Set(entity)
- return model.setProperties(ctx, entity)
+ return nil
+ }
}
}
+
+ return &errNotFound
}
- diags.AddError(
- common.ErrorReadHeader,
- fmt.Sprintf("Unable to find Environment with 'display_name': %s in the Workspace ID: %s ", model.DisplayName.ValueString(), model.WorkspaceID.ValueString()),
- )
+ config := fabricitem.DataSourceFabricItemProperties[environmentPropertiesModel, fabenvironment.PublishInfo]{
+ DataSourceFabricItem: fabricitem.DataSourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ TFName: ItemTFName,
+ MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" +
+ "Use this data source to fetch an [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ IsDisplayNameUnique: true,
+ },
+ PropertiesAttributes: getDataSourceEnvironmentPropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
+ ItemListGetter: itemListGetter,
+ }
- return diags
+ return fabricitem.NewDataSourceFabricItemProperties(config)
}
diff --git a/internal/services/environment/data_environment_test.go b/internal/services/environment/data_environment_test.go
index 60a7c8d2..d2ac11d5 100644
--- a/internal/services/environment/data_environment_test.go
+++ b/internal/services/environment/data_environment_test.go
@@ -105,6 +105,9 @@ func TestUnit_EnvironmentDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// read by id - not found
@@ -133,6 +136,9 @@ func TestUnit_EnvironmentDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// read by name - not found
@@ -150,7 +156,7 @@ func TestUnit_EnvironmentDataSource(t *testing.T) {
}
func TestAcc_EnvironmentDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Environment"].(map[string]any)
@@ -173,6 +179,9 @@ func TestAcc_EnvironmentDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// read by id - not found
@@ -200,6 +209,9 @@ func TestAcc_EnvironmentDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// read by name - not found
diff --git a/internal/services/environment/data_environments.go b/internal/services/environment/data_environments.go
index e5b4c757..4fcf7ccc 100644
--- a/internal/services/environment/data_environments.go
+++ b/internal/services/environment/data_environments.go
@@ -4,21 +4,75 @@
package environment
import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabenvironment "github.com/microsoft/fabric-sdk-go/fabric/environment"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-func NewDataSourceEnvironments() datasource.DataSource {
- config := fabricitem.DataSourceFabricItems{
- Type: ItemType,
- Name: ItemName,
- Names: ItemsName,
- TFName: ItemsTFName,
- MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
- "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport,
+func NewDataSourceEnvironments(ctx context.Context) datasource.DataSource {
+ propertiesSetter := func(ctx context.Context, from *fabenvironment.PublishInfo, to *fabricitem.FabricItemPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[environmentPropertiesModel](ctx)
+
+ if from != nil {
+ propertiesModel := &environmentPropertiesModel{}
+
+ if diags := propertiesModel.set(ctx, from); diags.HasError() {
+ return diags
+ }
+
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
+
+ to.Properties = properties
+
+ return nil
+ }
+
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemsPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo], fabricItems *[]fabricitem.FabricItemProperties[fabenvironment.PublishInfo]) error {
+ client := fabenvironment.NewClientFactoryWithClient(fabricClient).NewItemsClient()
+
+ fabItems := make([]fabricitem.FabricItemProperties[fabenvironment.PublishInfo], 0)
+
+ respList, err := client.ListEnvironments(ctx, model.WorkspaceID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
+
+ for _, entity := range respList {
+ var fabricItem fabricitem.FabricItemProperties[fabenvironment.PublishInfo]
+
+ fabricItem.Set(entity)
+
+ fabItems = append(fabItems, fabricItem)
+ }
+
+ *fabricItems = fabItems
+
+ return nil
+ }
+
+ config := fabricitem.DataSourceFabricItemsProperties[environmentPropertiesModel, fabenvironment.PublishInfo]{
+ DataSourceFabricItems: fabricitem.DataSourceFabricItems{
+ Type: ItemType,
+ Name: ItemName,
+ Names: ItemsName,
+ TFName: ItemsTFName,
+ MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
+ "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ },
+ PropertiesAttributes: getDataSourceEnvironmentPropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemListGetter: itemListGetter,
}
- return fabricitem.NewDataSourceFabricItems(config)
+ return fabricitem.NewDataSourceFabricItemsProperties(config)
}
diff --git a/internal/services/environment/data_environments_test.go b/internal/services/environment/data_environments_test.go
index 280399b8..eab63c1c 100644
--- a/internal/services/environment/data_environments_test.go
+++ b/internal/services/environment/data_environments_test.go
@@ -22,11 +22,11 @@ var (
func TestUnit_EnvironmentsDataSource(t *testing.T) {
workspaceID := testhelp.RandomUUID()
- entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID)
+ entity := fakes.NewRandomEnvironmentWithWorkspace(workspaceID)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomEnvironmentWithWorkspace(workspaceID))
fakes.FakeServer.Upsert(entity)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomEnvironmentWithWorkspace(workspaceID))
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, nil, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
@@ -69,13 +69,16 @@ func TestUnit_EnvironmentsDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "workspace_id", entity.WorkspaceID),
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "values.1.id", entity.ID),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.publish_details.component_publish_info.spark_settings.state"),
),
},
}))
}
func TestAcc_EnvironmentsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
@@ -90,6 +93,9 @@ func TestAcc_EnvironmentsDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testDataSourceItemsFQN, "workspace_id", workspaceID),
resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.publish_details.component_publish_info.spark_settings.state"),
),
},
},
diff --git a/internal/services/environment/models.go b/internal/services/environment/models.go
index bd9c8705..77b85355 100644
--- a/internal/services/environment/models.go
+++ b/internal/services/environment/models.go
@@ -15,45 +15,6 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
)
-type baseEnvironmentModel struct {
- WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
- ID customtypes.UUID `tfsdk:"id"`
- DisplayName types.String `tfsdk:"display_name"`
- Description types.String `tfsdk:"description"`
-}
-
-func (to *baseEnvironmentModel) set(from fabenvironment.Environment) {
- to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
- to.ID = customtypes.NewUUIDPointerValue(from.ID)
- to.DisplayName = types.StringPointerValue(from.DisplayName)
- to.Description = types.StringPointerValue(from.Description)
-}
-
-type baseEnvironmentPropertiesModel struct {
- baseEnvironmentModel
- Properties supertypes.SingleNestedObjectValueOf[environmentPropertiesModel] `tfsdk:"properties"`
-}
-
-func (to *baseEnvironmentPropertiesModel) setProperties(ctx context.Context, from fabenvironment.Environment) diag.Diagnostics {
- properties := supertypes.NewSingleNestedObjectValueOfNull[environmentPropertiesModel](ctx)
-
- if from.Properties != nil {
- propertiesModel := &environmentPropertiesModel{}
-
- if diags := propertiesModel.set(ctx, from.Properties); diags.HasError() {
- return diags
- }
-
- if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
- return diags
- }
- }
-
- to.Properties = properties
-
- return nil
-}
-
type environmentPropertiesModel struct {
PublishDetails supertypes.SingleNestedObjectValueOf[environmentPublishDetailsModel] `tfsdk:"publish_details"`
}
diff --git a/internal/services/environment/models_resource_environment.go b/internal/services/environment/models_resource_environment.go
deleted file mode 100644
index ffd12469..00000000
--- a/internal/services/environment/models_resource_environment.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) Microsoft Corporation
-// SPDX-License-Identifier: MPL-2.0
-
-package environment
-
-import (
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- fabenvironment "github.com/microsoft/fabric-sdk-go/fabric/environment"
-)
-
-type resourceEnvironmentModel struct {
- baseEnvironmentPropertiesModel
- Timeouts timeouts.Value `tfsdk:"timeouts"`
-}
-
-type requestCreateEnvironment struct {
- fabenvironment.CreateEnvironmentRequest
-}
-
-func (to *requestCreateEnvironment) set(from resourceEnvironmentModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
-
-type requestUpdateEnvironment struct {
- fabenvironment.UpdateEnvironmentRequest
-}
-
-func (to *requestUpdateEnvironment) set(from resourceEnvironmentModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
diff --git a/internal/services/environment/resource_environment.go b/internal/services/environment/resource_environment.go
index 2535fc09..1a06f5a9 100644
--- a/internal/services/environment/resource_environment.go
+++ b/internal/services/environment/resource_environment.go
@@ -5,384 +5,66 @@ package environment
import (
"context"
- "fmt"
- "strings"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+ "github.com/microsoft/fabric-sdk-go/fabric"
fabenvironment "github.com/microsoft/fabric-sdk-go/fabric/environment"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ resource.ResourceWithConfigure = (*resourceEnvironment)(nil)
- _ resource.ResourceWithImportState = (*resourceEnvironment)(nil)
-)
-
-type resourceEnvironment struct {
- pConfigData *pconfig.ProviderData
- client *fabenvironment.ItemsClient
-}
-
-func NewResourceEnvironment() resource.Resource {
- return &resourceEnvironment{}
-}
-
-func (r *resourceEnvironment) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (r *resourceEnvironment) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- markdownDescription := "This resource manages a Fabric " + ItemName + ".\n\n" +
- "See [" + ItemName + "](" + ItemDocsURL + ") for more information.\n\n" +
- ItemDocsSPNSupport
-
- publishStatePossibleValuesMarkdown := "Publish state. Possible values: " + utils.ConvertStringSlicesToString(fabenvironment.PossibleEnvironmentPublishStateValues(), true, true) + "."
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "publish_details": schema.SingleNestedAttribute{
- MarkdownDescription: "Environment publish operation details.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentPublishDetailsModel](ctx),
- Attributes: map[string]schema.Attribute{
- "state": schema.StringAttribute{
- MarkdownDescription: publishStatePossibleValuesMarkdown,
- Computed: true,
- },
- "target_version": schema.StringAttribute{
- MarkdownDescription: "Target version to be published.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- "start_time": schema.StringAttribute{
- MarkdownDescription: "Start time of publish operation.",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- "end_time": schema.StringAttribute{
- MarkdownDescription: "End time of publish operation.",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- "component_publish_info": schema.SingleNestedAttribute{
- MarkdownDescription: "Environment component publish information.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentComponentPublishInfoModel](ctx),
- Attributes: map[string]schema.Attribute{
- "spark_libraries": schema.SingleNestedAttribute{
- MarkdownDescription: "Spark libraries publish information.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkLibrariesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "state": schema.StringAttribute{
- MarkdownDescription: publishStatePossibleValuesMarkdown,
- Computed: true,
- },
- },
- },
- "spark_settings": schema.SingleNestedAttribute{
- MarkdownDescription: "Spark settings publish information.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkSettingsModel](ctx),
- Attributes: map[string]schema.Attribute{
- "state": schema.StringAttribute{
- MarkdownDescription: publishStatePossibleValuesMarkdown,
- Computed: true,
- },
- },
- },
- },
- },
- },
- },
- },
- }
-
- resp.Schema = fabricitem.GetResourceFabricItemPropertiesSchema(ctx, ItemName, markdownDescription, 123, 256, true, properties)
-}
-
-func (r *resourceEnvironment) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorResourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- r.pConfigData = pConfigData
- r.client = fabenvironment.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
-
-func (r *resourceEnvironment) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "CREATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- })
-
- var plan resourceEnvironmentModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqCreate requestCreateEnvironment
-
- reqCreate.set(plan)
-
- respCreate, err := r.client.CreateEnvironment(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateEnvironmentRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- plan.set(respCreate.Environment)
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceEnvironment) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "state": req.State,
- })
-
- var state resourceEnvironmentModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- diags = r.get(ctx, &state)
- if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
- resp.State.RemoveResource(ctx)
-
- resp.Diagnostics.Append(diags...)
+func NewResourceEnvironment(ctx context.Context) resource.Resource {
+ propertiesSetter := func(ctx context.Context, from *fabenvironment.PublishInfo, to *fabricitem.ResourceFabricItemPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[environmentPropertiesModel](ctx)
- return
- }
-
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceEnvironment) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "UPDATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- "state": req.State,
- })
-
- var plan resourceEnvironmentModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqUpdate requestUpdateEnvironment
-
- reqUpdate.set(plan)
-
- respUpdate, err := r.client.UpdateEnvironment(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateEnvironmentRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- plan.set(respUpdate.Environment)
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "end",
- })
+ if from != nil {
+ propertiesModel := &environmentPropertiesModel{}
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ if diags := propertiesModel.set(ctx, from); diags.HasError() {
+ return diags
+ }
-func (r *resourceEnvironment) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "DELETE", map[string]any{
- "state": req.State,
- })
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- var state resourceEnvironmentModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
+ to.Properties = properties
- timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.ResourceFabricItemPropertiesModel[environmentPropertiesModel, fabenvironment.PublishInfo], fabricItem *fabricitem.FabricItemProperties[fabenvironment.PublishInfo]) error {
+ client := fabenvironment.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- _, err := r.client.DeleteEnvironment(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "end",
- })
-}
+ respGet, err := client.GetEnvironment(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
-func (r *resourceEnvironment) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "IMPORT", map[string]any{
- "id": req.ID,
- })
+ fabricItem.Set(respGet.Environment)
- workspaceID, environmentID, found := strings.Cut(req.ID, "/")
- if !found {
- resp.Diagnostics.AddError(
- common.ErrorImportIdentifierHeader,
- fmt.Sprintf(common.ErrorImportIdentifierDetails, "WorkspaceID/EnvironmentID"),
- )
-
- return
- }
-
- uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
- resp.Diagnostics.Append(diags...)
-
- uuidID, diags := customtypes.NewUUIDValueMust(environmentID)
- resp.Diagnostics.Append(diags...)
-
- if resp.Diagnostics.HasError() {
- return
+ return nil
}
- var timeout timeouts.Value
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
- return
- }
-
- state := resourceEnvironmentModel{}
- state.ID = uuidID
- state.WorkspaceID = uuidWorkspaceID
- state.Timeouts = timeout
-
- if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceEnvironment) get(ctx context.Context, model *resourceEnvironmentModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET", map[string]any{
- "workspace_id": model.WorkspaceID.ValueString(),
- "id": model.ID.ValueString(),
- })
-
- respGet, err := r.client.GetEnvironment(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, fabcore.ErrCommon.EntityNotFound); diags.HasError() {
- return diags
+ config := fabricitem.ResourceFabricItemProperties[environmentPropertiesModel, fabenvironment.PublishInfo]{
+ ResourceFabricItem: fabricitem.ResourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ NameRenameAllowed: true,
+ TFName: ItemTFName,
+ MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" +
+ "Use this resource to manage an [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ DisplayNameMaxLength: 123,
+ DescriptionMaxLength: 256,
+ },
+ PropertiesAttributes: getResourceEnvironmentPropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
}
- model.set(respGet.Environment)
-
- return model.setProperties(ctx, respGet.Environment)
+ return fabricitem.NewResourceFabricItemProperties(config)
}
diff --git a/internal/services/environment/resource_environment_test.go b/internal/services/environment/resource_environment_test.go
index 3d1bf87b..8259bfa4 100644
--- a/internal/services/environment/resource_environment_test.go
+++ b/internal/services/environment/resource_environment_test.go
@@ -188,6 +188,9 @@ func TestUnit_EnvironmentResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityBefore.DisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// Update and Read
@@ -204,6 +207,9 @@ func TestUnit_EnvironmentResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityAfter.DisplayName),
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "description", entityAfter.Description),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// Delete testing automatically occurs in TestCase
@@ -211,7 +217,7 @@ func TestUnit_EnvironmentResource_CRUD(t *testing.T) {
}
func TestAcc_EnvironmentResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
@@ -232,6 +238,9 @@ func TestAcc_EnvironmentResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
// Update and Read
@@ -248,6 +257,9 @@ func TestAcc_EnvironmentResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_libraries.state"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.publish_details.component_publish_info.spark_settings.state"),
),
},
},
diff --git a/internal/services/environment/schema_data_environment.go b/internal/services/environment/schema_data_environment.go
new file mode 100644
index 00000000..0fd4282c
--- /dev/null
+++ b/internal/services/environment/schema_data_environment.go
@@ -0,0 +1,80 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package environment
+
+import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ fabenvironment "github.com/microsoft/fabric-sdk-go/fabric/environment"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+)
+
+func getDataSourceEnvironmentPropertiesAttributes(ctx context.Context) map[string]schema.Attribute {
+ publishStatePossibleValuesMarkdown := "Publish state. Possible values: " + utils.ConvertStringSlicesToString(fabenvironment.PossibleEnvironmentPublishStateValues(), true, true) + "."
+
+ result := map[string]schema.Attribute{
+ "publish_details": schema.SingleNestedAttribute{
+ MarkdownDescription: "Environment publish operation details.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentPublishDetailsModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "state": schema.StringAttribute{
+ MarkdownDescription: publishStatePossibleValuesMarkdown,
+ Computed: true,
+ },
+ "target_version": schema.StringAttribute{
+ MarkdownDescription: "Target version to be published.",
+ Computed: true,
+ CustomType: customtypes.UUIDType{},
+ },
+ "start_time": schema.StringAttribute{
+ MarkdownDescription: "Start time of publish operation.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ "end_time": schema.StringAttribute{
+ MarkdownDescription: "End time of publish operation.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ "component_publish_info": schema.SingleNestedAttribute{
+ MarkdownDescription: "Environment component publish information.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentComponentPublishInfoModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "spark_libraries": schema.SingleNestedAttribute{
+ MarkdownDescription: "Spark libraries publish information.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkLibrariesModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "state": schema.StringAttribute{
+ MarkdownDescription: publishStatePossibleValuesMarkdown,
+ Computed: true,
+ },
+ },
+ },
+ "spark_settings": schema.SingleNestedAttribute{
+ MarkdownDescription: "Spark settings publish information.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkSettingsModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "state": schema.StringAttribute{
+ MarkdownDescription: publishStatePossibleValuesMarkdown,
+ Computed: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/environment/schema_resource_environment.go b/internal/services/environment/schema_resource_environment.go
new file mode 100644
index 00000000..c2bc516f
--- /dev/null
+++ b/internal/services/environment/schema_resource_environment.go
@@ -0,0 +1,80 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package environment
+
+import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ fabenvironment "github.com/microsoft/fabric-sdk-go/fabric/environment"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+)
+
+func getResourceEnvironmentPropertiesAttributes(ctx context.Context) map[string]schema.Attribute {
+ publishStatePossibleValuesMarkdown := "Publish state. Possible values: " + utils.ConvertStringSlicesToString(fabenvironment.PossibleEnvironmentPublishStateValues(), true, true) + "."
+
+ result := map[string]schema.Attribute{
+ "publish_details": schema.SingleNestedAttribute{
+ MarkdownDescription: "Environment publish operation details.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentPublishDetailsModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "state": schema.StringAttribute{
+ MarkdownDescription: publishStatePossibleValuesMarkdown,
+ Computed: true,
+ },
+ "target_version": schema.StringAttribute{
+ MarkdownDescription: "Target version to be published.",
+ Computed: true,
+ CustomType: customtypes.UUIDType{},
+ },
+ "start_time": schema.StringAttribute{
+ MarkdownDescription: "Start time of publish operation.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ "end_time": schema.StringAttribute{
+ MarkdownDescription: "End time of publish operation.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ "component_publish_info": schema.SingleNestedAttribute{
+ MarkdownDescription: "Environment component publish information.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentComponentPublishInfoModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "spark_libraries": schema.SingleNestedAttribute{
+ MarkdownDescription: "Spark libraries publish information.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkLibrariesModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "state": schema.StringAttribute{
+ MarkdownDescription: publishStatePossibleValuesMarkdown,
+ Computed: true,
+ },
+ },
+ },
+ "spark_settings": schema.SingleNestedAttribute{
+ MarkdownDescription: "Spark settings publish information.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[environmentSparkSettingsModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "state": schema.StringAttribute{
+ MarkdownDescription: publishStatePossibleValuesMarkdown,
+ Computed: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/eventhouse/base.go b/internal/services/eventhouse/base.go
index d6d6c545..8d30857e 100644
--- a/internal/services/eventhouse/base.go
+++ b/internal/services/eventhouse/base.go
@@ -10,11 +10,19 @@ import (
)
const (
- ItemName = "Eventhouse"
- ItemTFName = "eventhouse"
- ItemsName = "Eventhouses"
- ItemsTFName = "eventhouses"
- ItemType = fabcore.ItemTypeEventhouse
- ItemDocsSPNSupport = common.DocsSPNSupported
- ItemDocsURL = "https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse"
+ ItemName = "Eventhouse"
+ ItemTFName = "eventhouse"
+ ItemsName = "Eventhouses"
+ ItemsTFName = "eventhouses"
+ ItemType = fabcore.ItemTypeEventhouse
+ ItemDocsSPNSupport = common.DocsSPNSupported
+ ItemDocsURL = "https://learn.microsoft.com/fabric/real-time-intelligence/eventhouse"
+ ItemFormatTypeDefault = ""
+ ItemDefinitionEmpty = `{}`
+ ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/eventhouse-definition"
+)
+
+var (
+ ItemFormatTypes = []string{} //nolint:gochecknoglobals
+ ItemDefinitionPaths = []string{"EventhouseProperties.json"} //nolint:gochecknoglobals
)
diff --git a/internal/services/eventhouse/data_eventhouse.go b/internal/services/eventhouse/data_eventhouse.go
index ea790602..45da53ed 100644
--- a/internal/services/eventhouse/data_eventhouse.go
+++ b/internal/services/eventhouse/data_eventhouse.go
@@ -5,198 +5,88 @@ package eventhouse
import (
"context"
- "fmt"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fabeventhouse "github.com/microsoft/fabric-sdk-go/fabric/eventhouse"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSourceWithConfigValidators = (*dataSourceEventhouse)(nil)
- _ datasource.DataSourceWithConfigure = (*dataSourceEventhouse)(nil)
-)
-
-type dataSourceEventhouse struct {
- pConfigData *pconfig.ProviderData
- client *fabeventhouse.ItemsClient
-}
-
-func NewDataSourceEventhouse() datasource.DataSource {
- return &dataSourceEventhouse{}
-}
-
-func (d *dataSourceEventhouse) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (d *dataSourceEventhouse) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- markdownDescription := "Get a Fabric " + ItemName + ".\n\n" +
- "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[eventhousePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "ingestion_service_uri": schema.StringAttribute{
- MarkdownDescription: "Ingestion service URI.",
- Computed: true,
- },
- "query_service_uri": schema.StringAttribute{
- MarkdownDescription: "Query service URI.",
- Computed: true,
- },
- "database_ids": schema.ListAttribute{
- MarkdownDescription: "The IDs list of KQL Databases.",
- Computed: true,
- CustomType: supertypes.NewListTypeOf[string](ctx),
- },
- },
- }
-
- itemConfig := fabricitem.DataSourceFabricItem{
- Type: ItemType,
- Name: ItemName,
- TFName: ItemTFName,
- MarkdownDescription: markdownDescription,
- IsDisplayNameUnique: true,
- }
+func NewDataSourceEventhouse(ctx context.Context) datasource.DataSource {
+ propertiesSetter := func(ctx context.Context, from *fabeventhouse.Properties, to *fabricitem.DataSourceFabricItemDefinitionPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[eventhousePropertiesModel](ctx)
- resp.Schema = fabricitem.GetDataSourceFabricItemPropertiesSchema(ctx, itemConfig, properties)
-}
-
-func (d *dataSourceEventhouse) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
- return []datasource.ConfigValidator{
- datasourcevalidator.Conflicting(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- datasourcevalidator.ExactlyOneOf(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- }
-}
-
-func (d *dataSourceEventhouse) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorDataSourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- d.pConfigData = pConfigData
- d.client = fabeventhouse.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
+ if from != nil {
+ propertiesModel := &eventhousePropertiesModel{}
+ propertiesModel.set(ctx, from)
-func (d *dataSourceEventhouse) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "config": req.Config,
- })
-
- var data dataSourceEventhouseModel
-
- if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- if data.ID.ValueString() != "" {
- diags = d.getByID(ctx, &data)
- } else {
- diags = d.getByDisplayName(ctx, &data)
- }
+ to.Properties = properties
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemDefinitionPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties], fabricItem *fabricitem.FabricItemProperties[fabeventhouse.Properties]) error {
+ client := fabeventhouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ respGet, err := client.GetEventhouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
-func (d *dataSourceEventhouse) getByID(ctx context.Context, model *dataSourceEventhouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET BY ID", map[string]any{
- "workspace_id": model.WorkspaceID.ValueString(),
- "id": model.ID.ValueString(),
- })
+ fabricItem.Set(respGet.Eventhouse)
- respGet, err := d.client.GetEventhouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
- return diags
+ return nil
}
- model.set(respGet.Eventhouse)
-
- return model.setProperties(ctx, respGet.Eventhouse)
-}
-
-func (d *dataSourceEventhouse) getByDisplayName(ctx context.Context, model *dataSourceEventhouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET BY DISPLAY NAME", map[string]any{
- "workspace_id": model.WorkspaceID.ValueString(),
- "display_name": model.DisplayName.ValueString(),
- })
-
- var diags diag.Diagnostics
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemDefinitionPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties], errNotFound fabcore.ResponseError, fabricItem *fabricitem.FabricItemProperties[fabeventhouse.Properties]) error {
+ client := fabeventhouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- pager := d.client.NewListEventhousesPager(model.WorkspaceID.ValueString(), nil)
- for pager.More() {
- page, err := pager.NextPage(ctx)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationList, nil); diags.HasError() {
- return diags
- }
+ pager := client.NewListEventhousesPager(model.WorkspaceID.ValueString(), nil)
+ for pager.More() {
+ page, err := pager.NextPage(ctx)
+ if err != nil {
+ return err
+ }
- for _, entity := range page.Value {
- if *entity.DisplayName == model.DisplayName.ValueString() {
- model.set(entity)
+ for _, entity := range page.Value {
+ if *entity.DisplayName == model.DisplayName.ValueString() {
+ fabricItem.Set(entity)
- return model.setProperties(ctx, entity)
+ return nil
+ }
}
}
+
+ return &errNotFound
}
- diags.AddError(
- common.ErrorReadHeader,
- fmt.Sprintf("Unable to find %s with display_name: '%s' in the Workspace ID: %s ", ItemName, model.DisplayName.ValueString(), model.WorkspaceID.ValueString()),
- )
+ config := fabricitem.DataSourceFabricItemDefinitionProperties[eventhousePropertiesModel, fabeventhouse.Properties]{
+ DataSourceFabricItemDefinition: fabricitem.DataSourceFabricItemDefinition{
+ Type: ItemType,
+ Name: ItemName,
+ TFName: ItemTFName,
+ MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" +
+ "Use this data source to fetch an [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ IsDisplayNameUnique: true,
+ FormatTypeDefault: ItemFormatTypeDefault,
+ FormatTypes: ItemFormatTypes,
+ DefinitionPathKeys: ItemDefinitionPaths,
+ },
+ PropertiesAttributes: getDataSourceEventhousePropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
+ ItemListGetter: itemListGetter,
+ }
- return diags
+ return fabricitem.NewDataSourceFabricItemDefinitionProperties(config)
}
diff --git a/internal/services/eventhouse/data_eventhouse_test.go b/internal/services/eventhouse/data_eventhouse_test.go
index b53bb443..36b46e05 100644
--- a/internal/services/eventhouse/data_eventhouse_test.go
+++ b/internal/services/eventhouse/data_eventhouse_test.go
@@ -89,7 +89,7 @@ func TestUnit_EventhouseDataSource(t *testing.T) {
"id": *entity.ID,
},
),
- ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found.`),
+ ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found`),
},
// read by id
{
@@ -105,6 +105,9 @@ func TestUnit_EventhouseDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_ids.0"),
),
},
// read by id - not found
@@ -133,6 +136,9 @@ func TestUnit_EventhouseDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_ids.0"),
),
},
// read by name - not found
@@ -150,7 +156,7 @@ func TestUnit_EventhouseDataSource(t *testing.T) {
}
func TestAcc_EventhouseDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Eventhouse"].(map[string]any)
@@ -174,6 +180,9 @@ func TestAcc_EventhouseDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_ids.0"),
),
},
// read by id - not found
@@ -203,6 +212,9 @@ func TestAcc_EventhouseDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_ids.0"),
),
},
// read by name - not found
@@ -217,5 +229,24 @@ func TestAcc_EventhouseDataSource(t *testing.T) {
),
ExpectError: regexp.MustCompile(common.ErrorReadHeader),
},
+ // read by id with definition
+ {
+ ResourceName: testDataSourceItemFQN,
+ Config: at.CompileConfig(
+ testDataSourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "id": entityID,
+ "output_definition": true,
+ },
+ ),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "workspace_id", workspaceID),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
+ resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "definition.EventhouseProperties.json.content"),
+ ),
+ },
}))
}
diff --git a/internal/services/eventhouse/data_eventhouses.go b/internal/services/eventhouse/data_eventhouses.go
index dfb518e4..4c4cb473 100644
--- a/internal/services/eventhouse/data_eventhouses.go
+++ b/internal/services/eventhouse/data_eventhouses.go
@@ -4,21 +4,72 @@
package eventhouse
import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabeventhouse "github.com/microsoft/fabric-sdk-go/fabric/eventhouse"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-func NewDataSourceEventhouses() datasource.DataSource {
- config := fabricitem.DataSourceFabricItems{
- Type: ItemType,
- Name: ItemName,
- Names: ItemsName,
- TFName: ItemsTFName,
- MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
- "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport,
+func NewDataSourceEventhouses(ctx context.Context) datasource.DataSource {
+ propertiesSetter := func(ctx context.Context, from *fabeventhouse.Properties, to *fabricitem.FabricItemPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[eventhousePropertiesModel](ctx)
+
+ if from != nil {
+ propertiesModel := &eventhousePropertiesModel{}
+ propertiesModel.set(ctx, from)
+
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
+
+ to.Properties = properties
+
+ return nil
+ }
+
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemsPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties], fabricItems *[]fabricitem.FabricItemProperties[fabeventhouse.Properties]) error {
+ client := fabeventhouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
+
+ fabItems := make([]fabricitem.FabricItemProperties[fabeventhouse.Properties], 0)
+
+ respList, err := client.ListEventhouses(ctx, model.WorkspaceID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
+
+ for _, entity := range respList {
+ var fabricItem fabricitem.FabricItemProperties[fabeventhouse.Properties]
+
+ fabricItem.Set(entity)
+
+ fabItems = append(fabItems, fabricItem)
+ }
+
+ *fabricItems = fabItems
+
+ return nil
+ }
+
+ config := fabricitem.DataSourceFabricItemsProperties[eventhousePropertiesModel, fabeventhouse.Properties]{
+ DataSourceFabricItems: fabricitem.DataSourceFabricItems{
+ Type: ItemType,
+ Name: ItemName,
+ Names: ItemsName,
+ TFName: ItemsTFName,
+ MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
+ "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ },
+ PropertiesAttributes: getDataSourceEventhousePropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemListGetter: itemListGetter,
}
- return fabricitem.NewDataSourceFabricItems(config)
+ return fabricitem.NewDataSourceFabricItemsProperties(config)
}
diff --git a/internal/services/eventhouse/data_eventhouses_test.go b/internal/services/eventhouse/data_eventhouses_test.go
index a2a31e1c..1126119f 100644
--- a/internal/services/eventhouse/data_eventhouses_test.go
+++ b/internal/services/eventhouse/data_eventhouses_test.go
@@ -22,11 +22,11 @@ var (
func TestUnit_EventhousesDataSource(t *testing.T) {
workspaceID := testhelp.RandomUUID()
- entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID)
+ entity := fakes.NewRandomEventhouseWithWorkspace(workspaceID)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomEventhouseWithWorkspace(workspaceID))
fakes.FakeServer.Upsert(entity)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomEventhouseWithWorkspace(workspaceID))
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, nil, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
@@ -69,13 +69,16 @@ func TestUnit_EventhousesDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "workspace_id", entity.WorkspaceID),
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "values.1.id", entity.ID),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.database_ids.0"),
),
},
}))
}
func TestAcc_EventhousesDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
@@ -90,6 +93,9 @@ func TestAcc_EventhousesDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testDataSourceItemsFQN, "workspace_id", workspaceID),
resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.database_ids.0"),
),
},
},
diff --git a/internal/services/eventhouse/models.go b/internal/services/eventhouse/models.go
index 076ad8e1..985cd017 100644
--- a/internal/services/eventhouse/models.go
+++ b/internal/services/eventhouse/models.go
@@ -7,61 +7,10 @@ import (
"context"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- timeoutsd "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
- timeoutsr "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
fabeventhouse "github.com/microsoft/fabric-sdk-go/fabric/eventhouse"
-
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
)
-type dataSourceEventhouseModel struct {
- baseEventhousePropertiesModel
- Timeouts timeoutsd.Value `tfsdk:"timeouts"`
-}
-
-type resourceEventhouseModel struct {
- baseEventhousePropertiesModel
- Timeouts timeoutsr.Value `tfsdk:"timeouts"`
-}
-
-type baseEventhouseModel struct {
- WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
- ID customtypes.UUID `tfsdk:"id"`
- DisplayName types.String `tfsdk:"display_name"`
- Description types.String `tfsdk:"description"`
-}
-
-func (to *baseEventhouseModel) set(from fabeventhouse.Eventhouse) {
- to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
- to.ID = customtypes.NewUUIDPointerValue(from.ID)
- to.DisplayName = types.StringPointerValue(from.DisplayName)
- to.Description = types.StringPointerValue(from.Description)
-}
-
-type baseEventhousePropertiesModel struct {
- baseEventhouseModel
- Properties supertypes.SingleNestedObjectValueOf[eventhousePropertiesModel] `tfsdk:"properties"`
-}
-
-func (to *baseEventhousePropertiesModel) setProperties(ctx context.Context, from fabeventhouse.Eventhouse) diag.Diagnostics {
- properties := supertypes.NewSingleNestedObjectValueOfNull[eventhousePropertiesModel](ctx)
-
- if from.Properties != nil {
- propertiesModel := &eventhousePropertiesModel{}
- propertiesModel.set(ctx, from.Properties)
-
- if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
- return diags
- }
- }
-
- to.Properties = properties
-
- return nil
-}
-
type eventhousePropertiesModel struct {
IngestionServiceURI types.String `tfsdk:"ingestion_service_uri"`
QueryServiceURI types.String `tfsdk:"query_service_uri"`
@@ -73,21 +22,3 @@ func (to *eventhousePropertiesModel) set(ctx context.Context, from *fabeventhous
to.QueryServiceURI = types.StringPointerValue(from.QueryServiceURI)
to.DatabaseIDs = supertypes.NewListValueOfSlice(ctx, from.DatabasesItemIDs)
}
-
-type requestUpdateEventhouse struct {
- fabeventhouse.UpdateEventhouseRequest
-}
-
-func (to *requestUpdateEventhouse) set(from resourceEventhouseModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
-
-type requestCreateEventhouse struct {
- fabeventhouse.CreateEventhouseRequest
-}
-
-func (to *requestCreateEventhouse) set(from resourceEventhouseModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
diff --git a/internal/services/eventhouse/resource_eventhouse.go b/internal/services/eventhouse/resource_eventhouse.go
index eadf62b7..932f6e59 100644
--- a/internal/services/eventhouse/resource_eventhouse.go
+++ b/internal/services/eventhouse/resource_eventhouse.go
@@ -5,337 +5,77 @@ package eventhouse
import (
"context"
- "fmt"
- "strings"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
+ "github.com/hashicorp/terraform-plugin-framework-validators/mapvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/microsoft/fabric-sdk-go/fabric"
fabeventhouse "github.com/microsoft/fabric-sdk-go/fabric/eventhouse"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ resource.ResourceWithConfigure = (*resourceEventhouse)(nil)
- _ resource.ResourceWithImportState = (*resourceEventhouse)(nil)
-)
-
-type resourceEventhouse struct {
- pConfigData *pconfig.ProviderData
- client *fabeventhouse.ItemsClient
-}
-
-func NewResourceEventhouse() resource.Resource {
- return &resourceEventhouse{}
-}
-
-func (r *resourceEventhouse) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (r *resourceEventhouse) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- markdownDescription := "This resource manages a Fabric " + ItemName + ".\n\n" +
- "See [" + ItemName + "](" + ItemDocsURL + ") for more information.\n\n" +
- ItemDocsSPNSupport
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[eventhousePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "ingestion_service_uri": schema.StringAttribute{
- MarkdownDescription: "Ingestion service URI.",
- Computed: true,
- },
- "query_service_uri": schema.StringAttribute{
- MarkdownDescription: "Query service URI.",
- Computed: true,
- },
- "database_ids": schema.ListAttribute{
- MarkdownDescription: "The IDs list of KQL Databases.",
- Computed: true,
- CustomType: supertypes.NewListTypeOf[string](ctx),
- },
- },
- }
-
- resp.Schema = fabricitem.GetResourceFabricItemPropertiesSchema(ctx, ItemName, markdownDescription, 123, 256, true, properties)
-}
-
-func (r *resourceEventhouse) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorResourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- r.pConfigData = pConfigData
- r.client = fabeventhouse.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
-
-func (r *resourceEventhouse) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "CREATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- })
-
- var plan resourceEventhouseModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqCreate requestCreateEventhouse
-
- reqCreate.set(plan)
-
- respCreate, err := r.client.CreateEventhouse(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateEventhouseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- plan.set(respCreate.Eventhouse)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "end",
- })
+func NewResourceEventhouse(ctx context.Context) resource.Resource {
+ propertiesSetter := func(ctx context.Context, from *fabeventhouse.Properties, to *fabricitem.ResourceFabricItemDefinitionPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[eventhousePropertiesModel](ctx)
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ if from != nil {
+ propertiesModel := &eventhousePropertiesModel{}
+ propertiesModel.set(ctx, from)
-func (r *resourceEventhouse) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "state": req.State,
- })
+ diags := properties.Set(ctx, propertiesModel)
+ if diags.HasError() {
+ return diags
+ }
+ }
- var state resourceEventhouseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
+ to.Properties = properties
- timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.ResourceFabricItemDefinitionPropertiesModel[eventhousePropertiesModel, fabeventhouse.Properties], fabricItem *fabricitem.FabricItemProperties[fabeventhouse.Properties]) error {
+ client := fabeventhouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- diags = r.get(ctx, &state)
- if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
- resp.State.RemoveResource(ctx)
+ respGet, err := client.GetEventhouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
- resp.Diagnostics.Append(diags...)
+ fabricItem.Set(respGet.Eventhouse)
- return
+ return nil
}
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceEventhouse) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "UPDATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- "state": req.State,
- })
-
- var plan resourceEventhouseModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqUpdate requestUpdateEventhouse
-
- reqUpdate.set(plan)
-
- respUpdate, err := r.client.UpdateEventhouse(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateEventhouseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- plan.set(respUpdate.Eventhouse)
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceEventhouse) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "DELETE", map[string]any{
- "state": req.State,
- })
-
- var state resourceEventhouseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- _, err := r.client.DeleteEventhouse(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "end",
- })
-}
-
-func (r *resourceEventhouse) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "IMPORT", map[string]any{
- "id": req.ID,
- })
-
- workspaceID, eventhouseID, found := strings.Cut(req.ID, "/")
- if !found {
- resp.Diagnostics.AddError(
- common.ErrorImportIdentifierHeader,
- fmt.Sprintf(common.ErrorImportIdentifierDetails, "WorkspaceID/EventhouseID"),
- )
-
- return
- }
-
- uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
- resp.Diagnostics.Append(diags...)
-
- uuidID, diags := customtypes.NewUUIDValueMust(eventhouseID)
- resp.Diagnostics.Append(diags...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- var timeout timeouts.Value
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
- return
- }
-
- state := resourceEventhouseModel{}
- state.ID = uuidID
- state.WorkspaceID = uuidWorkspaceID
- state.Timeouts = timeout
-
- if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceEventhouse) get(ctx context.Context, model *resourceEventhouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET", map[string]any{
- "workspace_id": model.WorkspaceID.ValueString(),
- "id": model.ID.ValueString(),
- })
-
- respGet, err := r.client.GetEventhouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, fabcore.ErrCommon.EntityNotFound); diags.HasError() {
- return diags
+ config := fabricitem.ResourceFabricItemDefinitionProperties[eventhousePropertiesModel, fabeventhouse.Properties]{
+ ResourceFabricItemDefinition: fabricitem.ResourceFabricItemDefinition{
+ Type: ItemType,
+ Name: ItemName,
+ NameRenameAllowed: true,
+ TFName: ItemTFName,
+ MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" +
+ "Use this resource to manage an [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ DisplayNameMaxLength: 123,
+ DescriptionMaxLength: 256,
+ FormatTypeDefault: ItemFormatTypeDefault,
+ FormatTypes: ItemFormatTypes,
+ DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
+ DefinitionPathKeys: ItemDefinitionPaths,
+ DefinitionPathKeysValidator: []validator.Map{
+ mapvalidator.SizeAtMost(1),
+ mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)),
+ },
+ DefinitionRequired: false,
+ DefinitionEmpty: ItemDefinitionEmpty,
+ },
+ PropertiesAttributes: getResourceEventhousePropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
}
- model.set(respGet.Eventhouse)
-
- return model.setProperties(ctx, respGet.Eventhouse)
+ return fabricitem.NewResourceFabricItemDefinitionProperties(config)
}
diff --git a/internal/services/eventhouse/resource_eventhouse_test.go b/internal/services/eventhouse/resource_eventhouse_test.go
index 98f6d5fb..1997e80d 100644
--- a/internal/services/eventhouse/resource_eventhouse_test.go
+++ b/internal/services/eventhouse/resource_eventhouse_test.go
@@ -24,61 +24,87 @@ var (
testResourceItemHeader = at.ResourceHeader(testhelp.TypeName("fabric", itemTFName), "test")
)
+var testHelperLocals = at.CompileLocalsConfig(map[string]any{
+ "path": testhelp.GetFixturesDirPath("eventhouse"),
+})
+
+var testHelperDefinition = map[string]any{
+ `"EventhouseProperties.json"`: map[string]any{
+ "source": "${local.path}/EventhouseProperties.json.tmpl",
+ },
+}
+
func TestUnit_EventhouseResource_Attributes(t *testing.T) {
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, &testResourceItemFQN, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{},
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{},
+ ),
),
ExpectError: regexp.MustCompile(`Missing required argument`),
},
// error - workspace_id - invalid UUID
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": "invalid uuid",
- "display_name": "test",
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": "invalid uuid",
+ "display_name": "test",
+ "definition": testHelperDefinition,
+ },
+ )),
ExpectError: regexp.MustCompile(customtypes.UUIDTypeErrorInvalidStringHeader),
},
// error - unexpected attribute
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": "00000000-0000-0000-0000-000000000000",
- "unexpected_attr": "test",
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": "00000000-0000-0000-0000-000000000000",
+ "display_name": "test",
+ "unexpected_attr": "test",
+ "definition": testHelperDefinition,
+ },
+ )),
ExpectError: regexp.MustCompile(`An argument named "unexpected_attr" is not expected here`),
},
// error - no required attributes
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "display_name": "test",
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "display_name": "test",
+ "definition": testHelperDefinition,
+ },
+ )),
ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found.`),
},
// error - no required attributes
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": "00000000-0000-0000-0000-000000000000",
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": "00000000-0000-0000-0000-000000000000",
+ "definition": testHelperDefinition,
+ },
+ )),
ExpectError: regexp.MustCompile(`The argument "display_name" is required, but no definition was found.`),
},
}))
@@ -92,13 +118,16 @@ func TestUnit_EventhouseResource_ImportState(t *testing.T) {
fakes.FakeServer.Upsert(entity)
fakes.FakeServer.Upsert(fakes.NewRandomEventhouseWithWorkspace(workspaceID))
- testCase := at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": *entity.WorkspaceID,
- "display_name": *entity.DisplayName,
- },
- )
+ testCase := at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": *entity.WorkspaceID,
+ "display_name": *entity.DisplayName,
+ "definition": testHelperDefinition,
+ },
+ ))
resource.Test(t, testhelp.NewTestUnitCase(t, &testResourceItemFQN, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
{
@@ -106,7 +135,7 @@ func TestUnit_EventhouseResource_ImportState(t *testing.T) {
Config: testCase,
ImportStateId: "not-valid",
ImportState: true,
- ExpectError: regexp.MustCompile(fmt.Sprintf(common.ErrorImportIdentifierDetails, "WorkspaceID/EventhouseID")),
+ ExpectError: regexp.MustCompile(fmt.Sprintf(common.ErrorImportIdentifierDetails, fmt.Sprintf("WorkspaceID/%sID", string(itemType)))),
},
{
ResourceName: testResourceItemFQN,
@@ -166,44 +195,60 @@ func TestUnit_EventhouseResource_CRUD(t *testing.T) {
// error - create - existing entity
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": *entityExist.WorkspaceID,
- "display_name": *entityExist.DisplayName,
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": *entityExist.WorkspaceID,
+ "display_name": *entityExist.DisplayName,
+ "definition": testHelperDefinition,
+ },
+ )),
ExpectError: regexp.MustCompile(common.ErrorCreateHeader),
},
// Create and Read
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": *entityBefore.WorkspaceID,
- "display_name": *entityBefore.DisplayName,
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": *entityBefore.WorkspaceID,
+ "display_name": *entityBefore.DisplayName,
+ "definition": testHelperDefinition,
+ },
+ )),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityBefore.DisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.database_ids.0"),
),
},
// Update and Read
{
ResourceName: testResourceItemFQN,
- Config: at.CompileConfig(
- testResourceItemHeader,
- map[string]any{
- "workspace_id": *entityBefore.WorkspaceID,
- "display_name": *entityAfter.DisplayName,
- "description": *entityAfter.Description,
- },
- ),
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": *entityBefore.WorkspaceID,
+ "display_name": *entityAfter.DisplayName,
+ "description": *entityAfter.Description,
+ "definition": testHelperDefinition,
+ },
+ )),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityAfter.DisplayName),
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "description", entityAfter.Description),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.database_ids.0"),
),
},
// Delete testing automatically occurs in TestCase
@@ -211,7 +256,7 @@ func TestUnit_EventhouseResource_CRUD(t *testing.T) {
}
func TestAcc_EventhouseResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
@@ -232,6 +277,9 @@ func TestAcc_EventhouseResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.database_ids.0"),
),
},
// Update and Read
@@ -248,6 +296,67 @@ func TestAcc_EventhouseResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.database_ids.0"),
+ ),
+ },
+ },
+ ))
+}
+
+func TestAcc_EventhouseDefinitionResource_CRUD(t *testing.T) {
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
+ workspaceID := workspace["id"].(string)
+
+ entityCreateDisplayName := testhelp.RandomName()
+ entityUpdateDisplayName := testhelp.RandomName()
+ entityUpdateDescription := testhelp.RandomName()
+
+ resource.Test(t, testhelp.NewTestAccCase(t, &testResourceItemFQN, nil, []resource.TestStep{
+ // Create and Read
+ {
+ ResourceName: testResourceItemFQN,
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "display_name": entityCreateDisplayName,
+ "definition": testHelperDefinition,
+ },
+ )),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckNoResourceAttr(testResourceItemFQN, "properties.database_ids.0"),
+ ),
+ },
+ // Update and Read
+ {
+ ResourceName: testResourceItemFQN,
+ Config: at.JoinConfigs(
+ testHelperLocals,
+ at.CompileConfig(
+ testResourceItemHeader,
+ map[string]any{
+ "workspace_id": workspaceID,
+ "display_name": entityUpdateDisplayName,
+ "description": entityUpdateDescription,
+ "definition": testHelperDefinition,
+ },
+ )),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+ resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckNoResourceAttr(testResourceItemFQN, "properties.database_ids.0"),
),
},
},
diff --git a/internal/services/eventhouse/schema_data_eventhouse.go b/internal/services/eventhouse/schema_data_eventhouse.go
new file mode 100644
index 00000000..f50873b0
--- /dev/null
+++ b/internal/services/eventhouse/schema_data_eventhouse.go
@@ -0,0 +1,31 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package eventhouse
+
+import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func getDataSourceEventhousePropertiesAttributes(ctx context.Context) map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "ingestion_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Ingestion service URI.",
+ Computed: true,
+ },
+ "query_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Query service URI.",
+ Computed: true,
+ },
+ "database_ids": schema.ListAttribute{
+ MarkdownDescription: "List of all KQL Database children IDs.",
+ Computed: true,
+ CustomType: supertypes.NewListTypeOf[string](ctx),
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/eventhouse/schema_resource_eventhouse.go b/internal/services/eventhouse/schema_resource_eventhouse.go
new file mode 100644
index 00000000..786fe18a
--- /dev/null
+++ b/internal/services/eventhouse/schema_resource_eventhouse.go
@@ -0,0 +1,31 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package eventhouse
+
+import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func getResourceEventhousePropertiesAttributes(ctx context.Context) map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "ingestion_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Ingestion service URI.",
+ Computed: true,
+ },
+ "query_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Query service URI.",
+ Computed: true,
+ },
+ "database_ids": schema.ListAttribute{
+ MarkdownDescription: "List of all KQL Database children IDs.",
+ Computed: true,
+ CustomType: supertypes.NewListTypeOf[string](ctx),
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/eventstream/data_eventstream_test.go b/internal/services/eventstream/data_eventstream_test.go
index e35971c9..fbeaaef3 100644
--- a/internal/services/eventstream/data_eventstream_test.go
+++ b/internal/services/eventstream/data_eventstream_test.go
@@ -150,7 +150,7 @@ func TestUnit_EventstreamDataSource(t *testing.T) {
}
func TestAcc_EventstreamDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Eventstream"].(map[string]any)
diff --git a/internal/services/eventstream/data_eventstreams_test.go b/internal/services/eventstream/data_eventstreams_test.go
index 8b7d7cd1..8d145cda 100644
--- a/internal/services/eventstream/data_eventstreams_test.go
+++ b/internal/services/eventstream/data_eventstreams_test.go
@@ -75,7 +75,7 @@ func TestUnit_EventstreamsDataSource(t *testing.T) {
}
func TestAcc_EventstreamsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/eventstream/resource_eventstream_test.go b/internal/services/eventstream/resource_eventstream_test.go
index f441fcc0..e0fad008 100644
--- a/internal/services/eventstream/resource_eventstream_test.go
+++ b/internal/services/eventstream/resource_eventstream_test.go
@@ -211,7 +211,7 @@ func TestUnit_EventstreamResource_CRUD(t *testing.T) {
}
func TestAcc_EventstreamResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
diff --git a/internal/services/kqldatabase/base.go b/internal/services/kqldatabase/base.go
index f3429834..e54e5de9 100644
--- a/internal/services/kqldatabase/base.go
+++ b/internal/services/kqldatabase/base.go
@@ -10,11 +10,18 @@ import (
)
const (
- ItemName = "KQL Database"
- ItemTFName = "kql_database"
- ItemsName = "KQL Databases"
- ItemsTFName = "kql_databases"
- ItemType = fabcore.ItemTypeKQLDatabase
- ItemDocsSPNSupport = common.DocsSPNSupported
- ItemDocsURL = "https://learn.microsoft.com/fabric/real-time-intelligence/create-database"
+ ItemName = "KQL Database"
+ ItemTFName = "kql_database"
+ ItemsName = "KQL Databases"
+ ItemsTFName = "kql_databases"
+ ItemType = fabcore.ItemTypeKQLDatabase
+ ItemDocsSPNSupport = common.DocsSPNSupported
+ ItemDocsURL = "https://learn.microsoft.com/fabric/real-time-intelligence/create-database"
+ ItemFormatTypeDefault = ""
+ ItemDefinitionPathDocsURL = "https://learn.microsoft.com/rest/api/fabric/articles/item-management/definitions/kql-database-definition"
+)
+
+var (
+ ItemFormatTypes = []string{""} //nolint:gochecknoglobals
+ ItemDefinitionPaths = []string{"DatabaseProperties.json", "DatabaseSchema.kql"} //nolint:gochecknoglobals
)
diff --git a/internal/services/kqldatabase/data_kql_database.go b/internal/services/kqldatabase/data_kql_database.go
index f29e6ce3..ac6c96c2 100644
--- a/internal/services/kqldatabase/data_kql_database.go
+++ b/internal/services/kqldatabase/data_kql_database.go
@@ -5,203 +5,88 @@ package kqldatabase
import (
"context"
- "fmt"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fabkqldatabase "github.com/microsoft/fabric-sdk-go/fabric/kqldatabase"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSourceWithConfigValidators = (*dataSourceKQLDatabase)(nil)
- _ datasource.DataSourceWithConfigure = (*dataSourceKQLDatabase)(nil)
-)
-
-type dataSourceKQLDatabase struct {
- pConfigData *pconfig.ProviderData
- client *fabkqldatabase.ItemsClient
-}
-
func NewDataSourceKQLDatabase() datasource.DataSource {
- return &dataSourceKQLDatabase{}
-}
-
-func (d *dataSourceKQLDatabase) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (d *dataSourceKQLDatabase) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- markdownDescription := "Get a Fabric " + ItemName + ".\n\n" +
- "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The KQL Database properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[kqlDatabasePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "database_type": schema.StringAttribute{
- MarkdownDescription: "The type of the database. Possible values:" + utils.ConvertStringSlicesToString(fabkqldatabase.PossibleKqlDatabaseTypeValues(), true, true) + ".",
- Computed: true,
- },
- "eventhouse_id": schema.StringAttribute{
- MarkdownDescription: "Parent Eventhouse ID.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- "ingestion_service_uri": schema.StringAttribute{
- MarkdownDescription: "Ingestion service URI.",
- Computed: true,
- CustomType: customtypes.URLType{},
- },
- "query_service_uri": schema.StringAttribute{
- MarkdownDescription: "Query service URI.",
- Computed: true,
- CustomType: customtypes.URLType{},
- },
- },
- }
-
- itemConfig := fabricitem.DataSourceFabricItem{
- Type: ItemType,
- Name: ItemName,
- TFName: ItemTFName,
- MarkdownDescription: markdownDescription,
- IsDisplayNameUnique: true,
- }
+ propertiesSetter := func(ctx context.Context, from *fabkqldatabase.Properties, to *fabricitem.DataSourceFabricItemPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[kqlDatabasePropertiesModel](ctx)
- resp.Schema = fabricitem.GetDataSourceFabricItemPropertiesSchema(ctx, itemConfig, properties)
-}
-
-func (d *dataSourceKQLDatabase) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
- return []datasource.ConfigValidator{
- datasourcevalidator.Conflicting(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- datasourcevalidator.ExactlyOneOf(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- }
-}
-
-func (d *dataSourceKQLDatabase) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorDataSourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- d.pConfigData = pConfigData
- d.client = fabkqldatabase.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
+ if from != nil {
+ propertiesModel := &kqlDatabasePropertiesModel{}
+ propertiesModel.set(from)
-func (d *dataSourceKQLDatabase) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "config": req.Config,
- })
-
- var data dataSourceKQLDatabaseModel
-
- if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- if data.ID.ValueString() != "" {
- diags = d.getByID(ctx, &data)
- } else {
- diags = d.getByDisplayName(ctx, &data)
- }
+ to.Properties = properties
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties], fabricItem *fabricitem.FabricItemProperties[fabkqldatabase.Properties]) error {
+ client := fabkqldatabase.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ respGet, err := client.GetKQLDatabase(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
-func (d *dataSourceKQLDatabase) getByID(ctx context.Context, model *dataSourceKQLDatabaseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET BY ID", map[string]any{
- "id": model.ID.ValueString(),
- })
+ fabricItem.Set(respGet.KQLDatabase)
- respGet, err := d.client.GetKQLDatabase(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
- return diags
+ return nil
}
- model.set(respGet.KQLDatabase)
-
- return model.setProperties(ctx, respGet.KQLDatabase)
-}
-
-func (d *dataSourceKQLDatabase) getByDisplayName(ctx context.Context, model *dataSourceKQLDatabaseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET BY DISPLAY NAME", map[string]any{
- "display_name": model.DisplayName.ValueString(),
- })
-
- var diags diag.Diagnostics
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties], errNotFound fabcore.ResponseError, fabricItem *fabricitem.FabricItemProperties[fabkqldatabase.Properties]) error {
+ client := fabkqldatabase.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- pager := d.client.NewListKQLDatabasesPager(model.WorkspaceID.ValueString(), nil)
- for pager.More() {
- page, err := pager.NextPage(ctx)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationList, nil); diags.HasError() {
- return diags
- }
+ pager := client.NewListKQLDatabasesPager(model.WorkspaceID.ValueString(), nil)
+ for pager.More() {
+ page, err := pager.NextPage(ctx)
+ if err != nil {
+ return err
+ }
- for _, entity := range page.Value {
- if *entity.DisplayName == model.DisplayName.ValueString() {
- model.set(entity)
+ for _, entity := range page.Value {
+ if *entity.DisplayName == model.DisplayName.ValueString() {
+ fabricItem.Set(entity)
- return nil
+ return nil
+ }
}
}
+
+ return &errNotFound
}
- diags.AddError(
- common.ErrorReadHeader,
- fmt.Sprintf("Unable to find %s with display_name: '%s' in the Workspace ID: %s ", ItemName, model.DisplayName.ValueString(), model.WorkspaceID.ValueString()),
- )
+ config := fabricitem.DataSourceFabricItemProperties[kqlDatabasePropertiesModel, fabkqldatabase.Properties]{
+ DataSourceFabricItem: fabricitem.DataSourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ TFName: ItemTFName,
+ MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" +
+ "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ IsDisplayNameUnique: true,
+ // FormatTypeDefault: ItemFormatTypeDefault,
+ // FormatTypes: ItemFormatTypes,
+ // DefinitionPathKeys: ItemDefinitionPaths,
+ },
+ PropertiesAttributes: getDataSourceKQLDatabasePropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
+ ItemListGetter: itemListGetter,
+ }
- return diags
+ return fabricitem.NewDataSourceFabricItemProperties(config)
}
diff --git a/internal/services/kqldatabase/data_kql_database_test.go b/internal/services/kqldatabase/data_kql_database_test.go
index dfb0454e..7f1884cb 100644
--- a/internal/services/kqldatabase/data_kql_database_test.go
+++ b/internal/services/kqldatabase/data_kql_database_test.go
@@ -89,7 +89,7 @@ func TestUnit_KQLDatabaseDataSource(t *testing.T) {
"id": *entity.ID,
},
),
- ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found.`),
+ ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found`),
},
// read by id
{
@@ -105,6 +105,10 @@ func TestUnit_KQLDatabaseDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.eventhouse_id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_type"),
),
},
// read by id - not found
@@ -133,6 +137,10 @@ func TestUnit_KQLDatabaseDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.eventhouse_id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_type"),
),
},
// read by name - not found
@@ -150,7 +158,7 @@ func TestUnit_KQLDatabaseDataSource(t *testing.T) {
}
func TestAcc_KQLDatabaseDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["KQLDatabase"].(map[string]any)
@@ -174,6 +182,10 @@ func TestAcc_KQLDatabaseDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.eventhouse_id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_type"),
),
},
// read by id - not found
@@ -203,6 +215,10 @@ func TestAcc_KQLDatabaseDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.eventhouse_id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_type"),
),
},
// read by name - not found
@@ -217,5 +233,28 @@ func TestAcc_KQLDatabaseDataSource(t *testing.T) {
),
ExpectError: regexp.MustCompile(common.ErrorReadHeader),
},
+ // read by id with definition
+ // {
+ // ResourceName: testDataSourceItemFQN,
+ // Config: at.CompileConfig(
+ // testDataSourceItemHeader,
+ // map[string]any{
+ // "workspace_id": workspaceID,
+ // "id": entityID,
+ // "output_definition": true,
+ // },
+ // ),
+ // Check: resource.ComposeAggregateTestCheckFunc(
+ // resource.TestCheckResourceAttr(testDataSourceItemFQN, "workspace_id", workspaceID),
+ // resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
+ // resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
+ // resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ // resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.query_service_uri"),
+ // resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.ingestion_service_uri"),
+ // resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.eventhouse_id"),
+ // resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.database_type"),
+ // resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "definition.DatabaseProperties.json.content"),
+ // ),
+ // },
}))
}
diff --git a/internal/services/kqldatabase/data_kql_databases.go b/internal/services/kqldatabase/data_kql_databases.go
index 68a8c7c4..26cf28c0 100644
--- a/internal/services/kqldatabase/data_kql_databases.go
+++ b/internal/services/kqldatabase/data_kql_databases.go
@@ -4,21 +4,72 @@
package kqldatabase
import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabkqldatabase "github.com/microsoft/fabric-sdk-go/fabric/kqldatabase"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
func NewDataSourceKQLDatabases() datasource.DataSource {
- config := fabricitem.DataSourceFabricItems{
- Type: ItemType,
- Name: ItemName,
- Names: ItemsName,
- TFName: ItemsTFName,
- MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
- "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport,
+ propertiesSetter := func(ctx context.Context, from *fabkqldatabase.Properties, to *fabricitem.FabricItemPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[kqlDatabasePropertiesModel](ctx)
+
+ if from != nil {
+ propertiesModel := &kqlDatabasePropertiesModel{}
+ propertiesModel.set(from)
+
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
+
+ to.Properties = properties
+
+ return nil
+ }
+
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemsPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties], fabricItems *[]fabricitem.FabricItemProperties[fabkqldatabase.Properties]) error {
+ client := fabkqldatabase.NewClientFactoryWithClient(fabricClient).NewItemsClient()
+
+ fabItems := make([]fabricitem.FabricItemProperties[fabkqldatabase.Properties], 0)
+
+ respList, err := client.ListKQLDatabases(ctx, model.WorkspaceID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
+
+ for _, entity := range respList {
+ var fabricItem fabricitem.FabricItemProperties[fabkqldatabase.Properties]
+
+ fabricItem.Set(entity)
+
+ fabItems = append(fabItems, fabricItem)
+ }
+
+ *fabricItems = fabItems
+
+ return nil
+ }
+
+ config := fabricitem.DataSourceFabricItemsProperties[kqlDatabasePropertiesModel, fabkqldatabase.Properties]{
+ DataSourceFabricItems: fabricitem.DataSourceFabricItems{
+ Type: ItemType,
+ Name: ItemName,
+ Names: ItemsName,
+ TFName: ItemsTFName,
+ MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
+ "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ },
+ PropertiesAttributes: getDataSourceKQLDatabasePropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemListGetter: itemListGetter,
}
- return fabricitem.NewDataSourceFabricItems(config)
+ return fabricitem.NewDataSourceFabricItemsProperties(config)
}
diff --git a/internal/services/kqldatabase/data_kql_databases_test.go b/internal/services/kqldatabase/data_kql_databases_test.go
index eb707de2..140d25e0 100644
--- a/internal/services/kqldatabase/data_kql_databases_test.go
+++ b/internal/services/kqldatabase/data_kql_databases_test.go
@@ -75,7 +75,7 @@ func TestUnit_KQLDatabasesDataSource(t *testing.T) {
}
func TestAcc_KQLDatabasesDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/kqldatabase/models.go b/internal/services/kqldatabase/models.go
index 0df422c0..25eec149 100644
--- a/internal/services/kqldatabase/models.go
+++ b/internal/services/kqldatabase/models.go
@@ -4,66 +4,12 @@
package kqldatabase
import (
- "context"
- "fmt"
-
- supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- timeoutsd "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
- timeoutsr "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
fabkqldatabase "github.com/microsoft/fabric-sdk-go/fabric/kqldatabase"
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
)
-type dataSourceKQLDatabaseModel struct {
- baseKQLDatabasePropertiesModel
- Timeouts timeoutsd.Value `tfsdk:"timeouts"`
-}
-
-type resourceKQLDatabaseModel struct {
- baseKQLDatabasePropertiesModel
- Configuration supertypes.SingleNestedObjectValueOf[kqlDatabaseConfigurationModel] `tfsdk:"configuration"`
- Timeouts timeoutsr.Value `tfsdk:"timeouts"`
-}
-
-type baseKQLDatabaseModel struct {
- WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
- ID customtypes.UUID `tfsdk:"id"`
- DisplayName types.String `tfsdk:"display_name"`
- Description types.String `tfsdk:"description"`
-}
-
-func (to *baseKQLDatabaseModel) set(from fabkqldatabase.KQLDatabase) {
- to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
- to.ID = customtypes.NewUUIDPointerValue(from.ID)
- to.DisplayName = types.StringPointerValue(from.DisplayName)
- to.Description = types.StringPointerValue(from.Description)
-}
-
-type baseKQLDatabasePropertiesModel struct {
- baseKQLDatabaseModel
- Properties supertypes.SingleNestedObjectValueOf[kqlDatabasePropertiesModel] `tfsdk:"properties"`
-}
-
-func (to *baseKQLDatabasePropertiesModel) setProperties(ctx context.Context, from fabkqldatabase.KQLDatabase) diag.Diagnostics {
- properties := supertypes.NewSingleNestedObjectValueOfNull[kqlDatabasePropertiesModel](ctx)
-
- if from.Properties != nil {
- propertiesModel := &kqlDatabasePropertiesModel{}
- propertiesModel.set(from.Properties)
-
- if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
- return diags
- }
- }
-
- to.Properties = properties
-
- return nil
-}
-
type kqlDatabasePropertiesModel struct {
DatabaseType types.String `tfsdk:"database_type"`
EventhouseID customtypes.UUID `tfsdk:"eventhouse_id"`
@@ -81,75 +27,3 @@ func (to *kqlDatabasePropertiesModel) set(from *fabkqldatabase.Properties) {
// to.OneLakeStandardStoragePeriod = types.StringPointerValue(from.OneLakeStandardStoragePeriod)
// to.OneLakeCachingPeriod = types.StringPointerValue(from.OneLakeCachingPeriod)
}
-
-type kqlDatabaseConfigurationModel struct {
- DatabaseType types.String `tfsdk:"database_type"`
- EventhouseID customtypes.UUID `tfsdk:"eventhouse_id"`
- InvitationToken types.String `tfsdk:"invitation_token"`
- SourceClusterURI customtypes.URL `tfsdk:"source_cluster_uri"`
- SourceDatabaseName types.String `tfsdk:"source_database_name"`
-}
-
-type requestUpdateKQLDatabase struct {
- fabkqldatabase.UpdateKQLDatabaseRequest
-}
-
-func (to *requestUpdateKQLDatabase) set(from resourceKQLDatabaseModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
-
-type requestCreateKQLDatabase struct {
- fabkqldatabase.CreateKQLDatabaseRequest
-}
-
-func (to *requestCreateKQLDatabase) set(ctx context.Context, from resourceKQLDatabaseModel) diag.Diagnostics {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-
- configuration, diags := from.Configuration.Get(ctx)
- if diags.HasError() {
- return diags
- }
-
- kqlDatabaseType := (fabkqldatabase.Type)(configuration.DatabaseType.ValueString())
-
- switch kqlDatabaseType {
- case fabkqldatabase.TypeReadWrite:
- to.CreationPayload = &fabkqldatabase.ReadWriteDatabaseCreationPayload{
- DatabaseType: &kqlDatabaseType,
- ParentEventhouseItemID: configuration.EventhouseID.ValueStringPointer(),
- }
- case fabkqldatabase.TypeShortcut:
- creationPayload := fabkqldatabase.ShortcutDatabaseCreationPayload{}
- creationPayload.DatabaseType = &kqlDatabaseType
- creationPayload.ParentEventhouseItemID = configuration.EventhouseID.ValueStringPointer()
-
- if !configuration.InvitationToken.IsNull() && !configuration.InvitationToken.IsUnknown() {
- creationPayload.InvitationToken = configuration.InvitationToken.ValueStringPointer()
-
- to.CreationPayload = &creationPayload
-
- return nil
- }
-
- if !configuration.SourceClusterURI.IsNull() && !configuration.SourceClusterURI.IsUnknown() {
- creationPayload.SourceClusterURI = configuration.SourceClusterURI.ValueStringPointer()
- }
-
- creationPayload.SourceDatabaseName = configuration.SourceDatabaseName.ValueStringPointer()
-
- to.CreationPayload = &creationPayload
-
- return nil
- default:
- diags.AddError(
- "Unsupported KQL database type",
- fmt.Sprintf("The KQL database type '%s' is not supported.", string(kqlDatabaseType)),
- )
-
- return diags
- }
-
- return nil
-}
diff --git a/internal/services/kqldatabase/models_resource_kql_database.go b/internal/services/kqldatabase/models_resource_kql_database.go
new file mode 100644
index 00000000..9bffef3a
--- /dev/null
+++ b/internal/services/kqldatabase/models_resource_kql_database.go
@@ -0,0 +1,18 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package kqldatabase
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+)
+
+type kqlDatabaseConfigurationModel struct {
+ DatabaseType types.String `tfsdk:"database_type"`
+ EventhouseID customtypes.UUID `tfsdk:"eventhouse_id"`
+ InvitationToken types.String `tfsdk:"invitation_token"`
+ SourceClusterURI customtypes.URL `tfsdk:"source_cluster_uri"`
+ SourceDatabaseName types.String `tfsdk:"source_database_name"`
+}
diff --git a/internal/services/kqldatabase/resource_kql_database.go b/internal/services/kqldatabase/resource_kql_database.go
index d5ae6ded..64a20cfb 100644
--- a/internal/services/kqldatabase/resource_kql_database.go
+++ b/internal/services/kqldatabase/resource_kql_database.go
@@ -6,439 +6,120 @@ package kqldatabase
import (
"context"
"fmt"
- "strings"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- superstringvalidator "github.com/FrangipaneTeam/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+ "github.com/microsoft/fabric-sdk-go/fabric"
fabkqldatabase "github.com/microsoft/fabric-sdk-go/fabric/kqldatabase"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ resource.ResourceWithConfigure = (*resourceKQLDatabase)(nil)
- _ resource.ResourceWithImportState = (*resourceKQLDatabase)(nil)
-)
-
-type resourceKQLDatabase struct {
- pConfigData *pconfig.ProviderData
- client *fabkqldatabase.ItemsClient
-}
-
func NewResourceKQLDatabase() resource.Resource {
- return &resourceKQLDatabase{}
-}
-
-func (r *resourceKQLDatabase) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (r *resourceKQLDatabase) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- markdownDescription := "This resource manages a Fabric " + ItemName + ".\n\n" +
- "See [" + ItemName + "](" + ItemDocsURL + ") for more information.\n\n" +
- ItemDocsSPNSupport
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[kqlDatabasePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "database_type": schema.StringAttribute{
- MarkdownDescription: "The type of the database. Possible values:" + utils.ConvertStringSlicesToString(fabkqldatabase.PossibleKqlDatabaseTypeValues(), true, true) + ".",
- Computed: true,
- },
- "eventhouse_id": schema.StringAttribute{
- MarkdownDescription: "Parent Eventhouse ID.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- "ingestion_service_uri": schema.StringAttribute{
- MarkdownDescription: "Ingestion service URI.",
- Computed: true,
- CustomType: customtypes.URLType{},
- },
- "query_service_uri": schema.StringAttribute{
- MarkdownDescription: "Query service URI.",
- Computed: true,
- CustomType: customtypes.URLType{},
- },
- },
- }
-
- configuration := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " creation configuration.\n\n" +
- "Any changes to this configuration will result in recreation of the " + ItemName + ".",
- Required: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[kqlDatabaseConfigurationModel](ctx),
- PlanModifiers: []planmodifier.Object{
- objectplanmodifier.RequiresReplace(),
- },
- Attributes: map[string]schema.Attribute{
- "database_type": schema.StringAttribute{
- MarkdownDescription: "The type of the KQL database. Accepted values: " + utils.ConvertStringSlicesToString(fabkqldatabase.PossibleKqlDatabaseTypeValues(), true, true) + ".\n\n" +
- "`" + string(fabkqldatabase.TypeReadWrite) + "` Allows read and write operations on the database.\n\n" +
- "`" + string(fabkqldatabase.TypeShortcut) + "` A shortcut is an embedded reference allowing read only operations on a source database. The source can be in the same or different tenants, either in an Azure Data Explorer cluster or a Fabric Eventhouse.",
- Required: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- Validators: []validator.String{
- stringvalidator.OneOf(utils.ConvertEnumsToStringSlices(fabkqldatabase.PossibleKqlDatabaseTypeValues(), false)...),
- },
- },
- "eventhouse_id": schema.StringAttribute{
- MarkdownDescription: "Parent Eventhouse ID.",
- Required: true,
- CustomType: customtypes.UUIDType{},
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- },
- "invitation_token": schema.StringAttribute{
- MarkdownDescription: "Invitation token to follow the source database. Only allowed when `database_type` is `" + string(fabkqldatabase.TypeShortcut) + "`.",
- Optional: true,
- Sensitive: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- Validators: []validator.String{
- stringvalidator.ConflictsWith(
- path.MatchRelative().AtParent().AtName("source_cluster_uri"),
- path.MatchRelative().AtParent().AtName("source_database_name"),
- ),
- superstringvalidator.NullIfAttributeIsOneOf(
- path.MatchRelative().AtParent().AtName("database_type"),
- []attr.Value{types.StringValue(string(fabkqldatabase.TypeReadWrite))},
- ),
- },
- },
- "source_cluster_uri": schema.StringAttribute{
- MarkdownDescription: "The URI of the source Eventhouse or Azure Data Explorer cluster. Only allowed when `database_type` is `" + string(fabkqldatabase.TypeShortcut) + "`.",
- Optional: true,
- CustomType: customtypes.URLType{},
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- Validators: []validator.String{
- stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("invitation_token")),
- stringvalidator.AlsoRequires(path.MatchRelative().AtParent().AtName("source_database_name")),
- superstringvalidator.NullIfAttributeIsOneOf(
- path.MatchRelative().AtParent().AtName("database_type"),
- []attr.Value{types.StringValue(string(fabkqldatabase.TypeReadWrite))},
- ),
- },
- },
- "source_database_name": schema.StringAttribute{
- MarkdownDescription: "The name of the database to follow in the source Eventhouse or Azure Data Explorer cluster. Only allowed when `database_type` is `" + string(fabkqldatabase.TypeShortcut) + "`.",
- Optional: true,
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- Validators: []validator.String{
- stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("invitation_token")),
- superstringvalidator.NullIfAttributeIsOneOf(
- path.MatchRelative().AtParent().AtName("database_type"),
- []attr.Value{types.StringValue(string(fabkqldatabase.TypeReadWrite))},
- ),
- },
- },
- },
- }
-
- resp.Schema = fabricitem.GetResourceFabricItemPropertiesCreationSchema(ctx, ItemName, markdownDescription, 123, 256, true, properties, configuration)
-}
-
-func (r *resourceKQLDatabase) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorResourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- r.pConfigData = pConfigData
- r.client = fabkqldatabase.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
+ creationPayloadSetter := func(_ context.Context, from kqlDatabaseConfigurationModel) (*fabkqldatabase.CreationPayloadClassification, diag.Diagnostics) {
+ kqlDatabaseType := (fabkqldatabase.Type)(from.DatabaseType.ValueString())
-func (r *resourceKQLDatabase) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "CREATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- })
+ var cp fabkqldatabase.CreationPayloadClassification
- var plan resourceKQLDatabaseModel
+ switch kqlDatabaseType {
+ case fabkqldatabase.TypeReadWrite:
+ creationPayload := fabkqldatabase.ReadWriteDatabaseCreationPayload{
+ DatabaseType: &kqlDatabaseType,
+ ParentEventhouseItemID: from.EventhouseID.ValueStringPointer(),
+ }
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqCreate requestCreateKQLDatabase
-
- if resp.Diagnostics.Append(reqCreate.set(ctx, plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- respCreate, err := r.client.CreateKQLDatabase(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateKQLDatabaseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- plan.set(respCreate.KQLDatabase)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
+ cp = &creationPayload
+ case fabkqldatabase.TypeShortcut:
+ creationPayload := fabkqldatabase.ShortcutDatabaseCreationPayload{}
+ creationPayload.DatabaseType = &kqlDatabaseType
+ creationPayload.ParentEventhouseItemID = from.EventhouseID.ValueStringPointer()
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
+ if !from.InvitationToken.IsNull() && !from.InvitationToken.IsUnknown() {
+ creationPayload.InvitationToken = from.InvitationToken.ValueStringPointer()
+ }
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "end",
- })
+ if !from.SourceClusterURI.IsNull() && !from.SourceClusterURI.IsUnknown() {
+ creationPayload.SourceClusterURI = from.SourceClusterURI.ValueStringPointer()
+ }
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ creationPayload.SourceDatabaseName = from.SourceDatabaseName.ValueStringPointer()
-func (r *resourceKQLDatabase) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "state": req.State,
- })
+ cp = &creationPayload
+ default:
+ var diags diag.Diagnostics
- var state resourceKQLDatabaseModel
+ diags.AddError(
+ "Unsupported KQL database type",
+ fmt.Sprintf("The KQL database type '%s' is not supported.", string(kqlDatabaseType)),
+ )
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
+ return nil, diags
+ }
- timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return &cp, nil
}
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- diags = r.get(ctx, &state)
- if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
- resp.State.RemoveResource(ctx)
-
- resp.Diagnostics.Append(diags...)
-
- return
- }
+ propertiesSetter := func(ctx context.Context, from *fabkqldatabase.Properties, to *fabricitem.ResourceFabricItemConfigPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties, kqlDatabaseConfigurationModel, fabkqldatabase.CreationPayloadClassification]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[kqlDatabasePropertiesModel](ctx)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
+ if from != nil {
+ propertiesModel := &kqlDatabasePropertiesModel{}
+ propertiesModel.set(from)
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
+ to.Properties = properties
- if resp.Diagnostics.HasError() {
- return
+ return nil
}
-}
-func (r *resourceKQLDatabase) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "UPDATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- "state": req.State,
- })
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.ResourceFabricItemConfigPropertiesModel[kqlDatabasePropertiesModel, fabkqldatabase.Properties, kqlDatabaseConfigurationModel, fabkqldatabase.CreationPayloadClassification], fabricItem *fabricitem.FabricItemProperties[fabkqldatabase.Properties]) error {
+ client := fabkqldatabase.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- var plan resourceKQLDatabaseModel
+ respGet, err := client.GetKQLDatabase(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
+ fabricItem.Set(respGet.KQLDatabase)
- timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqUpdate requestUpdateKQLDatabase
-
- reqUpdate.set(plan)
-
- respUpdate, err := r.client.UpdateKQLDatabase(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateKQLDatabaseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- plan.set(respUpdate.KQLDatabase)
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceKQLDatabase) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "DELETE", map[string]any{
- "state": req.State,
- })
-
- var state resourceKQLDatabaseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- _, err := r.client.DeleteKQLDatabase(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "end",
- })
-}
-
-func (r *resourceKQLDatabase) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "IMPORT", map[string]any{
- "id": req.ID,
- })
-
- workspaceID, kqlDatabaseID, found := strings.Cut(req.ID, "/")
-
- if !found {
- resp.Diagnostics.AddError(
- common.ErrorImportIdentifierHeader,
- fmt.Sprintf(common.ErrorImportIdentifierDetails, "WorkspaceID/KQLDatabaseID"),
- )
-
- return
- }
-
- uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
- resp.Diagnostics.Append(diags...)
-
- uuidID, diags := customtypes.NewUUIDValueMust(kqlDatabaseID)
- resp.Diagnostics.Append(diags...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- var configuration supertypes.SingleNestedObjectValueOf[kqlDatabaseConfigurationModel]
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("configuration"), &configuration)...); resp.Diagnostics.HasError() {
- return
- }
-
- var timeout timeouts.Value
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
- return
- }
-
- state := resourceKQLDatabaseModel{}
- state.ID = uuidID
- state.WorkspaceID = uuidWorkspaceID
- state.Configuration = configuration
- state.Timeouts = timeout
-
- if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceKQLDatabase) get(ctx context.Context, model *resourceKQLDatabaseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET", map[string]any{
- "workspace_id": model.WorkspaceID.ValueString(),
- "id": model.ID.ValueString(),
- })
-
- respGet, err := r.client.GetKQLDatabase(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, fabcore.ErrCommon.EntityNotFound); diags.HasError() {
- return diags
+ config := fabricitem.ResourceFabricItemConfigProperties[kqlDatabasePropertiesModel, fabkqldatabase.Properties, kqlDatabaseConfigurationModel, fabkqldatabase.CreationPayloadClassification]{
+ ResourceFabricItem: fabricitem.ResourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ NameRenameAllowed: true,
+ TFName: ItemTFName,
+ MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" +
+ "Use this resource to manage a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ DisplayNameMaxLength: 123,
+ DescriptionMaxLength: 256,
+ // FormatTypeDefault: ItemFormatTypeDefault,
+ // FormatTypes: ItemFormatTypes,
+ // DefinitionPathDocsURL: ItemDefinitionPathDocsURL,
+ // DefinitionPathKeys: ItemDefinitionPaths,
+ // DefinitionPathKeysValidator: []validator.Map{
+ // mapvalidator.SizeAtLeast(2),
+ // mapvalidator.SizeAtMost(2),
+ // mapvalidator.KeysAre(stringvalidator.OneOf(ItemDefinitionPaths...)),
+ // },
+ // DefinitionRequired: false,
+ // DefinitionEmpty: "",
+ },
+ IsConfigRequired: false,
+ ConfigAttributes: getResourceKQLDatabaseConfigurationAttributes(),
+ CreationPayloadSetter: creationPayloadSetter,
+ PropertiesAttributes: getResourceKQLDatabasePropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
}
- model.set(respGet.KQLDatabase)
-
- return model.setProperties(ctx, respGet.KQLDatabase)
+ return fabricitem.NewResourceFabricItemConfigProperties(config)
}
diff --git a/internal/services/kqldatabase/resource_kql_database_test.go b/internal/services/kqldatabase/resource_kql_database_test.go
index c8cc6ca4..507ba603 100644
--- a/internal/services/kqldatabase/resource_kql_database_test.go
+++ b/internal/services/kqldatabase/resource_kql_database_test.go
@@ -24,6 +24,10 @@ var (
testResourceItemHeader = at.ResourceHeader(testhelp.TypeName("fabric", itemTFName), "test")
)
+// var testHelperLocals = at.CompileLocalsConfig(map[string]any{
+// "path": testhelp.GetFixturesDirPath("kql_database"),
+// })
+
func TestUnit_KQLDatabaseResource_Attributes(t *testing.T) {
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, &testResourceItemFQN, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
@@ -219,6 +223,48 @@ func TestUnit_KQLDatabaseResource_Attributes(t *testing.T) {
),
ExpectError: regexp.MustCompile(customtypes.URLTypeErrorInvalidStringHeader),
},
+ // error - no required attributes (definition)
+ // {
+ // ResourceName: testResourceItemFQN,
+ // Config: at.CompileConfig(
+ // testResourceItemHeader,
+ // map[string]any{
+ // "workspace_id": "00000000-0000-0000-0000-000000000000",
+ // "display_name": "test",
+ // "definition": map[string]any{},
+ // },
+ // ),
+ // ExpectError: regexp.MustCompile("Invalid Attribute Value"),
+ // },
+ // error - conflicting attributes (configuration/definition)
+ // {
+ // ResourceName: testResourceItemFQN,
+ // Config: at.JoinConfigs(
+ // testHelperLocals,
+ // at.CompileConfig(
+ // testResourceItemHeader,
+ // map[string]any{
+ // "workspace_id": "00000000-0000-0000-0000-000000000000",
+ // "display_name": "test",
+ // "configuration": map[string]any{
+ // "database_type": "ReadWrite",
+ // "eventhouse_id": "00000000-0000-0000-0000-000000000000",
+ // },
+ // "definition": map[string]any{
+	// 			`"DatabaseProperties.json"`: map[string]any{
+ // "source": "${local.path}/DatabaseProperties.json.tmpl",
+ // "tokens": map[string]any{
+ // "EventhouseID": "00000000-0000-0000-0000-000000000000",
+ // },
+ // },
+ // `"DatabaseSchema.kql"`: map[string]any{
+ // "source": "${local.path}/DatabaseSchema.kql",
+ // },
+ // },
+ // },
+ // )),
+ // ExpectError: regexp.MustCompile("Invalid Attribute Combination"),
+ // },
}))
}
@@ -367,8 +413,8 @@ func TestUnit_KQLDatabaseResource_CRUD(t *testing.T) {
}))
}
-func TestAcc_KQLDatabaseResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+func TestAcc_KQLDatabaseConfigurationResource_CRUD(t *testing.T) {
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
eventhouseResourceHCL, eventhouseResourceFQN := eventhouseResource(t, workspaceID)
@@ -378,7 +424,7 @@ func TestAcc_KQLDatabaseResource_CRUD(t *testing.T) {
entityUpdateDescription := testhelp.RandomName()
resource.Test(t, testhelp.NewTestAccCase(t, &testResourceItemFQN, nil, []resource.TestStep{
- // Create and Read (Configuration)
+ // Create and Read
{
ResourceName: testResourceItemFQN,
Config: at.JoinConfigs(
@@ -402,7 +448,7 @@ func TestAcc_KQLDatabaseResource_CRUD(t *testing.T) {
resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.query_service_uri"),
),
},
- // Update and Read (Configuration)
+ // Update and Read
{
ResourceName: testResourceItemFQN,
Config: at.JoinConfigs(
@@ -430,3 +476,73 @@ func TestAcc_KQLDatabaseResource_CRUD(t *testing.T) {
},
))
}
+
+// func TestAcc_KQLDatabaseDefinitionResource_CRUD(t *testing.T) {
+// workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
+// workspaceID := workspace["id"].(string)
+
+// eventhouseResourceHCL, eventhouseResourceFQN := eventhouseResource(t, workspaceID)
+
+// testHelperDefinition := map[string]any{
+// `"DatabaseProperties.json"`: map[string]any{
+// "source": "${local.path}/DatabaseProperties.json.tmpl",
+// "tokens": map[string]any{
+// "EventhouseID": testhelp.RefByFQN(eventhouseResourceFQN, "id"),
+// },
+// },
+// `"DatabaseSchema.kql"`: map[string]any{
+// "source": "${local.path}/DatabaseSchema.kql",
+// },
+// }
+
+// entityCreateDisplayName := testhelp.RandomName()
+// entityUpdateDisplayName := testhelp.RandomName()
+// entityUpdateDescription := testhelp.RandomName()
+
+// resource.Test(t, testhelp.NewTestAccCase(t, &testResourceItemFQN, nil, []resource.TestStep{
+// // Create and Read
+// {
+// ResourceName: testResourceItemFQN,
+// Config: at.JoinConfigs(
+// eventhouseResourceHCL,
+// testHelperLocals,
+// at.CompileConfig(
+// testResourceItemHeader,
+// map[string]any{
+// "workspace_id": workspaceID,
+// "display_name": entityCreateDisplayName,
+// "definition": testHelperDefinition,
+// },
+// ),
+// ),
+// Check: resource.ComposeAggregateTestCheckFunc(
+// resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
+// resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+// resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+// ),
+// },
+// // Update and Read
+// {
+// ResourceName: testResourceItemFQN,
+// Config: at.JoinConfigs(
+// eventhouseResourceHCL,
+// testHelperLocals,
+// at.CompileConfig(
+// testResourceItemHeader,
+// map[string]any{
+// "workspace_id": workspaceID,
+// "display_name": entityUpdateDisplayName,
+// "description": entityUpdateDescription,
+// "definition": testHelperDefinition,
+// },
+// ),
+// ),
+// Check: resource.ComposeAggregateTestCheckFunc(
+// resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
+// resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+// resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+// ),
+// },
+// },
+// ))
+// }
diff --git a/internal/services/kqldatabase/schema_data_kql_database.go b/internal/services/kqldatabase/schema_data_kql_database.go
new file mode 100644
index 00000000..c8beaeac
--- /dev/null
+++ b/internal/services/kqldatabase/schema_data_kql_database.go
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package kqldatabase
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ fabkqldatabase "github.com/microsoft/fabric-sdk-go/fabric/kqldatabase"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+)
+
+func getDataSourceKQLDatabasePropertiesAttributes() map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "database_type": schema.StringAttribute{
+			MarkdownDescription: "The type of the database. Possible values: " + utils.ConvertStringSlicesToString(fabkqldatabase.PossibleKqlDatabaseTypeValues(), true, true) + ".",
+ Computed: true,
+ },
+ "eventhouse_id": schema.StringAttribute{
+ MarkdownDescription: "Parent Eventhouse ID.",
+ Computed: true,
+ CustomType: customtypes.UUIDType{},
+ },
+ "ingestion_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Ingestion service URI.",
+ Computed: true,
+ CustomType: customtypes.URLType{},
+ },
+ "query_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Query service URI.",
+ Computed: true,
+ CustomType: customtypes.URLType{},
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/kqldatabase/schema_resource_kql_database.go b/internal/services/kqldatabase/schema_resource_kql_database.go
new file mode 100644
index 00000000..142e744d
--- /dev/null
+++ b/internal/services/kqldatabase/schema_resource_kql_database.go
@@ -0,0 +1,117 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package kqldatabase
+
+import (
+ superstringvalidator "github.com/FrangipaneTeam/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ fabkqldatabase "github.com/microsoft/fabric-sdk-go/fabric/kqldatabase"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
+)
+
+func getResourceKQLDatabasePropertiesAttributes() map[string]schema.Attribute {
+ return map[string]schema.Attribute{
+ "database_type": schema.StringAttribute{
+			MarkdownDescription: "The type of the database. Possible values: " + utils.ConvertStringSlicesToString(fabkqldatabase.PossibleKqlDatabaseTypeValues(), true, true) + ".",
+ Computed: true,
+ },
+ "eventhouse_id": schema.StringAttribute{
+ MarkdownDescription: "Parent Eventhouse ID.",
+ Computed: true,
+ CustomType: customtypes.UUIDType{},
+ },
+ "ingestion_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Ingestion service URI.",
+ Computed: true,
+ CustomType: customtypes.URLType{},
+ },
+ "query_service_uri": schema.StringAttribute{
+ MarkdownDescription: "Query service URI.",
+ Computed: true,
+ CustomType: customtypes.URLType{},
+ },
+ }
+}
+
+func getResourceKQLDatabaseConfigurationAttributes() map[string]schema.Attribute {
+ return map[string]schema.Attribute{
+ "database_type": schema.StringAttribute{
+ MarkdownDescription: "The type of the KQL database. Accepted values: " + utils.ConvertStringSlicesToString(fabkqldatabase.PossibleKqlDatabaseTypeValues(), true, true) + ".\n\n" +
+ "`" + string(fabkqldatabase.TypeReadWrite) + "` Allows read and write operations on the database.\n\n" +
+ "`" + string(fabkqldatabase.TypeShortcut) + "` A shortcut is an embedded reference allowing read only operations on a source database. The source can be in the same or different tenants, either in an Azure Data Explorer cluster or a Fabric Eventhouse.",
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ stringvalidator.OneOf(utils.ConvertEnumsToStringSlices(fabkqldatabase.PossibleKqlDatabaseTypeValues(), false)...),
+ },
+ },
+ "eventhouse_id": schema.StringAttribute{
+ MarkdownDescription: "Parent Eventhouse ID.",
+ Required: true,
+ CustomType: customtypes.UUIDType{},
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ },
+ "invitation_token": schema.StringAttribute{
+ MarkdownDescription: "Invitation token to follow the source database. Only allowed when `database_type` is `" + string(fabkqldatabase.TypeShortcut) + "`.",
+ Optional: true,
+ Sensitive: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ stringvalidator.ConflictsWith(
+ path.MatchRelative().AtParent().AtName("source_cluster_uri"),
+ path.MatchRelative().AtParent().AtName("source_database_name"),
+ ),
+ superstringvalidator.NullIfAttributeIsOneOf(
+ path.MatchRelative().AtParent().AtName("database_type"),
+ []attr.Value{types.StringValue(string(fabkqldatabase.TypeReadWrite))},
+ ),
+ },
+ },
+ "source_cluster_uri": schema.StringAttribute{
+ MarkdownDescription: "The URI of the source Eventhouse or Azure Data Explorer cluster. Only allowed when `database_type` is `" + string(fabkqldatabase.TypeShortcut) + "`.",
+ Optional: true,
+ CustomType: customtypes.URLType{},
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("invitation_token")),
+ stringvalidator.AlsoRequires(path.MatchRelative().AtParent().AtName("source_database_name")),
+ superstringvalidator.NullIfAttributeIsOneOf(
+ path.MatchRelative().AtParent().AtName("database_type"),
+ []attr.Value{types.StringValue(string(fabkqldatabase.TypeReadWrite))},
+ ),
+ },
+ },
+ "source_database_name": schema.StringAttribute{
+ MarkdownDescription: "The name of the database to follow in the source Eventhouse or Azure Data Explorer cluster. Only allowed when `database_type` is `" + string(fabkqldatabase.TypeShortcut) + "`.",
+ Optional: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("invitation_token")),
+ superstringvalidator.NullIfAttributeIsOneOf(
+ path.MatchRelative().AtParent().AtName("database_type"),
+ []attr.Value{types.StringValue(string(fabkqldatabase.TypeReadWrite))},
+ ),
+ },
+ },
+ }
+}
diff --git a/internal/services/kqlqueryset/data_kql_queryset_test.go b/internal/services/kqlqueryset/data_kql_queryset_test.go
index 0289d4f8..d5ce7b9b 100644
--- a/internal/services/kqlqueryset/data_kql_queryset_test.go
+++ b/internal/services/kqlqueryset/data_kql_queryset_test.go
@@ -150,7 +150,7 @@ func TestUnit_KQLQuerysetDataSource(t *testing.T) {
}
func TestAcc_KQLQuerysetDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["KQLQueryset"].(map[string]any)
diff --git a/internal/services/kqlqueryset/data_kql_querysets_test.go b/internal/services/kqlqueryset/data_kql_querysets_test.go
index 9220c235..57efea19 100644
--- a/internal/services/kqlqueryset/data_kql_querysets_test.go
+++ b/internal/services/kqlqueryset/data_kql_querysets_test.go
@@ -75,7 +75,7 @@ func TestUnit_KQLQuerysetsDataSource(t *testing.T) {
}
func TestAcc_KQLQuerysetsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/lakehouse/data_lakehouse.go b/internal/services/lakehouse/data_lakehouse.go
index c4b88be1..4f26a090 100644
--- a/internal/services/lakehouse/data_lakehouse.go
+++ b/internal/services/lakehouse/data_lakehouse.go
@@ -5,208 +5,88 @@ package lakehouse
import (
"context"
- "fmt"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fablakehouse "github.com/microsoft/fabric-sdk-go/fabric/lakehouse"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSourceWithConfigValidators = (*dataSourceLakehouse)(nil)
- _ datasource.DataSourceWithConfigure = (*dataSourceLakehouse)(nil)
-)
-
-type dataSourceLakehouse struct {
- pConfigData *pconfig.ProviderData
- client *fablakehouse.ItemsClient
-}
-
-func NewDataSourceLakehouse() datasource.DataSource {
- return &dataSourceLakehouse{}
-}
-
-func (d *dataSourceLakehouse) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (d *dataSourceLakehouse) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- markdownDescription := "Get a Fabric " + ItemName + ".\n\n" +
- "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehousePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "onelake_files_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Lakehouse files directory",
- Computed: true,
- },
- "onelake_tables_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Lakehouse tables directory.",
- Computed: true,
- },
- "sql_endpoint_properties": schema.SingleNestedAttribute{
- MarkdownDescription: "An object containing the properties of the SQL endpoint.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehouseSQLEndpointPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "provisioning_status": schema.StringAttribute{
- MarkdownDescription: "The SQL endpoint provisioning status.",
- Computed: true,
- },
- "connection_string": schema.StringAttribute{
- MarkdownDescription: "SQL endpoint connection string.",
- Computed: true,
- },
- "id": schema.StringAttribute{
- MarkdownDescription: "SQL endpoint ID.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- },
- },
- "default_schema": schema.StringAttribute{
- MarkdownDescription: "Default schema of the Lakehouse. This property is returned only for schema enabled Lakehouse.",
- Computed: true,
- },
- },
- }
-
- itemConfig := fabricitem.DataSourceFabricItem{
- Type: ItemType,
- Name: ItemName,
- TFName: ItemTFName,
- MarkdownDescription: markdownDescription,
- IsDisplayNameUnique: true,
- }
-
- resp.Schema = fabricitem.GetDataSourceFabricItemPropertiesSchema(ctx, itemConfig, properties)
-}
+func NewDataSourceLakehouse(ctx context.Context) datasource.DataSource {
+ propertiesSetter := func(ctx context.Context, from *fablakehouse.Properties, to *fabricitem.DataSourceFabricItemPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[lakehousePropertiesModel](ctx)
-func (d *dataSourceLakehouse) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
- return []datasource.ConfigValidator{
- datasourcevalidator.Conflicting(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- datasourcevalidator.ExactlyOneOf(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- }
-}
+ if from != nil {
+ propertiesModel := &lakehousePropertiesModel{}
-func (d *dataSourceLakehouse) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorDataSourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- d.pConfigData = pConfigData
- d.client = fablakehouse.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
-
-func (d *dataSourceLakehouse) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "config": req.Config,
- })
-
- var data dataSourceLakehouseModel
-
- if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
+ if diags := propertiesModel.set(ctx, from); diags.HasError() {
+ return diags
+ }
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- if data.ID.ValueString() != "" {
- diags = d.getByID(ctx, &data)
- } else {
- diags = d.getByDisplayName(ctx, &data)
- }
+ to.Properties = properties
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
-
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties], fabricItem *fabricitem.FabricItemProperties[fablakehouse.Properties]) error {
+ client := fablakehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ respGet, err := client.GetLakehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
-func (d *dataSourceLakehouse) getByID(ctx context.Context, model *dataSourceLakehouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "getting Lakehouse by 'id'")
+ fabricItem.Set(respGet.Lakehouse)
- respGet, err := d.client.GetLakehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
- return diags
+ return nil
}
- return model.set(ctx, respGet.Lakehouse)
-}
-
-func (d *dataSourceLakehouse) getByDisplayName(ctx context.Context, model *dataSourceLakehouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "getting Lakehouse by 'display_name'")
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties], errNotFound fabcore.ResponseError, fabricItem *fabricitem.FabricItemProperties[fablakehouse.Properties]) error {
+ client := fablakehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- var diags diag.Diagnostics
+ pager := client.NewListLakehousesPager(model.WorkspaceID.ValueString(), nil)
+ for pager.More() {
+ page, err := pager.NextPage(ctx)
+ if err != nil {
+ return err
+ }
- pager := d.client.NewListLakehousesPager(model.WorkspaceID.ValueString(), nil)
- for pager.More() {
- page, err := pager.NextPage(ctx)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationList, nil); diags.HasError() {
- return diags
- }
+ for _, entity := range page.Value {
+ if *entity.DisplayName == model.DisplayName.ValueString() {
+ fabricItem.Set(entity)
- for _, entity := range page.Value {
- if *entity.DisplayName == model.DisplayName.ValueString() {
- return model.set(ctx, entity)
+ return nil
+ }
}
}
+
+ return &errNotFound
}
- diags.AddError(
- common.ErrorReadHeader,
- fmt.Sprintf("Unable to find Lakehouse with 'display_name': %s in the Workspace ID: %s ", model.DisplayName.ValueString(), model.WorkspaceID.ValueString()),
- )
+ config := fabricitem.DataSourceFabricItemProperties[lakehousePropertiesModel, fablakehouse.Properties]{
+ DataSourceFabricItem: fabricitem.DataSourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ TFName: ItemTFName,
+ MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" +
+ "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ IsDisplayNameUnique: true,
+ },
+ PropertiesAttributes: getDataSourceLakehousePropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
+ ItemListGetter: itemListGetter,
+ }
- return diags
+ return fabricitem.NewDataSourceFabricItemProperties(config)
}
diff --git a/internal/services/lakehouse/data_lakehouse_table_test.go b/internal/services/lakehouse/data_lakehouse_table_test.go
index 3a19ac3c..cb3e0478 100644
--- a/internal/services/lakehouse/data_lakehouse_table_test.go
+++ b/internal/services/lakehouse/data_lakehouse_table_test.go
@@ -98,7 +98,7 @@ func TestUnit_LakehouseTableDataSource(t *testing.T) {
}
func TestAcc_LakehouseTableDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Lakehouse"].(map[string]any)
diff --git a/internal/services/lakehouse/data_lakehouse_tables_test.go b/internal/services/lakehouse/data_lakehouse_tables_test.go
index 641aad93..bec38586 100644
--- a/internal/services/lakehouse/data_lakehouse_tables_test.go
+++ b/internal/services/lakehouse/data_lakehouse_tables_test.go
@@ -79,7 +79,7 @@ func TestUnit_LakehouseTablesDataSource(t *testing.T) {
}
func TestAcc_LakehouseTablesDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Lakehouse"].(map[string]any)
diff --git a/internal/services/lakehouse/data_lakehouse_test.go b/internal/services/lakehouse/data_lakehouse_test.go
index cd6d03dd..5735007c 100644
--- a/internal/services/lakehouse/data_lakehouse_test.go
+++ b/internal/services/lakehouse/data_lakehouse_test.go
@@ -150,7 +150,7 @@ func TestUnit_LakehouseDataSource(t *testing.T) {
}
func TestAcc_LakehouseDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Lakehouse"].(map[string]any)
diff --git a/internal/services/lakehouse/data_lakehouses.go b/internal/services/lakehouse/data_lakehouses.go
index 0e1f0109..a5b0c413 100644
--- a/internal/services/lakehouse/data_lakehouses.go
+++ b/internal/services/lakehouse/data_lakehouses.go
@@ -4,21 +4,75 @@
package lakehouse
import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fablakehouse "github.com/microsoft/fabric-sdk-go/fabric/lakehouse"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-func NewDataSourceLakehouses() datasource.DataSource {
- config := fabricitem.DataSourceFabricItems{
- Type: ItemType,
- Name: ItemName,
- Names: ItemsName,
- TFName: ItemsTFName,
- MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
- "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport,
+func NewDataSourceLakehouses(ctx context.Context) datasource.DataSource {
+ propertiesSetter := func(ctx context.Context, from *fablakehouse.Properties, to *fabricitem.FabricItemPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[lakehousePropertiesModel](ctx)
+
+ if from != nil {
+ propertiesModel := &lakehousePropertiesModel{}
+
+ if diags := propertiesModel.set(ctx, from); diags.HasError() {
+ return diags
+ }
+
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
+
+ to.Properties = properties
+
+ return nil
+ }
+
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemsPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties], fabricItems *[]fabricitem.FabricItemProperties[fablakehouse.Properties]) error {
+ client := fablakehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
+
+ fabItems := make([]fabricitem.FabricItemProperties[fablakehouse.Properties], 0)
+
+ respList, err := client.ListLakehouses(ctx, model.WorkspaceID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
+
+ for _, entity := range respList {
+ var fabricItem fabricitem.FabricItemProperties[fablakehouse.Properties]
+
+ fabricItem.Set(entity)
+
+ fabItems = append(fabItems, fabricItem)
+ }
+
+ *fabricItems = fabItems
+
+ return nil
+ }
+
+ config := fabricitem.DataSourceFabricItemsProperties[lakehousePropertiesModel, fablakehouse.Properties]{
+ DataSourceFabricItems: fabricitem.DataSourceFabricItems{
+ Type: ItemType,
+ Name: ItemName,
+ Names: ItemsName,
+ TFName: ItemsTFName,
+ MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
+ "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ },
+ PropertiesAttributes: getDataSourceLakehousePropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemListGetter: itemListGetter,
}
- return fabricitem.NewDataSourceFabricItems(config)
+ return fabricitem.NewDataSourceFabricItemsProperties(config)
}
diff --git a/internal/services/lakehouse/data_lakehouses_test.go b/internal/services/lakehouse/data_lakehouses_test.go
index 2e65faa3..ded15f77 100644
--- a/internal/services/lakehouse/data_lakehouses_test.go
+++ b/internal/services/lakehouse/data_lakehouses_test.go
@@ -22,11 +22,11 @@ var (
func TestUnit_LakehousesDataSource(t *testing.T) {
workspaceID := testhelp.RandomUUID()
- entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID)
+ entity := fakes.NewRandomLakehouseWithWorkspace(workspaceID)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomLakehouseWithWorkspace(workspaceID))
fakes.FakeServer.Upsert(entity)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomLakehouseWithWorkspace(workspaceID))
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, nil, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
@@ -75,7 +75,7 @@ func TestUnit_LakehousesDataSource(t *testing.T) {
}
func TestAcc_LakehousesDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/lakehouse/models.go b/internal/services/lakehouse/models.go
index 0f6629e7..da3118d9 100644
--- a/internal/services/lakehouse/models.go
+++ b/internal/services/lakehouse/models.go
@@ -14,39 +14,6 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
)
-type baseLakehouseModel struct {
- WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
- ID customtypes.UUID `tfsdk:"id"`
- DisplayName types.String `tfsdk:"display_name"`
- Description types.String `tfsdk:"description"`
- Properties supertypes.SingleNestedObjectValueOf[lakehousePropertiesModel] `tfsdk:"properties"`
-}
-
-func (to *baseLakehouseModel) set(ctx context.Context, from fablakehouse.Lakehouse) diag.Diagnostics {
- to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
- to.ID = customtypes.NewUUIDPointerValue(from.ID)
- to.DisplayName = types.StringPointerValue(from.DisplayName)
- to.Description = types.StringPointerValue(from.Description)
-
- properties := supertypes.NewSingleNestedObjectValueOfNull[lakehousePropertiesModel](ctx)
-
- if from.Properties != nil {
- propertiesModel := &lakehousePropertiesModel{}
-
- if diags := propertiesModel.set(ctx, from.Properties); diags.HasError() {
- return diags
- }
-
- if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
- return diags
- }
- }
-
- to.Properties = properties
-
- return nil
-}
-
type lakehousePropertiesModel struct {
OneLakeFilesPath types.String `tfsdk:"onelake_files_path"`
OneLakeTablesPath types.String `tfsdk:"onelake_tables_path"`
diff --git a/internal/services/lakehouse/models_data_lakehouse.go b/internal/services/lakehouse/models_data_lakehouse.go
deleted file mode 100644
index 5ee9e3bc..00000000
--- a/internal/services/lakehouse/models_data_lakehouse.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) Microsoft Corporation
-// SPDX-License-Identifier: MPL-2.0
-
-package lakehouse
-
-import (
- "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
-)
-
-type dataSourceLakehouseModel struct {
- baseLakehouseModel
- Timeouts timeouts.Value `tfsdk:"timeouts"`
-}
diff --git a/internal/services/lakehouse/models_resource_lakehouse.go b/internal/services/lakehouse/models_resource_lakehouse.go
index 4fd9ce61..cbbbd366 100644
--- a/internal/services/lakehouse/models_resource_lakehouse.go
+++ b/internal/services/lakehouse/models_resource_lakehouse.go
@@ -4,52 +4,9 @@
package lakehouse
import (
- "context"
-
- supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
- fablakehouse "github.com/microsoft/fabric-sdk-go/fabric/lakehouse"
)
-type resourceLakehouseModel struct {
- baseLakehouseModel
- Configuration supertypes.SingleNestedObjectValueOf[lakehouseConfigurationModel] `tfsdk:"configuration"`
- Timeouts timeouts.Value `tfsdk:"timeouts"`
-}
type lakehouseConfigurationModel struct {
EnableSchemas types.Bool `tfsdk:"enable_schemas"`
}
-type requestCreateLakehouse struct {
- fablakehouse.CreateLakehouseRequest
-}
-
-func (to *requestCreateLakehouse) set(ctx context.Context, from resourceLakehouseModel) diag.Diagnostics {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-
- if !from.Configuration.IsNull() && !from.Configuration.IsUnknown() {
- configuration, diags := from.Configuration.Get(ctx)
- if diags.HasError() {
- return diags
- }
-
- if configuration.EnableSchemas.ValueBool() {
- to.CreationPayload = &fablakehouse.CreationPayload{
- EnableSchemas: configuration.EnableSchemas.ValueBoolPointer(),
- }
- }
- }
-
- return nil
-}
-
-type requestUpdateLakehouse struct {
- fablakehouse.UpdateLakehouseRequest
-}
-
-func (to *requestUpdateLakehouse) set(from resourceLakehouseModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
diff --git a/internal/services/lakehouse/resource_lakehouse.go b/internal/services/lakehouse/resource_lakehouse.go
index 1411f6f6..cef09a26 100644
--- a/internal/services/lakehouse/resource_lakehouse.go
+++ b/internal/services/lakehouse/resource_lakehouse.go
@@ -5,416 +5,111 @@ package lakehouse
import (
"context"
- "fmt"
- "strings"
+ "net/http"
"time"
+ azto "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fablakehouse "github.com/microsoft/fabric-sdk-go/fabric/lakehouse"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ resource.ResourceWithConfigure = (*resourceLakehouse)(nil)
- _ resource.ResourceWithImportState = (*resourceLakehouse)(nil)
-)
-
-type resourceLakehouse struct {
- pConfigData *pconfig.ProviderData
- client *fablakehouse.ItemsClient
-}
-
-func NewResourceLakehouse() resource.Resource {
- return &resourceLakehouse{}
-}
-
-func (r *resourceLakehouse) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (r *resourceLakehouse) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- markdownDescription := "This resource manages a Fabric " + ItemName + ".\n\n" +
- "See [" + ItemName + "](" + ItemDocsURL + ") for more information.\n\n" +
- ItemDocsSPNSupport
-
- properties := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehousePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "onelake_files_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Lakehouse files directory",
- Computed: true,
- },
- "onelake_tables_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Lakehouse tables directory.",
- Computed: true,
- },
- "sql_endpoint_properties": schema.SingleNestedAttribute{
- MarkdownDescription: "An object containing the properties of the SQL endpoint.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehouseSQLEndpointPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "provisioning_status": schema.StringAttribute{
- MarkdownDescription: "The SQL endpoint provisioning status.",
- Computed: true,
- },
- "connection_string": schema.StringAttribute{
- MarkdownDescription: "SQL endpoint connection string.",
- Computed: true,
- },
- "id": schema.StringAttribute{
- MarkdownDescription: "SQL endpoint ID.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- },
- },
- "default_schema": schema.StringAttribute{
- MarkdownDescription: "Default schema of the Lakehouse. This property is returned only for schema enabled Lakehouse.",
- Computed: true,
- },
- },
- }
-
- configuration := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " creation configuration.\n\n" +
- "Any changes to this configuration will result in recreation of the " + ItemName + ".",
- Optional: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehouseConfigurationModel](ctx),
- PlanModifiers: []planmodifier.Object{
- objectplanmodifier.RequiresReplace(),
- },
- Attributes: map[string]schema.Attribute{
- "enable_schemas": schema.BoolAttribute{
- MarkdownDescription: "Schema enabled Lakehouse.",
- Required: true,
- PlanModifiers: []planmodifier.Bool{
- boolplanmodifier.RequiresReplace(),
- },
- },
- },
- }
-
- resp.Schema = fabricitem.GetResourceFabricItemPropertiesCreationSchema(ctx, ItemName, markdownDescription, 123, 256, true, properties, configuration)
-}
-
-func (r *resourceLakehouse) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorResourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- r.pConfigData = pConfigData
- r.client = fablakehouse.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
-
-func (r *resourceLakehouse) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "CREATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- })
-
- var plan resourceLakehouseModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqCreate requestCreateLakehouse
-
- if resp.Diagnostics.Append(reqCreate.set(ctx, plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- respCreate, err := r.client.CreateLakehouse(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateLakehouseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.Append(plan.set(ctx, respCreate.Lakehouse)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceLakehouse) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "state": req.State,
- })
-
- var state resourceLakehouseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- diags = r.get(ctx, &state)
- if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
- resp.State.RemoveResource(ctx)
-
- resp.Diagnostics.Append(diags...)
+func NewResourceLakehouse(ctx context.Context) resource.Resource {
+ creationPayloadSetter := func(_ context.Context, from lakehouseConfigurationModel) (*fablakehouse.CreationPayload, diag.Diagnostics) {
+ if from.EnableSchemas.ValueBool() {
+ cp := &fablakehouse.CreationPayload{
+ EnableSchemas: from.EnableSchemas.ValueBoolPointer(),
+ }
- return
- }
-
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceLakehouse) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "UPDATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- "state": req.State,
- })
-
- var plan resourceLakehouseModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqUpdate requestUpdateLakehouse
-
- reqUpdate.set(plan)
-
- respUpdate, err := r.client.UpdateLakehouse(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateLakehouseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.Append(plan.set(ctx, respUpdate.Lakehouse)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceLakehouse) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "DELETE", map[string]any{
- "state": req.State,
- })
-
- var state resourceLakehouseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
+ return cp, nil
+ }
- timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil, nil
}
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- _, err := r.client.DeleteLakehouse(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
- return
- }
+ propertiesSetter := func(ctx context.Context, from *fablakehouse.Properties, to *fabricitem.ResourceFabricItemConfigPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties, lakehouseConfigurationModel, fablakehouse.CreationPayload]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[lakehousePropertiesModel](ctx)
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "end",
- })
-}
+ if from != nil {
+ propertiesModel := &lakehousePropertiesModel{}
-func (r *resourceLakehouse) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "IMPORT", map[string]any{
- "id": req.ID,
- })
+ if diags := propertiesModel.set(ctx, from); diags.HasError() {
+ return diags
+ }
- workspaceID, lakehouseID, found := strings.Cut(req.ID, "/")
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- if !found {
- resp.Diagnostics.AddError(
- common.ErrorImportIdentifierHeader,
- fmt.Sprintf(common.ErrorImportIdentifierDetails, "WorkspaceID/LakehouseID"),
- )
+ to.Properties = properties
- return
+ return nil
}
- uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
- resp.Diagnostics.Append(diags...)
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.ResourceFabricItemConfigPropertiesModel[lakehousePropertiesModel, fablakehouse.Properties, lakehouseConfigurationModel, fablakehouse.CreationPayload], fabricItem *fabricitem.FabricItemProperties[fablakehouse.Properties]) error {
+ client := fablakehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- uuidID, diags := customtypes.NewUUIDValueMust(lakehouseID)
- resp.Diagnostics.Append(diags...)
+ for {
+ respGet, err := client.GetLakehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
- if resp.Diagnostics.HasError() {
- return
- }
+ if respGet.Properties == nil || respGet.Properties.SQLEndpointProperties == nil {
+ tflog.Info(ctx, "Lakehouse provisioning not done, waiting 30 seconds before retrying")
+ time.Sleep(30 * time.Second) // lintignore:R018
- var configuration supertypes.SingleNestedObjectValueOf[lakehouseConfigurationModel]
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("configuration"), &configuration)...); resp.Diagnostics.HasError() {
- return
- }
+ continue
+ }
- var timeout timeouts.Value
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
- return
- }
+ switch *respGet.Properties.SQLEndpointProperties.ProvisioningStatus {
+ case fablakehouse.SQLEndpointProvisioningStatusFailed:
+ return &fabcore.ResponseError{
+ ErrorCode: (string)(fablakehouse.SQLEndpointProvisioningStatusFailed),
+ StatusCode: http.StatusBadRequest,
+ ErrorResponse: &fabcore.ErrorResponse{
+ ErrorCode: azto.Ptr((string)(fablakehouse.SQLEndpointProvisioningStatusFailed)),
+ Message: azto.Ptr("Lakehouse SQL endpoint provisioning failed"),
+ },
+ }
- state := resourceLakehouseModel{}
- state.ID = uuidID
- state.WorkspaceID = uuidWorkspaceID
- state.Configuration = configuration
- state.Timeouts = timeout
+ case fablakehouse.SQLEndpointProvisioningStatusSuccess:
+ fabricItem.Set(respGet.Lakehouse)
- if resp.Diagnostics.Append(r.get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
+ return nil
+ default:
+ tflog.Info(ctx, "Lakehouse provisioning in progress, waiting 30 seconds before retrying")
+ time.Sleep(30 * time.Second) // lintignore:R018
+ }
+ }
}
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
+ config := fabricitem.ResourceFabricItemConfigProperties[lakehousePropertiesModel, fablakehouse.Properties, lakehouseConfigurationModel, fablakehouse.CreationPayload]{
+ ResourceFabricItem: fabricitem.ResourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ NameRenameAllowed: true,
+ TFName: ItemTFName,
+ MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" +
+ "Use this resource to manage a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ DisplayNameMaxLength: 123,
+ DescriptionMaxLength: 256,
+ },
+ IsConfigRequired: false,
+ ConfigAttributes: getResourceLakehouseConfigurationAttributes(),
+ CreationPayloadSetter: creationPayloadSetter,
+ PropertiesAttributes: getResourceLakehousePropertiesAttributes(ctx),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
}
-}
-
-func (r *resourceLakehouse) get(ctx context.Context, model *resourceLakehouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "GET", map[string]any{
- "workspace_id": model.WorkspaceID.ValueString(),
- "id": model.ID.ValueString(),
- })
-
- for {
- respGet, err := r.client.GetLakehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, fabcore.ErrCommon.EntityNotFound); diags.HasError() {
- return diags
- }
-
- if respGet.Properties == nil || respGet.Properties.SQLEndpointProperties == nil {
- tflog.Info(ctx, "Lakehouse provisioning not done, waiting 30 seconds before retrying")
- time.Sleep(30 * time.Second) // lintignore:R018
- continue
- }
-
- switch *respGet.Properties.SQLEndpointProperties.ProvisioningStatus {
- case fablakehouse.SQLEndpointProvisioningStatusFailed:
- var diags diag.Diagnostics
-
- diags.AddError(
- "provisioning failed",
- "Lakehouse SQL endpoint provisioning failed")
-
- return diags
-
- case fablakehouse.SQLEndpointProvisioningStatusSuccess:
- return model.set(ctx, respGet.Lakehouse)
- default:
- tflog.Info(ctx, "Lakehouse provisioning in progress, waiting 30 seconds before retrying")
- time.Sleep(30 * time.Second) // lintignore:R018
- }
- }
+ return fabricitem.NewResourceFabricItemConfigProperties(config)
}
diff --git a/internal/services/lakehouse/resource_lakehouse_test.go b/internal/services/lakehouse/resource_lakehouse_test.go
index db948280..1dc7a62b 100644
--- a/internal/services/lakehouse/resource_lakehouse_test.go
+++ b/internal/services/lakehouse/resource_lakehouse_test.go
@@ -224,7 +224,7 @@ func TestUnit_LakehouseResource_CRUD(t *testing.T) {
}
func TestAcc_LakehouseResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
@@ -272,7 +272,7 @@ func TestAcc_LakehouseResource_CRUD(t *testing.T) {
}
func TestAcc_LakehouseConfigurationResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName1 := testhelp.RandomName()
diff --git a/internal/services/lakehouse/schema_data_lakehouse.go b/internal/services/lakehouse/schema_data_lakehouse.go
new file mode 100644
index 00000000..c7d7a6ec
--- /dev/null
+++ b/internal/services/lakehouse/schema_data_lakehouse.go
@@ -0,0 +1,50 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package lakehouse
+
+import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+)
+
+func getDataSourceLakehousePropertiesAttributes(ctx context.Context) map[string]schema.Attribute {
+ return map[string]schema.Attribute{
+ "onelake_files_path": schema.StringAttribute{
+ MarkdownDescription: "OneLake path to the Lakehouse files directory",
+ Computed: true,
+ },
+ "onelake_tables_path": schema.StringAttribute{
+ MarkdownDescription: "OneLake path to the Lakehouse tables directory.",
+ Computed: true,
+ },
+ "sql_endpoint_properties": schema.SingleNestedAttribute{
+ MarkdownDescription: "An object containing the properties of the SQL endpoint.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehouseSQLEndpointPropertiesModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "provisioning_status": schema.StringAttribute{
+ MarkdownDescription: "The SQL endpoint provisioning status.",
+ Computed: true,
+ },
+ "connection_string": schema.StringAttribute{
+ MarkdownDescription: "SQL endpoint connection string.",
+ Computed: true,
+ },
+ "id": schema.StringAttribute{
+ MarkdownDescription: "SQL endpoint ID.",
+ Computed: true,
+ CustomType: customtypes.UUIDType{},
+ },
+ },
+ },
+ "default_schema": schema.StringAttribute{
+ MarkdownDescription: "Default schema of the Lakehouse. This property is returned only for schema enabled Lakehouse.",
+ Computed: true,
+ },
+ }
+}
diff --git a/internal/services/lakehouse/schema_resource_lakehouse.go b/internal/services/lakehouse/schema_resource_lakehouse.go
new file mode 100644
index 00000000..a72f8a8d
--- /dev/null
+++ b/internal/services/lakehouse/schema_resource_lakehouse.go
@@ -0,0 +1,64 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package lakehouse
+
+import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+
+ "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
+)
+
+func getResourceLakehousePropertiesAttributes(ctx context.Context) map[string]schema.Attribute {
+ return map[string]schema.Attribute{
+ "onelake_files_path": schema.StringAttribute{
+ MarkdownDescription: "OneLake path to the Lakehouse files directory",
+ Computed: true,
+ },
+ "onelake_tables_path": schema.StringAttribute{
+ MarkdownDescription: "OneLake path to the Lakehouse tables directory.",
+ Computed: true,
+ },
+ "sql_endpoint_properties": schema.SingleNestedAttribute{
+ MarkdownDescription: "An object containing the properties of the SQL endpoint.",
+ Computed: true,
+ CustomType: supertypes.NewSingleNestedObjectTypeOf[lakehouseSQLEndpointPropertiesModel](ctx),
+ Attributes: map[string]schema.Attribute{
+ "provisioning_status": schema.StringAttribute{
+ MarkdownDescription: "The SQL endpoint provisioning status.",
+ Computed: true,
+ },
+ "connection_string": schema.StringAttribute{
+ MarkdownDescription: "SQL endpoint connection string.",
+ Computed: true,
+ },
+ "id": schema.StringAttribute{
+ MarkdownDescription: "SQL endpoint ID.",
+ Computed: true,
+ CustomType: customtypes.UUIDType{},
+ },
+ },
+ },
+ "default_schema": schema.StringAttribute{
+ MarkdownDescription: "Default schema of the Lakehouse. This property is returned only for schema enabled Lakehouse.",
+ Computed: true,
+ },
+ }
+}
+
+func getResourceLakehouseConfigurationAttributes() map[string]schema.Attribute {
+ return map[string]schema.Attribute{
+ "enable_schemas": schema.BoolAttribute{
+ MarkdownDescription: "Schema enabled Lakehouse.",
+ Required: true,
+ PlanModifiers: []planmodifier.Bool{
+ boolplanmodifier.RequiresReplace(),
+ },
+ },
+ }
+}
diff --git a/internal/services/mlexperiment/data_ml_experiment_test.go b/internal/services/mlexperiment/data_ml_experiment_test.go
index ccc536af..b123a904 100644
--- a/internal/services/mlexperiment/data_ml_experiment_test.go
+++ b/internal/services/mlexperiment/data_ml_experiment_test.go
@@ -153,7 +153,7 @@ func TestAcc_MLExperimentDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["MLExperiment"].(map[string]any)
diff --git a/internal/services/mlexperiment/data_ml_experiments_test.go b/internal/services/mlexperiment/data_ml_experiments_test.go
index ff5c3d58..2c6986b9 100644
--- a/internal/services/mlexperiment/data_ml_experiments_test.go
+++ b/internal/services/mlexperiment/data_ml_experiments_test.go
@@ -79,7 +79,7 @@ func TestAcc_MLExperimentsDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/mlexperiment/resource_ml_experiment_test.go b/internal/services/mlexperiment/resource_ml_experiment_test.go
index 54706fea..25b30157 100644
--- a/internal/services/mlexperiment/resource_ml_experiment_test.go
+++ b/internal/services/mlexperiment/resource_ml_experiment_test.go
@@ -215,7 +215,7 @@ func TestAcc_MLExperimentResource_CRUD(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
diff --git a/internal/services/mlmodel/data_ml_model_test.go b/internal/services/mlmodel/data_ml_model_test.go
index ee8e3854..a4a2b053 100644
--- a/internal/services/mlmodel/data_ml_model_test.go
+++ b/internal/services/mlmodel/data_ml_model_test.go
@@ -154,7 +154,7 @@ func TestAcc_MLModelDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["MLModel"].(map[string]any)
diff --git a/internal/services/mlmodel/data_ml_models_test.go b/internal/services/mlmodel/data_ml_models_test.go
index 3a83e8d1..ba46e80d 100644
--- a/internal/services/mlmodel/data_ml_models_test.go
+++ b/internal/services/mlmodel/data_ml_models_test.go
@@ -79,7 +79,7 @@ func TestAcc_MLModelsDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/mlmodel/resource_ml_model_test.go b/internal/services/mlmodel/resource_ml_model_test.go
index 8fa6d037..208e670c 100644
--- a/internal/services/mlmodel/resource_ml_model_test.go
+++ b/internal/services/mlmodel/resource_ml_model_test.go
@@ -215,7 +215,7 @@ func TestAcc_MLModelResource_CRUD(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
diff --git a/internal/services/notebook/data_notebook_test.go b/internal/services/notebook/data_notebook_test.go
index a8df755a..cd7a7816 100644
--- a/internal/services/notebook/data_notebook_test.go
+++ b/internal/services/notebook/data_notebook_test.go
@@ -150,7 +150,7 @@ func TestUnit_NotebookDataSource(t *testing.T) {
}
func TestAcc_NotebookDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Notebook"].(map[string]any)
diff --git a/internal/services/notebook/data_notebooks_test.go b/internal/services/notebook/data_notebooks_test.go
index 5d3ad52a..6bdde9c9 100644
--- a/internal/services/notebook/data_notebooks_test.go
+++ b/internal/services/notebook/data_notebooks_test.go
@@ -75,7 +75,7 @@ func TestUnit_NotebooksDataSource(t *testing.T) {
}
func TestAcc_NotebooksDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/notebook/resource_notebook_test.go b/internal/services/notebook/resource_notebook_test.go
index 4964133a..0839a038 100644
--- a/internal/services/notebook/resource_notebook_test.go
+++ b/internal/services/notebook/resource_notebook_test.go
@@ -250,7 +250,7 @@ func TestUnit_NotebookResource_CRUD(t *testing.T) {
}
func TestAcc_NotebookResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
diff --git a/internal/services/report/data_report_test.go b/internal/services/report/data_report_test.go
index ae21f46d..3a2b32c2 100644
--- a/internal/services/report/data_report_test.go
+++ b/internal/services/report/data_report_test.go
@@ -112,7 +112,7 @@ func TestUnit_ReportDataSource(t *testing.T) {
}
func TestAcc_ReportDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Report"].(map[string]any)
diff --git a/internal/services/report/data_reports_test.go b/internal/services/report/data_reports_test.go
index 0ccd3070..ee707f82 100644
--- a/internal/services/report/data_reports_test.go
+++ b/internal/services/report/data_reports_test.go
@@ -75,7 +75,7 @@ func TestUnit_ReportsDataSource(t *testing.T) {
}
func TestAcc_ReportsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/report/resource_report_test.go b/internal/services/report/resource_report_test.go
index 57964a52..ff56b02b 100644
--- a/internal/services/report/resource_report_test.go
+++ b/internal/services/report/resource_report_test.go
@@ -278,7 +278,7 @@ func TestUnit_ReportResource_CRUD(t *testing.T) {
}
func TestAcc_ReportResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
semanticModel := testhelp.WellKnown()["SemanticModel"].(map[string]any)
diff --git a/internal/services/semanticmodel/data_semantic_model_test.go b/internal/services/semanticmodel/data_semantic_model_test.go
index 2a27b96a..302f8865 100644
--- a/internal/services/semanticmodel/data_semantic_model_test.go
+++ b/internal/services/semanticmodel/data_semantic_model_test.go
@@ -112,7 +112,7 @@ func TestUnit_SemanticModelDataSource(t *testing.T) {
}
func TestAcc_SemanticModelDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["SemanticModel"].(map[string]any)
diff --git a/internal/services/semanticmodel/data_semantic_models_test.go b/internal/services/semanticmodel/data_semantic_models_test.go
index e4765bdc..506ca7f3 100644
--- a/internal/services/semanticmodel/data_semantic_models_test.go
+++ b/internal/services/semanticmodel/data_semantic_models_test.go
@@ -75,7 +75,7 @@ func TestUnit_SemanticModelsDataSource(t *testing.T) {
}
func TestAcc_SemanticModelsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/semanticmodel/resource_semantic_model_test.go b/internal/services/semanticmodel/resource_semantic_model_test.go
index b430f6e1..40af2ada 100644
--- a/internal/services/semanticmodel/resource_semantic_model_test.go
+++ b/internal/services/semanticmodel/resource_semantic_model_test.go
@@ -269,7 +269,7 @@ func TestUnit_SemanticModelResource_CRUD(t *testing.T) {
}
func TestAcc_SemanticModelResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
diff --git a/internal/services/spark/data_spark_environment_settings_test.go b/internal/services/spark/data_spark_environment_settings_test.go
index 9117d4f1..323feb0c 100644
--- a/internal/services/spark/data_spark_environment_settings_test.go
+++ b/internal/services/spark/data_spark_environment_settings_test.go
@@ -18,7 +18,7 @@ var (
)
func TestAcc_SparkEnvironmentSettingsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
environment := testhelp.WellKnown()["Environment"].(map[string]any)
diff --git a/internal/services/sparkjobdefinition/data_spark_job_definition.go b/internal/services/sparkjobdefinition/data_spark_job_definition.go
index 7fd54d14..407c9ff3 100644
--- a/internal/services/sparkjobdefinition/data_spark_job_definition.go
+++ b/internal/services/sparkjobdefinition/data_spark_job_definition.go
@@ -8,7 +8,6 @@ import (
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/microsoft/fabric-sdk-go/fabric"
fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
@@ -17,19 +16,7 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-func NewDataSourceSparkJobDefinition(ctx context.Context) datasource.DataSource {
- propertiesSchema := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[sparkJobDefinitionPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "onelake_root_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Spark Job Definition root directory.",
- Computed: true,
- },
- },
- }
-
+func NewDataSourceSparkJobDefinition() datasource.DataSource {
propertiesSetter := func(ctx context.Context, from *fabsparkjobdefinition.Properties, to *fabricitem.DataSourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]) diag.Diagnostics {
properties := supertypes.NewSingleNestedObjectValueOfNull[sparkJobDefinitionPropertiesModel](ctx)
@@ -37,8 +24,7 @@ func NewDataSourceSparkJobDefinition(ctx context.Context) datasource.DataSource
propertiesModel := &sparkJobDefinitionPropertiesModel{}
propertiesModel.set(from)
- diags := properties.Set(ctx, propertiesModel)
- if diags.HasError() {
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
return diags
}
}
@@ -96,10 +82,10 @@ func NewDataSourceSparkJobDefinition(ctx context.Context) datasource.DataSource
FormatTypes: ItemFormatTypes,
DefinitionPathKeys: ItemDefinitionPaths,
},
- PropertiesSchema: propertiesSchema,
- PropertiesSetter: propertiesSetter,
- ItemGetter: itemGetter,
- ItemListGetter: itemListGetter,
+ PropertiesAttributes: getDataSourceSparkJobDefinitionPropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
+ ItemListGetter: itemListGetter,
}
return fabricitem.NewDataSourceFabricItemDefinitionProperties(config)
diff --git a/internal/services/sparkjobdefinition/data_spark_job_definition_test.go b/internal/services/sparkjobdefinition/data_spark_job_definition_test.go
index b1b01f8b..cb6d1700 100644
--- a/internal/services/sparkjobdefinition/data_spark_job_definition_test.go
+++ b/internal/services/sparkjobdefinition/data_spark_job_definition_test.go
@@ -150,7 +150,7 @@ func TestUnit_SparkJobDefinitionDataSource(t *testing.T) {
}
func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["SparkJobDefinition"].(map[string]any)
@@ -158,9 +158,10 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
entityDisplayName := entity["displayName"].(string)
entityDescription := entity["description"].(string)
- resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
+ resource.ParallelTest(t, testhelp.NewTestAccCase(t, &testDataSourceItemFQN, nil, []resource.TestStep{
// read by id
{
+ ResourceName: testDataSourceItemFQN,
Config: at.CompileConfig(
testDataSourceItemHeader,
map[string]any{
@@ -178,6 +179,7 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
},
// read by id - not found
{
+ ResourceName: testDataSourceItemFQN,
Config: at.CompileConfig(
testDataSourceItemHeader,
map[string]any{
@@ -189,6 +191,7 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
},
// read by name
{
+ ResourceName: testDataSourceItemFQN,
Config: at.CompileConfig(
testDataSourceItemHeader,
map[string]any{
@@ -206,6 +209,7 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
},
// read by name - not found
{
+ ResourceName: testDataSourceItemFQN,
Config: at.CompileConfig(
testDataSourceItemHeader,
map[string]any{
@@ -217,6 +221,7 @@ func TestAcc_SparkJobDefinitionDataSource(t *testing.T) {
},
// read by id with definition
{
+ ResourceName: testDataSourceItemFQN,
Config: at.CompileConfig(
testDataSourceItemHeader,
map[string]any{
diff --git a/internal/services/sparkjobdefinition/data_spark_job_definitions.go b/internal/services/sparkjobdefinition/data_spark_job_definitions.go
index fe77242a..b188246d 100644
--- a/internal/services/sparkjobdefinition/data_spark_job_definitions.go
+++ b/internal/services/sparkjobdefinition/data_spark_job_definitions.go
@@ -8,7 +8,6 @@ import (
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/microsoft/fabric-sdk-go/fabric"
fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition"
@@ -16,19 +15,7 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-func NewDataSourceSparkJobDefinitions(ctx context.Context) datasource.DataSource {
- propertiesSchema := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[sparkJobDefinitionPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "onelake_root_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Spark Job Definition root directory.",
- Computed: true,
- },
- },
- }
-
+func NewDataSourceSparkJobDefinitions() datasource.DataSource {
propertiesSetter := func(ctx context.Context, from *fabsparkjobdefinition.Properties, to *fabricitem.FabricItemPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]) diag.Diagnostics {
properties := supertypes.NewSingleNestedObjectValueOfNull[sparkJobDefinitionPropertiesModel](ctx)
@@ -36,8 +23,7 @@ func NewDataSourceSparkJobDefinitions(ctx context.Context) datasource.DataSource
propertiesModel := &sparkJobDefinitionPropertiesModel{}
propertiesModel.set(from)
- diags := properties.Set(ctx, propertiesModel)
- if diags.HasError() {
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
return diags
}
}
@@ -80,9 +66,9 @@ func NewDataSourceSparkJobDefinitions(ctx context.Context) datasource.DataSource
"Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
ItemDocsSPNSupport,
},
- PropertiesSchema: propertiesSchema,
- PropertiesSetter: propertiesSetter,
- ItemListGetter: itemListGetter,
+ PropertiesAttributes: getDataSourceSparkJobDefinitionPropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemListGetter: itemListGetter,
}
return fabricitem.NewDataSourceFabricItemsProperties(config)
diff --git a/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go b/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go
index daefe6d1..388e8aa1 100644
--- a/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go
+++ b/internal/services/sparkjobdefinition/data_spark_job_definitions_test.go
@@ -69,13 +69,14 @@ func TestUnit_SparkJobDefinitionsDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "workspace_id", entity.WorkspaceID),
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "values.1.id", entity.ID),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.onelake_root_path"),
),
},
}))
}
func TestAcc_SparkJobDefinitionsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
@@ -90,6 +91,7 @@ func TestAcc_SparkJobDefinitionsDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testDataSourceItemsFQN, "workspace_id", workspaceID),
resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.onelake_root_path"),
),
},
},
diff --git a/internal/services/sparkjobdefinition/resource_spark_job_definition.go b/internal/services/sparkjobdefinition/resource_spark_job_definition.go
index e09f01b4..d842990a 100644
--- a/internal/services/sparkjobdefinition/resource_spark_job_definition.go
+++ b/internal/services/sparkjobdefinition/resource_spark_job_definition.go
@@ -11,7 +11,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/microsoft/fabric-sdk-go/fabric"
fabsparkjobdefinition "github.com/microsoft/fabric-sdk-go/fabric/sparkjobdefinition"
@@ -19,19 +18,7 @@ import (
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-func NewResourceSparkJobDefinition(ctx context.Context) resource.Resource {
- propertiesSchema := schema.SingleNestedAttribute{
- MarkdownDescription: "The " + ItemName + " properties.",
- Computed: true,
- CustomType: supertypes.NewSingleNestedObjectTypeOf[sparkJobDefinitionPropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "onelake_root_path": schema.StringAttribute{
- MarkdownDescription: "OneLake path to the Spark Job Definition root directory.",
- Computed: true,
- },
- },
- }
-
+func NewResourceSparkJobDefinition() resource.Resource {
propertiesSetter := func(ctx context.Context, from *fabsparkjobdefinition.Properties, to *fabricitem.ResourceFabricItemDefinitionPropertiesModel[sparkJobDefinitionPropertiesModel, fabsparkjobdefinition.Properties]) diag.Diagnostics {
properties := supertypes.NewSingleNestedObjectValueOfNull[sparkJobDefinitionPropertiesModel](ctx)
@@ -85,9 +72,9 @@ func NewResourceSparkJobDefinition(ctx context.Context) resource.Resource {
DefinitionRequired: false,
DefinitionEmpty: ItemDefinitionEmpty,
},
- PropertiesSchema: propertiesSchema,
- PropertiesSetter: propertiesSetter,
- ItemGetter: itemGetter,
+ PropertiesAttributes: getResourceSparkJobDefinitionPropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
}
return fabricitem.NewResourceFabricItemDefinitionProperties(config)
diff --git a/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go b/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go
index eba7fd27..057529af 100644
--- a/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go
+++ b/internal/services/sparkjobdefinition/resource_spark_job_definition_test.go
@@ -242,6 +242,7 @@ func TestUnit_SparkJobDefinitionResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityAfter.DisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.onelake_root_path"),
),
},
// Delete testing automatically occurs in TestCase
@@ -249,7 +250,7 @@ func TestUnit_SparkJobDefinitionResource_CRUD(t *testing.T) {
}
func TestAcc_SparkJobDefinitionResource_CRUD(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
@@ -273,6 +274,7 @@ func TestAcc_SparkJobDefinitionResource_CRUD(t *testing.T) {
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.onelake_root_path"),
),
},
// Update and Read
@@ -291,6 +293,7 @@ func TestAcc_SparkJobDefinitionResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "definition_update_enabled", "true"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.onelake_root_path"),
),
},
},
diff --git a/internal/services/sparkjobdefinition/schema_data_spark_job_definition.go b/internal/services/sparkjobdefinition/schema_data_spark_job_definition.go
new file mode 100644
index 00000000..550d9c04
--- /dev/null
+++ b/internal/services/sparkjobdefinition/schema_data_spark_job_definition.go
@@ -0,0 +1,19 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package sparkjobdefinition
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func getDataSourceSparkJobDefinitionPropertiesAttributes() map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "onelake_root_path": schema.StringAttribute{
+ MarkdownDescription: "OneLake path to the Spark Job Definition root directory.",
+ Computed: true,
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/sparkjobdefinition/schema_resource_spark_job_definition.go b/internal/services/sparkjobdefinition/schema_resource_spark_job_definition.go
new file mode 100644
index 00000000..a6bbc4f8
--- /dev/null
+++ b/internal/services/sparkjobdefinition/schema_resource_spark_job_definition.go
@@ -0,0 +1,19 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package sparkjobdefinition
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func getResourceSparkJobDefinitionPropertiesAttributes() map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "onelake_root_path": schema.StringAttribute{
+ MarkdownDescription: "OneLake path to the Spark Job Definition root directory.",
+ Computed: true,
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/sqlendpoint/data_sql_endpoints_test.go b/internal/services/sqlendpoint/data_sql_endpoints_test.go
index 61139f2d..d3c53aca 100644
--- a/internal/services/sqlendpoint/data_sql_endpoints_test.go
+++ b/internal/services/sqlendpoint/data_sql_endpoints_test.go
@@ -79,7 +79,7 @@ func TestAcc_SQLEndpointsDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/warehouse/data_warehouse.go b/internal/services/warehouse/data_warehouse.go
index 3f106b3d..ab23f050 100644
--- a/internal/services/warehouse/data_warehouse.go
+++ b/internal/services/warehouse/data_warehouse.go
@@ -5,206 +5,85 @@ package warehouse
import (
"context"
- "fmt"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
- "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
fabwarehouse "github.com/microsoft/fabric-sdk-go/fabric/warehouse"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSourceWithConfigValidators = (*dataSourceWarehouse)(nil)
- _ datasource.DataSourceWithConfigure = (*dataSourceWarehouse)(nil)
-)
-
-type dataSourceWarehouse struct {
- pConfigData *pconfig.ProviderData
- client *fabwarehouse.ItemsClient
-}
-
func NewDataSourceWarehouse() datasource.DataSource {
- return &dataSourceWarehouse{}
-}
-
-func (d *dataSourceWarehouse) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (d *dataSourceWarehouse) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "Get a Fabric Warehouse.\n\n" +
- "Use this data source to fetch a [Warehouse](https://learn.microsoft.com/fabric/data-warehouse/data-warehousing).\n\n" +
- ItemDocsSPNSupport,
- Attributes: map[string]schema.Attribute{
- "workspace_id": schema.StringAttribute{
- MarkdownDescription: "The Workspace ID.",
- Required: true,
- CustomType: customtypes.UUIDType{},
- },
- "id": schema.StringAttribute{
- MarkdownDescription: "The Warehouse ID.",
- Optional: true,
- Computed: true,
- CustomType: customtypes.UUIDType{},
- },
- "display_name": schema.StringAttribute{
- MarkdownDescription: "The Warehouse display name.",
- Optional: true,
- Computed: true,
- },
- "description": schema.StringAttribute{
- MarkdownDescription: "The Warehouse description.",
- Computed: true,
- },
- "properties": schema.SingleNestedAttribute{
- Computed: true,
- MarkdownDescription: "The Warehouse properties.",
- CustomType: supertypes.NewSingleNestedObjectTypeOf[warehousePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "connection_string": schema.StringAttribute{
- MarkdownDescription: "Connection String",
- Computed: true,
- },
- "created_date": schema.StringAttribute{
- MarkdownDescription: "Created Date",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- "last_updated_time": schema.StringAttribute{
- MarkdownDescription: "Last Updated Time",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- },
- },
- "timeouts": timeouts.Attributes(ctx),
- },
- }
-}
-
-func (d *dataSourceWarehouse) ConfigValidators(_ context.Context) []datasource.ConfigValidator {
- return []datasource.ConfigValidator{
- datasourcevalidator.Conflicting(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- datasourcevalidator.ExactlyOneOf(
- path.MatchRoot("id"),
- path.MatchRoot("display_name"),
- ),
- }
-}
-
-func (d *dataSourceWarehouse) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorDataSourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- d.pConfigData = pConfigData
- d.client = fabwarehouse.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
-
-// Read refreshes the Terraform state with the latest data.
-func (d *dataSourceWarehouse) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "config": req.Config,
- })
-
- var data dataSourceWarehouseModel
-
- if resp.Diagnostics.Append(req.Config.Get(ctx, &data)...); resp.Diagnostics.HasError() {
- return
- }
+ propertiesSetter := func(ctx context.Context, from *fabwarehouse.Properties, to *fabricitem.DataSourceFabricItemPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[warehousePropertiesModel](ctx)
- timeout, diags := data.Timeouts.Read(ctx, d.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
+ if from != nil {
+ propertiesModel := &warehousePropertiesModel{}
+ propertiesModel.set(from)
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
- if data.ID.ValueString() != "" {
- diags = d.getByID(ctx, &data)
- } else {
- diags = d.getByDisplayName(ctx, &data)
- }
+ to.Properties = properties
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
-
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties], fabricItem *fabricitem.FabricItemProperties[fabwarehouse.Properties]) error {
+ client := fabwarehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- if resp.Diagnostics.HasError() {
- return
- }
-}
+ respGet, err := client.GetWarehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
-func (d *dataSourceWarehouse) getByID(ctx context.Context, model *dataSourceWarehouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "getting Warehouse by 'id'")
+ fabricItem.Set(respGet.Warehouse)
- respGet, err := d.client.GetWarehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationRead, nil); diags.HasError() {
- return diags
+ return nil
}
- return model.set(ctx, respGet.Warehouse)
-}
-
-func (d *dataSourceWarehouse) getByDisplayName(ctx context.Context, model *dataSourceWarehouseModel) diag.Diagnostics {
- tflog.Trace(ctx, "getting Warehouse by 'display_name'")
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties], errNotFound fabcore.ResponseError, fabricItem *fabricitem.FabricItemProperties[fabwarehouse.Properties]) error {
+ client := fabwarehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- var diags diag.Diagnostics
+ pager := client.NewListWarehousesPager(model.WorkspaceID.ValueString(), nil)
+ for pager.More() {
+ page, err := pager.NextPage(ctx)
+ if err != nil {
+ return err
+ }
- pager := d.client.NewListWarehousesPager(model.WorkspaceID.ValueString(), nil)
- for pager.More() {
- page, err := pager.NextPage(ctx)
- if diags := utils.GetDiagsFromError(ctx, err, utils.OperationList, nil); diags.HasError() {
- return diags
- }
+ for _, entity := range page.Value {
+ if *entity.DisplayName == model.DisplayName.ValueString() {
+ fabricItem.Set(entity)
- for _, entity := range page.Value {
- if *entity.DisplayName == model.DisplayName.ValueString() {
- return model.set(ctx, entity)
+ return nil
+ }
}
}
+
+ return &errNotFound
}
- diags.AddError(
- common.ErrorReadHeader,
- fmt.Sprintf("Unable to find Warehouse with 'display_name': %s in the Workspace ID: %s ", model.DisplayName.ValueString(), model.WorkspaceID.ValueString()),
- )
+ config := fabricitem.DataSourceFabricItemProperties[warehousePropertiesModel, fabwarehouse.Properties]{
+ DataSourceFabricItem: fabricitem.DataSourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ TFName: ItemTFName,
+ MarkdownDescription: "Get a Fabric " + ItemName + ".\n\n" +
+ "Use this data source to fetch a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ IsDisplayNameUnique: true,
+ },
+ PropertiesAttributes: getDataSourceWarehousePropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
+ ItemListGetter: itemListGetter,
+ }
- return diags
+ return fabricitem.NewDataSourceFabricItemProperties(config)
}
diff --git a/internal/services/warehouse/data_warehouse_test.go b/internal/services/warehouse/data_warehouse_test.go
index 12db7699..6c61077c 100644
--- a/internal/services/warehouse/data_warehouse_test.go
+++ b/internal/services/warehouse/data_warehouse_test.go
@@ -105,6 +105,9 @@ func TestUnit_WarehouseDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.last_updated_time"),
),
},
// read by id - not found
@@ -133,6 +136,9 @@ func TestUnit_WarehouseDataSource(t *testing.T) {
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "id", entity.ID),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "display_name", entity.DisplayName),
resource.TestCheckResourceAttrPtr(testDataSourceItemFQN, "description", entity.Description),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.last_updated_time"),
),
},
// read by name - not found
@@ -154,7 +160,7 @@ func TestAcc_WarehouseDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
entity := testhelp.WellKnown()["Warehouse"].(map[string]any)
@@ -177,6 +183,9 @@ func TestAcc_WarehouseDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.last_updated_time"),
),
},
// read by id - not found
@@ -204,6 +213,9 @@ func TestAcc_WarehouseDataSource(t *testing.T) {
resource.TestCheckResourceAttr(testDataSourceItemFQN, "id", entityID),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "display_name", entityDisplayName),
resource.TestCheckResourceAttr(testDataSourceItemFQN, "description", entityDescription),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemFQN, "properties.last_updated_time"),
),
},
// read by name - not found
diff --git a/internal/services/warehouse/data_warehouses.go b/internal/services/warehouse/data_warehouses.go
index cc228a9f..87bb426c 100644
--- a/internal/services/warehouse/data_warehouses.go
+++ b/internal/services/warehouse/data_warehouses.go
@@ -4,21 +4,72 @@
package warehouse
import (
+ "context"
+
+ supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/microsoft/fabric-sdk-go/fabric"
+ fabwarehouse "github.com/microsoft/fabric-sdk-go/fabric/warehouse"
"github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
func NewDataSourceWarehouses() datasource.DataSource {
- config := fabricitem.DataSourceFabricItems{
- Type: ItemType,
- Name: ItemName,
- Names: ItemsName,
- TFName: ItemsTFName,
- MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
- "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
- ItemDocsSPNSupport,
+ propertiesSetter := func(ctx context.Context, from *fabwarehouse.Properties, to *fabricitem.FabricItemPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[warehousePropertiesModel](ctx)
+
+ if from != nil {
+ propertiesModel := &warehousePropertiesModel{}
+ propertiesModel.set(from)
+
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
+ }
+
+ to.Properties = properties
+
+ return nil
+ }
+
+ itemListGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.DataSourceFabricItemsPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties], fabricItems *[]fabricitem.FabricItemProperties[fabwarehouse.Properties]) error {
+ client := fabwarehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
+
+ fabItems := make([]fabricitem.FabricItemProperties[fabwarehouse.Properties], 0)
+
+ respList, err := client.ListWarehouses(ctx, model.WorkspaceID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
+
+ for _, entity := range respList {
+ var fabricItem fabricitem.FabricItemProperties[fabwarehouse.Properties]
+
+ fabricItem.Set(entity)
+
+ fabItems = append(fabItems, fabricItem)
+ }
+
+ *fabricItems = fabItems
+
+ return nil
+ }
+
+ config := fabricitem.DataSourceFabricItemsProperties[warehousePropertiesModel, fabwarehouse.Properties]{
+ DataSourceFabricItems: fabricitem.DataSourceFabricItems{
+ Type: ItemType,
+ Name: ItemName,
+ Names: ItemsName,
+ TFName: ItemsTFName,
+ MarkdownDescription: "List a Fabric " + ItemsName + ".\n\n" +
+ "Use this data source to list [" + ItemsName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ },
+ PropertiesAttributes: getDataSourceWarehousePropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemListGetter: itemListGetter,
}
- return fabricitem.NewDataSourceFabricItems(config)
+ return fabricitem.NewDataSourceFabricItemsProperties(config)
}
diff --git a/internal/services/warehouse/data_warehouses_test.go b/internal/services/warehouse/data_warehouses_test.go
index f8e22749..2f4beaf1 100644
--- a/internal/services/warehouse/data_warehouses_test.go
+++ b/internal/services/warehouse/data_warehouses_test.go
@@ -22,11 +22,11 @@ var (
func TestUnit_WarehousesDataSource(t *testing.T) {
workspaceID := testhelp.RandomUUID()
- entity := fakes.NewRandomItemWithWorkspace(itemType, workspaceID)
+ entity := fakes.NewRandomWarehouseWithWorkspace(workspaceID)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomWarehouseWithWorkspace(workspaceID))
fakes.FakeServer.Upsert(entity)
- fakes.FakeServer.Upsert(fakes.NewRandomItemWithWorkspace(itemType, workspaceID))
+ fakes.FakeServer.Upsert(fakes.NewRandomWarehouseWithWorkspace(workspaceID))
resource.ParallelTest(t, testhelp.NewTestUnitCase(t, nil, fakes.FakeServer.ServerFactory, nil, []resource.TestStep{
// error - no attributes
@@ -69,6 +69,9 @@ func TestUnit_WarehousesDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "workspace_id", entity.WorkspaceID),
resource.TestCheckResourceAttrPtr(testDataSourceItemsFQN, "values.1.id", entity.ID),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.created_date"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.1.properties.last_updated_time"),
),
},
}))
@@ -79,7 +82,7 @@ func TestAcc_WarehousesDataSource(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
@@ -94,6 +97,9 @@ func TestAcc_WarehousesDataSource(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testDataSourceItemsFQN, "workspace_id", workspaceID),
resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.id"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.created_date"),
+ resource.TestCheckResourceAttrSet(testDataSourceItemsFQN, "values.0.properties.last_updated_time"),
),
},
},
diff --git a/internal/services/warehouse/models.go b/internal/services/warehouse/models.go
index ed3e7d23..7bbfe917 100644
--- a/internal/services/warehouse/models.go
+++ b/internal/services/warehouse/models.go
@@ -4,47 +4,11 @@
package warehouse
import (
- "context"
-
- supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
"github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
- "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types"
fabwarehouse "github.com/microsoft/fabric-sdk-go/fabric/warehouse"
-
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
)
-type baseWarehouseModel struct {
- WorkspaceID customtypes.UUID `tfsdk:"workspace_id"`
- ID customtypes.UUID `tfsdk:"id"`
- DisplayName types.String `tfsdk:"display_name"`
- Description types.String `tfsdk:"description"`
- Properties supertypes.SingleNestedObjectValueOf[warehousePropertiesModel] `tfsdk:"properties"`
-}
-
-func (to *baseWarehouseModel) set(ctx context.Context, from fabwarehouse.Warehouse) diag.Diagnostics {
- to.WorkspaceID = customtypes.NewUUIDPointerValue(from.WorkspaceID)
- to.ID = customtypes.NewUUIDPointerValue(from.ID)
- to.DisplayName = types.StringPointerValue(from.DisplayName)
- to.Description = types.StringPointerValue(from.Description)
-
- properties := supertypes.NewSingleNestedObjectValueOfNull[warehousePropertiesModel](ctx)
-
- if from.Properties != nil {
- propertiesModel := &warehousePropertiesModel{}
- propertiesModel.set(from.Properties)
-
- if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
- return diags
- }
- }
-
- to.Properties = properties
-
- return nil
-}
-
type warehousePropertiesModel struct {
ConnectionString types.String `tfsdk:"connection_string"`
CreatedDate timetypes.RFC3339 `tfsdk:"created_date"`
diff --git a/internal/services/warehouse/models_data_warehouse.go b/internal/services/warehouse/models_data_warehouse.go
deleted file mode 100644
index 346e6a32..00000000
--- a/internal/services/warehouse/models_data_warehouse.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) Microsoft Corporation
-// SPDX-License-Identifier: MPL-2.0
-
-package warehouse
-
-import (
- "github.com/hashicorp/terraform-plugin-framework-timeouts/datasource/timeouts"
-)
-
-type dataSourceWarehouseModel struct {
- baseWarehouseModel
- Timeouts timeouts.Value `tfsdk:"timeouts"`
-}
diff --git a/internal/services/warehouse/models_resource_warehouse.go b/internal/services/warehouse/models_resource_warehouse.go
deleted file mode 100644
index 081483d8..00000000
--- a/internal/services/warehouse/models_resource_warehouse.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) Microsoft Corporation
-// SPDX-License-Identifier: MPL-2.0
-
-package warehouse
-
-import (
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- fabwarehouse "github.com/microsoft/fabric-sdk-go/fabric/warehouse"
-)
-
-type resourceWarehouseModel struct {
- baseWarehouseModel
- Timeouts timeouts.Value `tfsdk:"timeouts"`
-}
-
-type requestCreateWarehouse struct {
- fabwarehouse.CreateWarehouseRequest
-}
-
-func (to *requestCreateWarehouse) set(from resourceWarehouseModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
-
-type requestUpdateWarehouse struct {
- fabwarehouse.UpdateWarehouseRequest
-}
-
-func (to *requestUpdateWarehouse) set(from resourceWarehouseModel) {
- to.DisplayName = from.DisplayName.ValueStringPointer()
- to.Description = from.Description.ValueStringPointer()
-}
diff --git a/internal/services/warehouse/resource_warehouse.go b/internal/services/warehouse/resource_warehouse.go
index 81f5f55a..4657e66e 100644
--- a/internal/services/warehouse/resource_warehouse.go
+++ b/internal/services/warehouse/resource_warehouse.go
@@ -5,385 +5,63 @@ package warehouse
import (
"context"
- "fmt"
- "strings"
supertypes "github.com/FrangipaneTeam/terraform-plugin-framework-supertypes"
- "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts"
- "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+ "github.com/microsoft/fabric-sdk-go/fabric"
fabwarehouse "github.com/microsoft/fabric-sdk-go/fabric/warehouse"
- "github.com/microsoft/terraform-provider-fabric/internal/common"
- "github.com/microsoft/terraform-provider-fabric/internal/framework/customtypes"
- "github.com/microsoft/terraform-provider-fabric/internal/pkg/utils"
- pconfig "github.com/microsoft/terraform-provider-fabric/internal/provider/config"
+ "github.com/microsoft/terraform-provider-fabric/internal/pkg/fabricitem"
)
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ resource.ResourceWithConfigure = (*resourceWarehouse)(nil)
- _ resource.ResourceWithImportState = (*resourceWarehouse)(nil)
-)
-
-type resourceWarehouse struct {
- pConfigData *pconfig.ProviderData
- client *fabwarehouse.ItemsClient
-}
-
func NewResourceWarehouse() resource.Resource {
- return &resourceWarehouse{}
-}
-
-func (r *resourceWarehouse) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_" + ItemTFName
-}
-
-func (r *resourceWarehouse) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "This resource manages a Fabric Warehouse.\n\n" +
- "See [Warehouse](https://learn.microsoft.com/fabric/data-warehouse/data-warehousing) for more information.\n\n" +
- ItemDocsSPNSupport,
- Attributes: map[string]schema.Attribute{
- "workspace_id": schema.StringAttribute{
- MarkdownDescription: "The Workspace ID.",
- Required: true,
- CustomType: customtypes.UUIDType{},
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- },
- "id": schema.StringAttribute{
- MarkdownDescription: "The Warehouse ID.",
- Computed: true,
- CustomType: customtypes.UUIDType{},
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.UseStateForUnknown(),
- },
- },
- "display_name": schema.StringAttribute{
- MarkdownDescription: "The Warehouse display name.",
- Required: true,
- Validators: []validator.String{
- stringvalidator.LengthAtMost(123),
- },
- },
- "description": schema.StringAttribute{
- MarkdownDescription: "The Warehouse description.",
- Optional: true,
- Computed: true,
- Default: stringdefault.StaticString(""),
- Validators: []validator.String{
- stringvalidator.LengthAtMost(256),
- },
- },
- "properties": schema.SingleNestedAttribute{
- Computed: true,
- MarkdownDescription: "The Warehouse properties.",
- CustomType: supertypes.NewSingleNestedObjectTypeOf[warehousePropertiesModel](ctx),
- Attributes: map[string]schema.Attribute{
- "connection_string": schema.StringAttribute{
- MarkdownDescription: "Connection String",
- Computed: true,
- },
- "created_date": schema.StringAttribute{
- MarkdownDescription: "Created Date",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- "last_updated_time": schema.StringAttribute{
- MarkdownDescription: "Last Updated Time",
- Computed: true,
- CustomType: timetypes.RFC3339Type{},
- },
- },
- },
- "timeouts": timeouts.AttributesAll(ctx),
- },
- }
-}
-
-func (r *resourceWarehouse) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- if req.ProviderData == nil {
- return
- }
-
- pConfigData, ok := req.ProviderData.(*pconfig.ProviderData)
- if !ok {
- resp.Diagnostics.AddError(
- common.ErrorResourceConfigType,
- fmt.Sprintf(common.ErrorFabricClientType, req.ProviderData),
- )
-
- return
- }
-
- r.pConfigData = pConfigData
- r.client = fabwarehouse.NewClientFactoryWithClient(*pConfigData.FabricClient).NewItemsClient()
-}
-
-func (r *resourceWarehouse) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "CREATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- })
-
- var plan resourceWarehouseModel
-
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Create(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqCreate requestCreateWarehouse
+ propertiesSetter := func(ctx context.Context, from *fabwarehouse.Properties, to *fabricitem.ResourceFabricItemPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties]) diag.Diagnostics {
+ properties := supertypes.NewSingleNestedObjectValueOfNull[warehousePropertiesModel](ctx)
- reqCreate.set(plan)
+ if from != nil {
+ propertiesModel := &warehousePropertiesModel{}
+ propertiesModel.set(from)
- respCreate, err := r.client.CreateWarehouse(ctx, plan.WorkspaceID.ValueString(), reqCreate.CreateWarehouseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationCreate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.Append(plan.set(ctx, respCreate.Warehouse)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- if resp.Diagnostics.Append(r.get(ctx, &plan, utils.OperationCreate)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "CREATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceWarehouse) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "READ", map[string]any{
- "state": req.State,
- })
-
- var state resourceWarehouseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := state.Timeouts.Read(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- if diags := r.get(ctx, &state, utils.OperationRead); diags.HasError() {
- if utils.IsErrNotFound(state.ID.ValueString(), &diags, fabcore.ErrCommon.EntityNotFound) {
- resp.State.RemoveResource(ctx)
+ if diags := properties.Set(ctx, propertiesModel); diags.HasError() {
+ return diags
+ }
}
- resp.Diagnostics.Append(diags...)
+ to.Properties = properties
- return
+ return nil
}
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "READ", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceWarehouse) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "UPDATE", map[string]any{
- "config": req.Config,
- "plan": req.Plan,
- "state": req.State,
- })
-
- var plan resourceWarehouseModel
+ itemGetter := func(ctx context.Context, fabricClient fabric.Client, model fabricitem.ResourceFabricItemPropertiesModel[warehousePropertiesModel, fabwarehouse.Properties], fabricItem *fabricitem.FabricItemProperties[fabwarehouse.Properties]) error {
+ client := fabwarehouse.NewClientFactoryWithClient(fabricClient).NewItemsClient()
- if resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := plan.Timeouts.Update(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- var reqUpdate requestUpdateWarehouse
-
- reqUpdate.set(plan)
-
- respUpdate, err := r.client.UpdateWarehouse(ctx, plan.WorkspaceID.ValueString(), plan.ID.ValueString(), reqUpdate.UpdateWarehouseRequest, nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationUpdate, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.Append(plan.set(ctx, respUpdate.Warehouse)...); resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.Append(resp.State.Set(ctx, plan)...); resp.Diagnostics.HasError() {
- return
- }
-
- if resp.Diagnostics.Append(r.get(ctx, &plan, utils.OperationUpdate)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, plan)...)
-
- tflog.Debug(ctx, "UPDATE", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceWarehouse) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "DELETE", map[string]any{
- "state": req.State,
- })
-
- var state resourceWarehouseModel
-
- if resp.Diagnostics.Append(req.State.Get(ctx, &state)...); resp.Diagnostics.HasError() {
- return
- }
-
- timeout, diags := state.Timeouts.Delete(ctx, r.pConfigData.Timeout)
- if resp.Diagnostics.Append(diags...); resp.Diagnostics.HasError() {
- return
- }
-
- ctx, cancel := context.WithTimeout(ctx, timeout)
- defer cancel()
-
- _, err := r.client.DeleteWarehouse(ctx, state.WorkspaceID.ValueString(), state.ID.ValueString(), nil)
- if resp.Diagnostics.Append(utils.GetDiagsFromError(ctx, err, utils.OperationDelete, nil)...); resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Debug(ctx, "DELETE", map[string]any{
- "action": "end",
- })
-}
-
-func (r *resourceWarehouse) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "start",
- })
- tflog.Trace(ctx, "IMPORT", map[string]any{
- "id": req.ID,
- })
-
- workspaceID, warehouseID, found := strings.Cut(req.ID, "/")
- if !found {
- resp.Diagnostics.AddError(
- common.ErrorImportIdentifierHeader,
- fmt.Sprintf(common.ErrorImportIdentifierDetails, "WorkspaceID/WarehouseID"),
- )
-
- return
- }
-
- uuidWorkspaceID, diags := customtypes.NewUUIDValueMust(workspaceID)
- resp.Diagnostics.Append(diags...)
-
- uuidID, diags := customtypes.NewUUIDValueMust(warehouseID)
- resp.Diagnostics.Append(diags...)
+ respGet, err := client.GetWarehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
+ if err != nil {
+ return err
+ }
- if resp.Diagnostics.HasError() {
- return
- }
+ fabricItem.Set(respGet.Warehouse)
- var timeout timeouts.Value
- if resp.Diagnostics.Append(resp.State.GetAttribute(ctx, path.Root("timeouts"), &timeout)...); resp.Diagnostics.HasError() {
- return
+ return nil
}
- state := resourceWarehouseModel{
- baseWarehouseModel: baseWarehouseModel{
- ID: uuidID,
- WorkspaceID: uuidWorkspaceID,
+ config := fabricitem.ResourceFabricItemProperties[warehousePropertiesModel, fabwarehouse.Properties]{
+ ResourceFabricItem: fabricitem.ResourceFabricItem{
+ Type: ItemType,
+ Name: ItemName,
+ NameRenameAllowed: true,
+ TFName: ItemTFName,
+ MarkdownDescription: "Manage a Fabric " + ItemName + ".\n\n" +
+ "Use this resource to manage a [" + ItemName + "](" + ItemDocsURL + ").\n\n" +
+ ItemDocsSPNSupport,
+ DisplayNameMaxLength: 123,
+ DescriptionMaxLength: 256,
},
- Timeouts: timeout,
- }
-
- if resp.Diagnostics.Append(r.get(ctx, &state, utils.OperationImport)...); resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
-
- tflog.Debug(ctx, "IMPORT", map[string]any{
- "action": "end",
- })
-
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *resourceWarehouse) get(ctx context.Context, model *resourceWarehouseModel, operation utils.Operation) diag.Diagnostics {
- tflog.Trace(ctx, "getting Warehouse")
-
- var errIs error
- if operation == utils.OperationRead {
- errIs = fabcore.ErrCommon.EntityNotFound
- }
-
- respGet, err := r.client.GetWarehouse(ctx, model.WorkspaceID.ValueString(), model.ID.ValueString(), nil)
- if diags := utils.GetDiagsFromError(ctx, err, operation, errIs); diags.HasError() {
- return diags
+ PropertiesAttributes: getResourceWarehousePropertiesAttributes(),
+ PropertiesSetter: propertiesSetter,
+ ItemGetter: itemGetter,
}
- return model.set(ctx, respGet.Warehouse)
+ return fabricitem.NewResourceFabricItemProperties(config)
}
diff --git a/internal/services/warehouse/resource_warehouse_test.go b/internal/services/warehouse/resource_warehouse_test.go
index 70de53c5..41f60d9a 100644
--- a/internal/services/warehouse/resource_warehouse_test.go
+++ b/internal/services/warehouse/resource_warehouse_test.go
@@ -188,6 +188,9 @@ func TestUnit_WarehouseResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityBefore.DisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.last_updated_time"),
),
},
// Update and Read
@@ -204,6 +207,9 @@ func TestUnit_WarehouseResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "display_name", entityAfter.DisplayName),
resource.TestCheckResourceAttrPtr(testResourceItemFQN, "description", entityAfter.Description),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.last_updated_time"),
),
},
// Delete testing automatically occurs in TestCase
@@ -215,7 +221,7 @@ func TestAcc_WarehouseResource_CRUD(t *testing.T) {
t.Skip("No SPN support")
}
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceRS"].(map[string]any)
workspaceID := workspace["id"].(string)
entityCreateDisplayName := testhelp.RandomName()
@@ -236,6 +242,9 @@ func TestAcc_WarehouseResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityCreateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", ""),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.last_updated_time"),
),
},
// Update and Read
@@ -252,6 +261,9 @@ func TestAcc_WarehouseResource_CRUD(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(testResourceItemFQN, "display_name", entityUpdateDisplayName),
resource.TestCheckResourceAttr(testResourceItemFQN, "description", entityUpdateDescription),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.connection_string"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.created_date"),
+ resource.TestCheckResourceAttrSet(testResourceItemFQN, "properties.last_updated_time"),
),
},
},
diff --git a/internal/services/warehouse/schema_data_warehouse.go b/internal/services/warehouse/schema_data_warehouse.go
new file mode 100644
index 00000000..26c09cf5
--- /dev/null
+++ b/internal/services/warehouse/schema_data_warehouse.go
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package warehouse
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+)
+
+func getDataSourceWarehousePropertiesAttributes() map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "connection_string": schema.StringAttribute{
+ MarkdownDescription: "The SQL connection string connected to the workspace containing this warehouse.",
+ Computed: true,
+ },
+ "created_date": schema.StringAttribute{
+ MarkdownDescription: "The date and time the warehouse was created.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ "last_updated_time": schema.StringAttribute{
+ MarkdownDescription: "The date and time the warehouse was last updated.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/warehouse/schema_resource_warehouse.go b/internal/services/warehouse/schema_resource_warehouse.go
new file mode 100644
index 00000000..93f08ee3
--- /dev/null
+++ b/internal/services/warehouse/schema_resource_warehouse.go
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft Corporation
+// SPDX-License-Identifier: MPL-2.0
+
+package warehouse
+
+import (
+ "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func getResourceWarehousePropertiesAttributes() map[string]schema.Attribute {
+ result := map[string]schema.Attribute{
+ "connection_string": schema.StringAttribute{
+ MarkdownDescription: "The SQL connection string connected to the workspace containing this warehouse.",
+ Computed: true,
+ },
+ "created_date": schema.StringAttribute{
+ MarkdownDescription: "The date and time the warehouse was created.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ "last_updated_time": schema.StringAttribute{
+ MarkdownDescription: "The date and time the warehouse was last updated.",
+ Computed: true,
+ CustomType: timetypes.RFC3339Type{},
+ },
+ }
+
+ return result
+}
diff --git a/internal/services/workspace/data_workspace_role_assignments_test.go b/internal/services/workspace/data_workspace_role_assignments_test.go
index 8d88b2a1..e30108d2 100644
--- a/internal/services/workspace/data_workspace_role_assignments_test.go
+++ b/internal/services/workspace/data_workspace_role_assignments_test.go
@@ -58,7 +58,7 @@ func TestUnit_WorkspaceRoleAssignmentsDataSource(t *testing.T) {
}
func TestAcc_WorkspaceRoleAssignmentsDataSource(t *testing.T) {
- workspace := testhelp.WellKnown()["Workspace"].(map[string]any)
+ workspace := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
workspaceID := workspace["id"].(string)
resource.ParallelTest(t, testhelp.NewTestAccCase(t, nil, nil, []resource.TestStep{
diff --git a/internal/services/workspace/data_workspace_test.go b/internal/services/workspace/data_workspace_test.go
index 06592b73..1ddcd116 100644
--- a/internal/services/workspace/data_workspace_test.go
+++ b/internal/services/workspace/data_workspace_test.go
@@ -147,7 +147,7 @@ func TestUnit_WorkspaceDataSource(t *testing.T) {
}
func TestAcc_WorkspaceDataSource(t *testing.T) {
- entity := testhelp.WellKnown()["Workspace"].(map[string]any)
+ entity := testhelp.WellKnown()["WorkspaceDS"].(map[string]any)
entityID := entity["id"].(string)
entityDisplayName := entity["displayName"].(string)
entityDescription := entity["description"].(string)
diff --git a/internal/testhelp/fakes/fabric_environment.go b/internal/testhelp/fakes/fabric_environment.go
index 9e6dabf7..048a4c60 100644
--- a/internal/testhelp/fakes/fabric_environment.go
+++ b/internal/testhelp/fakes/fabric_environment.go
@@ -17,6 +17,18 @@ import (
type operationsEnvironment struct{}
+// ConvertItemToEntity implements itemConverter.
+func (o *operationsEnvironment) ConvertItemToEntity(item fabcore.Item) fabenvironment.Environment {
+ return fabenvironment.Environment{
+ ID: item.ID,
+ DisplayName: item.DisplayName,
+ Description: item.Description,
+ WorkspaceID: item.WorkspaceID,
+ Type: to.Ptr(fabenvironment.ItemTypeEnvironment),
+ Properties: NewRandomEnvironment().Properties,
+ }
+}
+
// CreateWithParentID implements concreteOperations.
func (o *operationsEnvironment) CreateWithParentID(parentID string, data fabenvironment.CreateEnvironmentRequest) fabenvironment.Environment {
entity := NewRandomEnvironmentWithWorkspace(parentID)
@@ -106,8 +118,8 @@ func configureEnvironment(server *fakeServer) fabenvironment.Environment {
}
var entityOperations concreteEntityOperations = &operationsEnvironment{}
-
- handler := newTypedHandler(server, entityOperations)
+ var converter itemConverter[fabenvironment.Environment] = &operationsEnvironment{}
+ handler := newTypedHandlerWithConverter(server, entityOperations, converter)
configureEntityWithParentID(
handler,
diff --git a/internal/testhelp/fakes/fabric_eventhouse.go b/internal/testhelp/fakes/fabric_eventhouse.go
index 7b8cd955..2bafd04b 100644
--- a/internal/testhelp/fakes/fabric_eventhouse.go
+++ b/internal/testhelp/fakes/fabric_eventhouse.go
@@ -16,6 +16,37 @@ import (
type operationsEventhouse struct{}
+// ConvertItemToEntity implements itemConverter.
+func (o *operationsEventhouse) ConvertItemToEntity(item fabcore.Item) fabeventhouse.Eventhouse {
+ return fabeventhouse.Eventhouse{
+ ID: item.ID,
+ DisplayName: item.DisplayName,
+ Description: item.Description,
+ WorkspaceID: item.WorkspaceID,
+ Type: to.Ptr(fabeventhouse.ItemTypeEventhouse),
+ Properties: NewRandomEventhouse().Properties,
+ }
+}
+
+// CreateDefinition implements concreteDefinitionOperations.
+func (o *operationsEventhouse) CreateDefinition(data fabeventhouse.CreateEventhouseRequest) *fabeventhouse.Definition {
+ return data.Definition
+}
+
+// TransformDefinition implements concreteDefinitionOperations.
+func (o *operationsEventhouse) TransformDefinition(entity *fabeventhouse.Definition) fabeventhouse.ItemsClientGetEventhouseDefinitionResponse {
+ return fabeventhouse.ItemsClientGetEventhouseDefinitionResponse{
+ DefinitionResponse: fabeventhouse.DefinitionResponse{
+ Definition: entity,
+ },
+ }
+}
+
+// UpdateDefinition implements concreteDefinitionOperations.
+func (o *operationsEventhouse) UpdateDefinition(_ *fabeventhouse.Definition, data fabeventhouse.UpdateEventhouseDefinitionRequest) *fabeventhouse.Definition {
+ return data.Definition
+}
+
// CreateWithParentID implements concreteOperations.
func (o *operationsEventhouse) CreateWithParentID(parentID string, data fabeventhouse.CreateEventhouseRequest) fabeventhouse.Eventhouse {
entity := NewRandomEventhouseWithWorkspace(parentID)
@@ -75,8 +106,8 @@ func (o *operationsEventhouse) TransformUpdate(entity fabeventhouse.Eventhouse)
// Update implements concreteOperations.
func (o *operationsEventhouse) Update(base fabeventhouse.Eventhouse, data fabeventhouse.UpdateEventhouseRequest) fabeventhouse.Eventhouse {
- base.Description = data.Description
base.DisplayName = data.DisplayName
+ base.Description = data.Description
return base
}
@@ -104,9 +135,22 @@ func configureEventhouse(server *fakeServer) fabeventhouse.Eventhouse {
fabeventhouse.UpdateEventhouseRequest]
}
+ type concreteDefinitionOperations interface {
+ definitionOperations[
+ fabeventhouse.Definition,
+ fabeventhouse.CreateEventhouseRequest,
+ fabeventhouse.UpdateEventhouseDefinitionRequest,
+ fabeventhouse.ItemsClientGetEventhouseDefinitionResponse,
+ fabeventhouse.ItemsClientUpdateEventhouseDefinitionResponse]
+ }
+
var entityOperations concreteEntityOperations = &operationsEventhouse{}
- handler := newTypedHandler(server, entityOperations)
+ var definitionOperations concreteDefinitionOperations = &operationsEventhouse{}
+
+ var converter itemConverter[fabeventhouse.Eventhouse] = &operationsEventhouse{}
+
+ handler := newTypedHandlerWithConverter(server, entityOperations, converter)
configureEntityWithParentID(
handler,
@@ -117,6 +161,14 @@ func configureEventhouse(server *fakeServer) fabeventhouse.Eventhouse {
&server.ServerFactory.Eventhouse.ItemsServer.NewListEventhousesPager,
&server.ServerFactory.Eventhouse.ItemsServer.DeleteEventhouse)
+ configureDefinitions(
+ handler,
+ entityOperations,
+ definitionOperations,
+ &server.ServerFactory.Eventhouse.ItemsServer.BeginCreateEventhouse,
+ &server.ServerFactory.Eventhouse.ItemsServer.BeginGetEventhouseDefinition,
+ &server.ServerFactory.Eventhouse.ItemsServer.BeginUpdateEventhouseDefinition)
+
return fabeventhouse.Eventhouse{}
}
@@ -141,3 +193,19 @@ func NewRandomEventhouseWithWorkspace(workspaceID string) fabeventhouse.Eventhou
return result
}
+
+func NewRandomEventhouseDefinition() fabeventhouse.Definition {
+ defPart := fabeventhouse.DefinitionPart{
+ PayloadType: to.Ptr(fabeventhouse.PayloadTypeInlineBase64),
+ Path: to.Ptr("EventhouseProperties.json"),
+ Payload: to.Ptr("e30="),
+ }
+
+ var defParts []fabeventhouse.DefinitionPart
+
+ defParts = append(defParts, defPart)
+
+ return fabeventhouse.Definition{
+ Parts: defParts,
+ }
+}
diff --git a/internal/testhelp/fakes/fabric_kqldatabase.go b/internal/testhelp/fakes/fabric_kqldatabase.go
index 6d87d4ec..49b6eabb 100644
--- a/internal/testhelp/fakes/fabric_kqldatabase.go
+++ b/internal/testhelp/fakes/fabric_kqldatabase.go
@@ -16,6 +16,37 @@ import (
type operationsKQLDatabase struct{}
+// ConvertItemToEntity implements itemConverter.
+func (o *operationsKQLDatabase) ConvertItemToEntity(item fabcore.Item) fabkqldatabase.KQLDatabase {
+ return fabkqldatabase.KQLDatabase{
+ ID: item.ID,
+ DisplayName: item.DisplayName,
+ Description: item.Description,
+ WorkspaceID: item.WorkspaceID,
+ Type: to.Ptr(fabkqldatabase.ItemTypeKQLDatabase),
+ Properties: NewRandomKQLDatabase().Properties,
+ }
+}
+
+// CreateDefinition implements concreteDefinitionOperations.
+func (o *operationsKQLDatabase) CreateDefinition(data fabkqldatabase.CreateKQLDatabaseRequest) *fabkqldatabase.Definition {
+ return data.Definition
+}
+
+// TransformDefinition implements concreteDefinitionOperations.
+func (o *operationsKQLDatabase) TransformDefinition(entity *fabkqldatabase.Definition) fabkqldatabase.ItemsClientGetKQLDatabaseDefinitionResponse {
+ return fabkqldatabase.ItemsClientGetKQLDatabaseDefinitionResponse{
+ DefinitionResponse: fabkqldatabase.DefinitionResponse{
+ Definition: entity,
+ },
+ }
+}
+
+// UpdateDefinition implements concreteDefinitionOperations.
+func (o *operationsKQLDatabase) UpdateDefinition(_ *fabkqldatabase.Definition, data fabkqldatabase.UpdateKQLDatabaseDefinitionRequest) *fabkqldatabase.Definition {
+ return data.Definition
+}
+
// CreateWithParentID implements concreteOperations.
func (o *operationsKQLDatabase) CreateWithParentID(parentID string, data fabkqldatabase.CreateKQLDatabaseRequest) fabkqldatabase.KQLDatabase {
entity := NewRandomKQLDatabaseWithWorkspace(parentID)
@@ -104,9 +135,22 @@ func configureKQLDatabase(server *fakeServer) fabkqldatabase.KQLDatabase {
fabkqldatabase.UpdateKQLDatabaseRequest]
}
+ type concreteDefinitionOperations interface {
+ definitionOperations[
+ fabkqldatabase.Definition,
+ fabkqldatabase.CreateKQLDatabaseRequest,
+ fabkqldatabase.UpdateKQLDatabaseDefinitionRequest,
+ fabkqldatabase.ItemsClientGetKQLDatabaseDefinitionResponse,
+ fabkqldatabase.ItemsClientUpdateKQLDatabaseDefinitionResponse]
+ }
+
var entityOperations concreteEntityOperations = &operationsKQLDatabase{}
- handler := newTypedHandler(server, entityOperations)
+ var definitionOperations concreteDefinitionOperations = &operationsKQLDatabase{}
+
+ var converter itemConverter[fabkqldatabase.KQLDatabase] = &operationsKQLDatabase{}
+
+ handler := newTypedHandlerWithConverter(server, entityOperations, converter)
configureEntityWithParentID(
handler,
@@ -117,6 +161,14 @@ func configureKQLDatabase(server *fakeServer) fabkqldatabase.KQLDatabase {
&server.ServerFactory.KQLDatabase.ItemsServer.NewListKQLDatabasesPager,
&server.ServerFactory.KQLDatabase.ItemsServer.DeleteKQLDatabase)
+ configureDefinitions(
+ handler,
+ entityOperations,
+ definitionOperations,
+ &server.ServerFactory.KQLDatabase.ItemsServer.BeginCreateKQLDatabase,
+ &server.ServerFactory.KQLDatabase.ItemsServer.BeginGetKQLDatabaseDefinition,
+ &server.ServerFactory.KQLDatabase.ItemsServer.BeginUpdateKQLDatabaseDefinition)
+
return fabkqldatabase.KQLDatabase{}
}
@@ -142,3 +194,26 @@ func NewRandomKQLDatabaseWithWorkspace(workspaceID string) fabkqldatabase.KQLDat
return result
}
+
+func NewRandomKQLDatabaseDefinition() fabkqldatabase.Definition {
+ defPart1 := fabkqldatabase.DefinitionPart{
+ PayloadType: to.Ptr(fabkqldatabase.PayloadTypeInlineBase64),
+ Path: to.Ptr("DatabaseProperties.json"),
+ Payload: to.Ptr("ew0KICAiZGF0YWJhc2VUeXBlIjogIlJlYWRXcml0ZSIsDQogICJwYXJlbnRFdmVudGhvdXNlSXRlbUlkIjogIjAwMDAwMDAwLTAwMDAtMDAwMC0wMDAwLTAwMDAwMDAwMDAwMCIsIA0KICAib25lTGFrZUNhY2hpbmdQZXJpb2QiOiAiUDM2NTAwRCIsIA0KICAib25lTGFrZVN0YW5kYXJkU3RvcmFnZVBlcmlvZCI6ICJQMzY1MDAwRCIgDQp9"),
+ }
+
+ defPart2 := fabkqldatabase.DefinitionPart{
+ PayloadType: to.Ptr(fabkqldatabase.PayloadTypeInlineBase64),
+ Path: to.Ptr("DatabaseSchema.kql"),
+ Payload: to.Ptr("LmNyZWF0ZS1tZXJnZSB0YWJsZSBNeUxvZ3MyIChMZXZlbDpzdHJpbmcsIFRpbWVzdGFtcDpkYXRldGltZSwgVXNlcklkOnN0cmluZywgVHJhY2VJZDpzdHJpbmcsIE1lc3NhZ2U6c3RyaW5nLCBQcm9jZXNzSWQ6aW50KSANCi5jcmVhdGUtbWVyZ2UgdGFibGUgTXlMb2dzMyAoTGV2ZWw6c3RyaW5nLCBUaW1lc3RhbXA6ZGF0ZXRpbWUsIFVzZXJJZDpzdHJpbmcsIFRyYWNlSWQ6c3RyaW5nLCBNZXNzYWdlOnN0cmluZywgUHJvY2Vzc0lkOmludCkgDQouY3JlYXRlLW1lcmdlIHRhYmxlIE15TG9nczcgKExldmVsOnN0cmluZywgVGltZXN0YW1wOmRhdGV0aW1lLCBVc2VySWQ6c3RyaW5nLCBUcmFjZUlkOnN0cmluZywgTWVzc2FnZTpzdHJpbmcsIFByb2Nlc3NJZDppbnQp"),
+ }
+
+ var defParts []fabkqldatabase.DefinitionPart
+
+ defParts = append(defParts, defPart1)
+ defParts = append(defParts, defPart2)
+
+ return fabkqldatabase.Definition{
+ Parts: defParts,
+ }
+}
diff --git a/internal/testhelp/fakes/fabric_lakehouse.go b/internal/testhelp/fakes/fabric_lakehouse.go
index 62af0f0d..e5860593 100644
--- a/internal/testhelp/fakes/fabric_lakehouse.go
+++ b/internal/testhelp/fakes/fabric_lakehouse.go
@@ -16,6 +16,18 @@ import (
type operationsLakehouse struct{}
+// ConvertItemToEntity implements itemConverter.
+func (o *operationsLakehouse) ConvertItemToEntity(item fabcore.Item) fablakehouse.Lakehouse {
+ return fablakehouse.Lakehouse{
+ ID: item.ID,
+ DisplayName: item.DisplayName,
+ Description: item.Description,
+ WorkspaceID: item.WorkspaceID,
+ Type: to.Ptr(fablakehouse.ItemTypeLakehouse),
+ Properties: NewRandomLakehouse().Properties,
+ }
+}
+
// CreateWithParentID implements concreteOperations.
func (o *operationsLakehouse) CreateWithParentID(parentID string, data fablakehouse.CreateLakehouseRequest) fablakehouse.Lakehouse {
entity := NewRandomLakehouseWithWorkspace(parentID)
@@ -106,7 +118,9 @@ func configureLakehouse(server *fakeServer) fablakehouse.Lakehouse {
var entityOperations concreteEntityOperations = &operationsLakehouse{}
- handler := newTypedHandler(server, entityOperations)
+ var converter itemConverter[fablakehouse.Lakehouse] = &operationsLakehouse{}
+
+ handler := newTypedHandlerWithConverter(server, entityOperations, converter)
configureEntityWithParentID(
handler,
@@ -130,6 +144,7 @@ func NewRandomLakehouse() fablakehouse.Lakehouse {
Properties: &fablakehouse.Properties{
OneLakeFilesPath: to.Ptr(testhelp.RandomName()),
OneLakeTablesPath: to.Ptr(testhelp.RandomName()),
+ DefaultSchema: to.Ptr("dbo"),
SQLEndpointProperties: &fablakehouse.SQLEndpointProperties{
ID: to.Ptr(testhelp.RandomUUID()),
ProvisioningStatus: to.Ptr(fablakehouse.SQLEndpointProvisioningStatusSuccess),
diff --git a/internal/testhelp/fakes/fabric_sparkjobdefinition.go b/internal/testhelp/fakes/fabric_sparkjobdefinition.go
index 325eed76..39f354a7 100644
--- a/internal/testhelp/fakes/fabric_sparkjobdefinition.go
+++ b/internal/testhelp/fakes/fabric_sparkjobdefinition.go
@@ -16,6 +16,18 @@ import (
type operationsSparkJobDefinition struct{}
+// ConvertItemToEntity implements itemConverter.
+func (o *operationsSparkJobDefinition) ConvertItemToEntity(item fabcore.Item) fabsparkjobdefinition.SparkJobDefinition {
+ return fabsparkjobdefinition.SparkJobDefinition{
+ ID: item.ID,
+ DisplayName: item.DisplayName,
+ Description: item.Description,
+ WorkspaceID: item.WorkspaceID,
+ Type: to.Ptr(fabsparkjobdefinition.ItemTypeSparkJobDefinition),
+ Properties: NewRandomSparkJobDefinition().Properties,
+ }
+}
+
// CreateDefinition implements concreteDefinitionOperations.
func (o *operationsSparkJobDefinition) CreateDefinition(data fabsparkjobdefinition.CreateSparkJobDefinitionRequest) *fabsparkjobdefinition.PublicDefinition {
return data.Definition
@@ -94,8 +106,8 @@ func (o *operationsSparkJobDefinition) TransformUpdate(entity fabsparkjobdefinit
// Update implements concreteOperations.
func (o *operationsSparkJobDefinition) Update(base fabsparkjobdefinition.SparkJobDefinition, data fabsparkjobdefinition.UpdateSparkJobDefinitionRequest) fabsparkjobdefinition.SparkJobDefinition {
- base.Description = data.Description
base.DisplayName = data.DisplayName
+ base.Description = data.Description
return base
}
@@ -133,10 +145,10 @@ func configureSparkJobDefinition(server *fakeServer) fabsparkjobdefinition.Spark
}
var entityOperations concreteEntityOperations = &operationsSparkJobDefinition{}
-
+ var converter itemConverter[fabsparkjobdefinition.SparkJobDefinition] = &operationsSparkJobDefinition{}
var definitionOperations concreteDefinitionOperations = &operationsSparkJobDefinition{}
- handler := newTypedHandler(server, entityOperations)
+ handler := newTypedHandlerWithConverter(server, entityOperations, converter)
configureEntityWithParentID(
handler,
diff --git a/internal/testhelp/fakes/fabric_warehouse.go b/internal/testhelp/fakes/fabric_warehouse.go
index 0bf72118..c61bbdb8 100644
--- a/internal/testhelp/fakes/fabric_warehouse.go
+++ b/internal/testhelp/fakes/fabric_warehouse.go
@@ -93,6 +93,18 @@ func (o *operationsWarehouse) Validate(newEntity fabwarehouse.Warehouse, existin
return http.StatusCreated, nil
}
+// ConvertItemToEntity implements itemConverter.
+func (o *operationsWarehouse) ConvertItemToEntity(entity fabcore.Item) fabwarehouse.Warehouse {
+ return fabwarehouse.Warehouse{
+ ID: entity.ID,
+ DisplayName: entity.DisplayName,
+ Description: entity.Description,
+ WorkspaceID: entity.WorkspaceID,
+ Type: to.Ptr(fabwarehouse.ItemTypeWarehouse),
+ Properties: NewRandomWarehouse().Properties,
+ }
+}
+
func configureWarehouse(server *fakeServer) fabwarehouse.Warehouse {
type concreteEntityOperations interface {
parentIDOperations[
@@ -106,8 +118,9 @@ func configureWarehouse(server *fakeServer) fabwarehouse.Warehouse {
}
var entityOperations concreteEntityOperations = &operationsWarehouse{}
+ var converter itemConverter[fabwarehouse.Warehouse] = &operationsWarehouse{}
- handler := newTypedHandler(server, entityOperations)
+ handler := newTypedHandlerWithConverter(server, entityOperations, converter)
configureEntityWithParentID(
handler,
diff --git a/internal/testhelp/fakes/fake_interfaces.go b/internal/testhelp/fakes/fake_interfaces.go
index 959b55fc..bd63e515 100644
--- a/internal/testhelp/fakes/fake_interfaces.go
+++ b/internal/testhelp/fakes/fake_interfaces.go
@@ -3,11 +3,18 @@
package fakes
+import fabcore "github.com/microsoft/fabric-sdk-go/fabric/core"
+
type identifier[TEntity any] interface {
// GetID returns the ID of the entity.
GetID(entity TEntity) string
}
+type itemConverter[TEntity any] interface {
+	// ConvertItemToEntity converts a fabcore.Item into a specific entity.
+ ConvertItemToEntity(item fabcore.Item) TEntity
+}
+
type getTransformer[TEntity, TOutput any] interface {
// TransformGet transforms an entity into a response.
TransformGet(entity TEntity) TOutput
diff --git a/internal/testhelp/fakes/fake_typedhandler.go b/internal/testhelp/fakes/fake_typedhandler.go
index 7913ce07..02edc146 100644
--- a/internal/testhelp/fakes/fake_typedhandler.go
+++ b/internal/testhelp/fakes/fake_typedhandler.go
@@ -14,17 +14,37 @@ import (
var errItemNotFound = fabcore.ErrItem.ItemNotFound.Error()
+type defaultConverter[TEntity any] struct{}
+
+func (c *defaultConverter[TEntity]) ConvertItemToEntity(item fabcore.Item) TEntity {
+ var entity TEntity
+
+ setReflectedStringPropertyValue(&entity, "ID", *item.ID)
+ setReflectedStringPropertyValue(&entity, "WorkspaceID", *item.WorkspaceID)
+ setReflectedStringPropertyValue(&entity, "DisplayName", *item.DisplayName)
+ setReflectedStringPropertyValue(&entity, "Description", *item.Description)
+ setReflectedStringPropertyValue(&entity, "Type", string(*item.Type))
+
+ return entity
+}
+
// typedHandler is a handler for a specific entity type.
type typedHandler[TEntity any] struct {
*fakeServer
identifier identifier[TEntity]
+ converter itemConverter[TEntity]
}
// newTypedHandler creates a new typedHandler.
func newTypedHandler[TEntity any](server *fakeServer, identifier identifier[TEntity]) *typedHandler[TEntity] {
+ return newTypedHandlerWithConverter(server, identifier, &defaultConverter[TEntity]{})
+}
+
+func newTypedHandlerWithConverter[TEntity any](server *fakeServer, identifier identifier[TEntity], converter itemConverter[TEntity]) *typedHandler[TEntity] {
typedHandler := &typedHandler[TEntity]{
fakeServer: server,
identifier: identifier,
+ converter: converter,
}
return typedHandler
@@ -103,17 +123,14 @@ func generateID(parentID, childID string) string {
func (h *typedHandler[TEntity]) Elements() []TEntity {
ret := make([]TEntity, 0)
- // if it is a FabricItem, return all the elements as fabric items
- if h.entityTypeIsFabricItem() {
- for _, element := range h.elements {
+ for _, element := range h.elements {
+ // if it already is the right type, add it.
+ if castedElement, ok := element.(TEntity); ok {
+ ret = append(ret, castedElement)
+ } else if h.entityTypeCanBeConvertedToFabricItem() {
+ // if it is not the right type, but it's a fabric item, convert it to the right type
item := asFabricItem(element)
- ret = append(ret, h.getFabricItemAsTEntity(item))
- }
- } else {
- for _, element := range h.elements {
- if element, ok := element.(TEntity); ok {
- ret = append(ret, element)
- }
+ ret = append(ret, h.converter.ConvertItemToEntity(item))
}
}
@@ -180,7 +197,7 @@ func (h *typedHandler[TEntity]) Get(id string) TEntity {
for _, element := range h.elements {
item := asFabricItem(element)
if strings.HasSuffix(id, *item.ID) {
- return h.getFabricItemAsTEntity(item)
+ return h.converter.ConvertItemToEntity(item)
}
}
}
@@ -223,18 +240,6 @@ func (h *typedHandler[TEntity]) getPointer(id string) *TEntity {
return nil
}
-func (h *typedHandler[TEntity]) getFabricItemAsTEntity(item fabcore.Item) TEntity {
- var entity TEntity
-
- h.setReflectedStringPropertyValue(&entity, "ID", *item.ID)
- h.setReflectedStringPropertyValue(&entity, "WorkspaceID", *item.WorkspaceID)
- h.setReflectedStringPropertyValue(&entity, "DisplayName", *item.DisplayName)
- h.setReflectedStringPropertyValue(&entity, "Description", *item.Description)
- h.setReflectedStringPropertyValue(&entity, "Type", string(*item.Type))
-
- return entity
-}
-
// asFabricItem converts an element to a fabric item.
func asFabricItem(element any) fabcore.Item {
itemType := fabcore.ItemType(*getReflectedStringPropertyValue(element, "Type"))
@@ -261,7 +266,7 @@ func getReflectedStringPropertyValue(element any, propertyName string) *string {
}
// setReflectedStringPropertyValue sets a string property value on a reflected object.
-func (h *typedHandler[TEntity]) setReflectedStringPropertyValue(entity *TEntity, propertyName, value string) {
+func setReflectedStringPropertyValue[TEntity any](entity *TEntity, propertyName, value string) {
reflectedValue := reflect.ValueOf(entity).Elem()
propertyValue := reflectedValue.FieldByName(propertyName)
diff --git a/internal/testhelp/fixtures/eventhouse/EventhouseProperties.json.tmpl b/internal/testhelp/fixtures/eventhouse/EventhouseProperties.json.tmpl
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/internal/testhelp/fixtures/eventhouse/EventhouseProperties.json.tmpl
@@ -0,0 +1 @@
+{}
diff --git a/internal/testhelp/fixtures/kql_database/DatabaseProperties.json.tmpl b/internal/testhelp/fixtures/kql_database/DatabaseProperties.json.tmpl
new file mode 100644
index 00000000..9c163788
--- /dev/null
+++ b/internal/testhelp/fixtures/kql_database/DatabaseProperties.json.tmpl
@@ -0,0 +1,6 @@
+{
+ "databaseType": "ReadWrite",
+ "parentEventhouseItemId": "{{ .EventhouseID }}",
+ "oneLakeCachingPeriod": "P36500D",
+ "oneLakeStandardStoragePeriod": "P365000D"
+}
diff --git a/internal/testhelp/fixtures/kql_database/DatabaseSchema.kql b/internal/testhelp/fixtures/kql_database/DatabaseSchema.kql
new file mode 100644
index 00000000..fa546dc6
--- /dev/null
+++ b/internal/testhelp/fixtures/kql_database/DatabaseSchema.kql
@@ -0,0 +1,7 @@
+// KQL script
+// Use management commands in this script to configure your database items, such as tables, functions, materialized views, and more.
+
+
+.create-merge table MyLogs2 (Level:string, Timestamp:datetime, UserId:string, TraceId:string, Message:string, ProcessId:int)
+.create-merge table MyLogs3 (Level:string, Timestamp:datetime, UserId:string, TraceId:string, Message:string, ProcessId:int)
+.create-merge table MyLogs7 (Level:string, Timestamp:datetime, UserId:string, TraceId:string, Message:string, ProcessId:int)
diff --git a/tools/scripts/Set-WellKnown.ps1 b/tools/scripts/Set-WellKnown.ps1
index 2c1fa660..c6324ba4 100644
--- a/tools/scripts/Set-WellKnown.ps1
+++ b/tools/scripts/Set-WellKnown.ps1
@@ -404,6 +404,76 @@ function Get-DisplayName {
return $result
}
+function Set-FabricWorkspace {
+ param (
+ [Parameter(Mandatory = $true)]
+ [string]$DisplayName,
+
+ [Parameter(Mandatory = $true)]
+ [string]$CapacityId
+ )
+
+ $workspaces = Invoke-FabricRest -Method 'GET' -Endpoint 'workspaces'
+ $workspace = $workspaces.Response.value | Where-Object { $_.displayName -eq $DisplayName }
+ if (!$workspace) {
+ Write-Log -Message "Creating Workspace: $DisplayName" -Level 'WARN'
+ $payload = @{
+ displayName = $DisplayName
+ description = $DisplayName
+ capacityId = $CapacityId
+ }
+ $workspace = (Invoke-FabricRest -Method 'POST' -Endpoint 'workspaces' -Payload $payload).Response
+ }
+
+ return $workspace
+}
+
+function Set-FabricWorkspaceCapacity {
+ param (
+ [Parameter(Mandatory = $true)]
+ [string]$WorkspaceId,
+
+ [Parameter(Mandatory = $true)]
+ [string]$CapacityId
+ )
+
+ $workspace = Invoke-FabricRest -Method 'GET' -Endpoint "workspaces/$WorkspaceId"
+ if ($workspace.Response.capacityId -ne $CapacityId) {
+ Write-Log -Message "Assigning Workspace to Capacity ID: $CapacityId" -Level 'WARN'
+ $payload = @{
+ capacityId = $CapacityId
+ }
+ $result = (Invoke-FabricRest -Method 'POST' -Endpoint "workspaces/$WorkspaceId/assignToCapacity" -Payload $payload).Response
+ $workspace.Response.capacityId = $CapacityId
+ }
+
+ return $workspace.Response
+}
+
+function Set-FabricWorkspaceRoleAssignment {
+ param (
+ [Parameter(Mandatory = $true)]
+ [string]$WorkspaceId,
+
+ [Parameter(Mandatory = $true)]
+ [object]$SPN
+ )
+
+ $results = Invoke-FabricRest -Method 'GET' -Endpoint "workspaces/$WorkspaceId/roleAssignments"
+ $result = $results.Response.value | Where-Object { $_.id -eq $SPN.Id }
+ if (!$result) {
+ Write-Log -Message "Assigning SPN to Workspace: $($SPN.DisplayName)" -Level 'WARN'
+ $payload = @{
+ principal = @{
+ id = $SPN.Id
+ type = 'ServicePrincipal'
+ }
+ role = 'Admin'
+ }
+ $result = (Invoke-FabricRest -Method 'POST' -Endpoint "workspaces/$WorkspaceId/roleAssignments" -Payload $payload).Response
+ }
+}
+
# Define an array of modules to install
$modules = @('Az.Accounts', 'Az.Resources', 'Az.Fabric', 'pwsh-dotenv', 'ADOPS')
@@ -482,7 +552,8 @@ $itemNaming = @{
'SQLDatabase' = 'sqldb'
'SQLEndpoint' = 'sqle'
'Warehouse' = 'wh'
- 'Workspace' = 'ws'
+ 'WorkspaceDS' = 'wsds'
+ 'WorkspaceRS' = 'wsrs'
'DomainParent' = 'parent'
'DomainChild' = 'child'
'EntraServicePrincipal' = 'sp'
@@ -517,54 +588,44 @@ $envVars -join "`n" | Set-Content -Path './wellknown.env' -Force -NoNewline -Enc
$displayName = Get-DisplayName -Base $baseName
-# Create Workspace if not exists
-$displayNameTemp = "${displayName}_$($itemNaming['Workspace'])"
-$workspaces = Invoke-FabricRest -Method 'GET' -Endpoint 'workspaces'
-$workspace = $workspaces.Response.value | Where-Object { $_.displayName -eq $displayNameTemp }
-if (!$workspace) {
- Write-Log -Message "Creating Workspace: $displayNameTemp" -Level 'WARN'
- $payload = @{
- displayName = $displayNameTemp
- description = $displayNameTemp
- capacityId = $capacity.id
- }
- $workspace = (Invoke-FabricRest -Method 'POST' -Endpoint 'workspaces' -Payload $payload).Response
-}
+# Create WorkspaceRS if not exists
+$displayNameTemp = "${displayName}_$($itemNaming['WorkspaceRS'])"
+$workspace = Set-FabricWorkspace -DisplayName $displayNameTemp -CapacityId $capacity.id
-# Assign Workspace to Capacity if not already assigned or assigned to a different capacity
-if ($workspace.capacityId -ne $capacity.id) {
- Write-Log -Message "Assigning Workspace to Capacity ID: $($capacity.id)" -Level 'WARN'
- $payload = @{
- capacityId = $capacity.id
- }
- $result = (Invoke-FabricRest -Method 'POST' -Endpoint "workspaces/$($workspace.id)/assignToCapacity" -Payload $payload).Response
- $workspace.capacityId = $capacity.id
+# Assign WorkspaceRS to Capacity if not already assigned or assigned to a different capacity
+$workspace = Set-FabricWorkspaceCapacity -WorkspaceId $workspace.id -CapacityId $capacity.id
+
+Write-Log -Message "WorkspaceRS - Name: $($workspace.displayName) / ID: $($workspace.id)"
+$wellKnown['WorkspaceRS'] = @{
+ id = $workspace.id
+ displayName = $workspace.displayName
+ description = $workspace.description
}
-# Assign SPN to Workspace if not already assigned
+# Assign SPN to WorkspaceRS if not already assigned
if ($SPN) {
- $results = Invoke-FabricRest -Method 'GET' -Endpoint "workspaces/$($workspace.id)/roleAssignments"
- $result = $results.Response.value | Where-Object { $_.id -eq $SPN.Id }
- if (!$result) {
- Write-Log -Message "Assigning SPN to Workspace: $($SPN.DisplayName)" -Level 'WARN'
- $payload = @{
- principal = @{
- id = $SPN.Id
- type = 'ServicePrincipal'
- }
- role = 'Admin'
- }
- $result = (Invoke-FabricRest -Method 'POST' -Endpoint "workspaces/$($workspace.id)/roleAssignments" -Payload $payload).Response
- }
+ Set-FabricWorkspaceRoleAssignment -WorkspaceId $workspace.id -SPN $SPN
}
-Write-Log -Message "Workspace - Name: $($workspace.displayName) / ID: $($workspace.id)"
-$wellKnown['Workspace'] = @{
+# Create WorkspaceDS if not exists
+$displayNameTemp = "${displayName}_$($itemNaming['WorkspaceDS'])"
+$workspace = Set-FabricWorkspace -DisplayName $displayNameTemp -CapacityId $capacity.id
+
+# Assign WorkspaceDS to Capacity if not already assigned or assigned to a different capacity
+$workspace = Set-FabricWorkspaceCapacity -WorkspaceId $workspace.id -CapacityId $capacity.id
+
+Write-Log -Message "WorkspaceDS - Name: $($workspace.displayName) / ID: $($workspace.id)"
+$wellKnown['WorkspaceDS'] = @{
id = $workspace.id
displayName = $workspace.displayName
description = $workspace.description
}
+# Assign SPN to WorkspaceDS if not already assigned
+if ($SPN) {
+ Set-FabricWorkspaceRoleAssignment -WorkspaceId $workspace.id -SPN $SPN
+}
+
# Define an array of item types to create
$itemTypes = @('DataPipeline', 'Environment', 'Eventhouse', 'Eventstream', 'KQLDashboard', 'KQLQueryset', 'Lakehouse', 'MLExperiment', 'MLModel', 'Notebook', 'Reflex', 'SparkJobDefinition', 'Warehouse')
@@ -783,5 +844,3 @@ if ($SPN) {
$wellKnownJson = $wellKnown | ConvertTo-Json
$wellKnownJson
$wellKnownJson | Set-Content -Path './internal/testhelp/fixtures/.wellknown.json' -Force -NoNewline -Encoding utf8
-
-
From 9c108c19c5ef74c5967079db6bcedc415b742b2e Mon Sep 17 00:00:00 2001
From: Dariusz Porowski <3431813+DariuszPorowski@users.noreply.github.com>
Date: Mon, 30 Dec 2024 12:16:40 -0800
Subject: [PATCH 2/2] fix(kql-database): config is required
---
.changes/unreleased/added-20241230-120810.yaml | 5 +++++
.changes/unreleased/added-20241230-120926.yaml | 5 +++++
docs/resources/kql_database.md | 8 ++++----
internal/services/kqldatabase/resource_kql_database.go | 2 +-
4 files changed, 15 insertions(+), 5 deletions(-)
create mode 100644 .changes/unreleased/added-20241230-120810.yaml
create mode 100644 .changes/unreleased/added-20241230-120926.yaml
diff --git a/.changes/unreleased/added-20241230-120810.yaml b/.changes/unreleased/added-20241230-120810.yaml
new file mode 100644
index 00000000..c2498554
--- /dev/null
+++ b/.changes/unreleased/added-20241230-120810.yaml
@@ -0,0 +1,5 @@
+kind: added
+body: Definition support in the `fabric_eventhouse` Resource
+time: 2024-12-30T12:08:10.4218393-08:00
+custom:
+ Issue: "135"
diff --git a/.changes/unreleased/added-20241230-120926.yaml b/.changes/unreleased/added-20241230-120926.yaml
new file mode 100644
index 00000000..7acc277f
--- /dev/null
+++ b/.changes/unreleased/added-20241230-120926.yaml
@@ -0,0 +1,5 @@
+kind: added
+body: Definition support in the `fabric_eventhouse` Data-Sources
+time: 2024-12-30T12:09:26.6679881-08:00
+custom:
+ Issue: "156"
diff --git a/docs/resources/kql_database.md b/docs/resources/kql_database.md
index a30e6f58..112e0b1d 100644
--- a/docs/resources/kql_database.md
+++ b/docs/resources/kql_database.md
@@ -73,15 +73,15 @@ resource "fabric_kql_database" "example4" {
### Required
+- `configuration` (Attributes) The KQL Database creation configuration.
+
+Any changes to this configuration will result in recreation of the KQL Database. (see [below for nested schema](#nestedatt--configuration))
+
- `display_name` (String) The KQL Database display name.
- `workspace_id` (String) The Workspace ID.
### Optional
-- `configuration` (Attributes) The KQL Database creation configuration.
-
-Any changes to this configuration will result in recreation of the KQL Database. (see [below for nested schema](#nestedatt--configuration))
-
- `description` (String) The KQL Database description.
- `timeouts` (Attributes) (see [below for nested schema](#nestedatt--timeouts))
diff --git a/internal/services/kqldatabase/resource_kql_database.go b/internal/services/kqldatabase/resource_kql_database.go
index 64a20cfb..7e6b5ee2 100644
--- a/internal/services/kqldatabase/resource_kql_database.go
+++ b/internal/services/kqldatabase/resource_kql_database.go
@@ -113,7 +113,7 @@ func NewResourceKQLDatabase() resource.Resource {
// DefinitionRequired: false,
// DefinitionEmpty: "",
},
- IsConfigRequired: false,
+ IsConfigRequired: true,
ConfigAttributes: getResourceKQLDatabaseConfigurationAttributes(),
CreationPayloadSetter: creationPayloadSetter,
PropertiesAttributes: getResourceKQLDatabasePropertiesAttributes(),