Skip to content

Commit

Permalink
add support for type in adf pipeline parameters and variables
Browse files Browse the repository at this point in the history
  • Loading branch information
hqhqhqhqhqhqhqhqhqhqhq committed Aug 19, 2024
1 parent f988645 commit 11c8714
Show file tree
Hide file tree
Showing 3 changed files with 339 additions and 11 deletions.
58 changes: 58 additions & 0 deletions internal/services/datafactory/data_factory.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,31 @@ func flattenDataFactoryParameters(input map[string]*datafactory.ParameterSpecifi
return output
}

// expandDataFactoryParametersFourPointOh converts the 4.0-style list-based
// `parameters` schema (blocks with name/type/default_value) into the map
// keyed by parameter name that the Data Factory API expects.
func expandDataFactoryParametersFourPointOh(input []interface{}) map[string]*datafactory.ParameterSpecification {
	output := make(map[string]*datafactory.ParameterSpecification, len(input))
	for _, item := range input {
		raw := item.(map[string]interface{})
		name := raw["name"].(string)
		output[name] = &datafactory.ParameterSpecification{
			Type:         datafactory.ParameterType(raw["type"].(string)),
			DefaultValue: raw["default_value"],
		}
	}
	return output
}

// flattenDataFactoryParametersFourPointOh converts the API's map of parameter
// specifications into the 4.0-style list of name/type/default_value blocks.
func flattenDataFactoryParametersFourPointOh(input map[string]*datafactory.ParameterSpecification) []interface{} {
	parameters := make([]interface{}, 0, len(input))
	for k, v := range input {
		// The schema declares `default_value` as a string, but the API can
		// return the default value as a bool (e.g. for Bool-typed
		// parameters) - convert it so d.Set doesn't receive a raw bool.
		// Use a local so the caller's map values are left untouched.
		defaultValue := v.DefaultValue
		if _, ok := defaultValue.(bool); ok {
			defaultValue = fmt.Sprintf("%v", defaultValue)
		}

		param := map[string]interface{}{
			"name":          k,
			"type":          string(v.Type),
			"default_value": defaultValue,
		}
		parameters = append(parameters, param)
	}
	return parameters
}

func flattenDataFactoryAnnotations(input *[]interface{}) []string {
annotations := make([]string, 0)
if input == nil {
Expand Down Expand Up @@ -144,6 +169,39 @@ func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecifica
return output
}

// expandDataFactoryVariablesFourPointOh converts the 4.0-style list-based
// `variables` schema (blocks with name/type/default_value) into the map
// keyed by variable name that the Data Factory API expects.
func expandDataFactoryVariablesFourPointOh(input []interface{}) map[string]*datafactory.VariableSpecification {
	output := make(map[string]*datafactory.VariableSpecification, len(input))
	for _, item := range input {
		raw := item.(map[string]interface{})
		name := raw["name"].(string)
		output[name] = &datafactory.VariableSpecification{
			Type:         datafactory.VariableType(raw["type"].(string)),
			DefaultValue: raw["default_value"],
		}
	}
	return output
}

// flattenDataFactoryVariablesFourPointOh converts the API's map of variable
// specifications into the 4.0-style list of name/type/default_value blocks.
func flattenDataFactoryVariablesFourPointOh(input map[string]*datafactory.VariableSpecification) []interface{} {
	variables := make([]interface{}, 0, len(input))
	for k, v := range input {
		// The schema declares `default_value` as a string, but the API can
		// return the default value as a bool - convert it before setting it
		// into state. Use a local rather than writing through the shared
		// pointer, so the caller's map values are not mutated as a side
		// effect of flattening.
		defaultValue := v.DefaultValue
		if _, ok := defaultValue.(bool); ok {
			defaultValue = fmt.Sprintf("%v", defaultValue)
		}

		variable := map[string]interface{}{
			"name":          k,
			"type":          string(v.Type),
			"default_value": defaultValue,
		}
		variables = append(variables, variable)
	}
	return variables
}

// DatasetColumn describes the attributes needed to specify a structure column for a dataset
type DatasetColumn struct {
Name string `json:"name,omitempty" tfschema:"name"`
Expand Down
103 changes: 92 additions & 11 deletions internal/services/datafactory/data_factory_pipeline_resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
"github.com/hashicorp/go-azure-sdk/resource-manager/datafactory/2018-06-01/factories"
"github.com/hashicorp/terraform-provider-azurerm/helpers/tf"
"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
"github.com/hashicorp/terraform-provider-azurerm/internal/features"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/azuresdkhacks"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/parse"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/validate"
Expand All @@ -22,7 +23,7 @@ import (
)

func resourceDataFactoryPipeline() *pluginsdk.Resource {
return &pluginsdk.Resource{
resource := &pluginsdk.Resource{
Create: resourceDataFactoryPipelineCreateUpdate,
Read: resourceDataFactoryPipelineRead,
Update: resourceDataFactoryPipelineCreateUpdate,
Expand Down Expand Up @@ -109,6 +110,67 @@ func resourceDataFactoryPipeline() *pluginsdk.Resource {
},
},
}

if features.FourPointOhBeta() {
resource.Schema["parameters"] = &pluginsdk.Schema{
Type: pluginsdk.TypeList,
Optional: true,
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"name": {
Type: pluginsdk.TypeString,
Required: true,
},
"default_value": {
Type: pluginsdk.TypeString,
Required: true,
},
"type": {
Type: pluginsdk.TypeString,
Optional: true,
Default: string(datafactory.ParameterTypeString),
ValidateFunc: validation.StringInSlice([]string{
string(datafactory.ParameterTypeString),
string(datafactory.ParameterTypeInt),
string(datafactory.ParameterTypeFloat),
string(datafactory.ParameterTypeBool),
string(datafactory.ParameterTypeArray),
string(datafactory.ParameterTypeObject),
string(datafactory.ParameterTypeSecureString),
}, false),
},
},
},
}

resource.Schema["variables"] = &pluginsdk.Schema{
Type: pluginsdk.TypeList,
Optional: true,
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"name": {
Type: pluginsdk.TypeString,
Required: true,
},
"default_value": {
Type: pluginsdk.TypeString,
Required: true,
},
"type": {
Type: pluginsdk.TypeString,
Optional: true,
Default: string(datafactory.VariableTypeString),
ValidateFunc: validation.StringInSlice([]string{
string(datafactory.VariableTypeString),
string(datafactory.VariableTypeArray),
}, false),
},
},
},
}
}

return resource
}

func resourceDataFactoryPipelineCreateUpdate(d *pluginsdk.ResourceData, meta interface{}) error {
Expand Down Expand Up @@ -141,11 +203,17 @@ func resourceDataFactoryPipelineCreateUpdate(d *pluginsdk.ResourceData, meta int
}

pipeline := &azuresdkhacks.Pipeline{
Parameters: expandDataFactoryParameters(d.Get("parameters").(map[string]interface{})),
Variables: expandDataFactoryVariables(d.Get("variables").(map[string]interface{})),
Description: utils.String(d.Get("description").(string)),
}

if features.FourPointOhBeta() {
pipeline.Parameters = expandDataFactoryParametersFourPointOh(d.Get("parameters").([]interface{}))
pipeline.Variables = expandDataFactoryVariablesFourPointOh(d.Get("variables").([]interface{}))
} else {
pipeline.Parameters = expandDataFactoryParameters(d.Get("parameters").(map[string]interface{}))
pipeline.Variables = expandDataFactoryVariables(d.Get("variables").(map[string]interface{}))
}

if v, ok := d.GetOk("activities_json"); ok {
activities, err := deserializeDataFactoryPipelineActivities(v.(string))
if err != nil {
Expand Down Expand Up @@ -225,9 +293,27 @@ func resourceDataFactoryPipelineRead(d *pluginsdk.ResourceData, meta interface{}
if props := resp.Pipeline; props != nil {
d.Set("description", props.Description)

parameters := flattenDataFactoryParameters(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("setting `parameters`: %+v", err)
if features.FourPointOhBeta() {
parameters := flattenDataFactoryParametersFourPointOh(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("setting `parameters`: %+v", err)
}

variables := flattenDataFactoryVariablesFourPointOh(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("setting `variables`: %+v", err)
}
} else {
parameters := flattenDataFactoryParameters(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("setting `parameters`: %+v", err)
}

variables := flattenDataFactoryVariables(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("setting `variables`: %+v", err)
}

}

annotations := flattenDataFactoryAnnotations(props.Annotations)
Expand Down Expand Up @@ -255,11 +341,6 @@ func resourceDataFactoryPipelineRead(d *pluginsdk.ResourceData, meta interface{}
}
}

variables := flattenDataFactoryVariables(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("setting `variables`: %+v", err)
}

if activities := props.Activities; activities != nil {
activitiesJson, err := serializeDataFactoryPipelineActivities(activities)
if err != nil {
Expand Down
Loading

0 comments on commit 11c8714

Please sign in to comment.