Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

New Resource: azurerm_data_factory_dataset_mysql #3267

Merged
merged 3 commits into from
Apr 16, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 54 additions & 0 deletions azurerm/data_factory.go
Original file line number Diff line number Diff line change
Expand Up @@ -136,3 +136,57 @@ func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecifica

return output
}

// DatasetColumn describes the attributes needed to specify a structure column for a dataset
type DatasetColumn struct {
	// Name is the column name; required by the schema_column schema.
	Name string `json:"name,omitempty"`
	// Description is an optional free-form description of the column.
	Description string `json:"description,omitempty"`
	// Type is the optional column data type (e.g. "String", "Int32").
	Type string `json:"type,omitempty"`
}

// expandDataFactoryDatasetStructure converts the `schema_column` blocks from
// the Terraform configuration into the []DatasetColumn slice used for the
// dataset's `structure` property.
//
// Each element of input is a map produced by the schema_column schema. The
// comma-ok type assertions guard against nil (unset) or non-string values so
// an absent attribute cannot panic — the original asserted "name" unchecked
// and dereferenced "description"/"type" after only a nil check.
func expandDataFactoryDatasetStructure(input []interface{}) interface{} {
	columns := make([]DatasetColumn, 0, len(input))
	for _, column := range input {
		attrs := column.(map[string]interface{})

		datasetColumn := DatasetColumn{}
		if name, ok := attrs["name"].(string); ok {
			datasetColumn.Name = name
		}
		if description, ok := attrs["description"].(string); ok {
			datasetColumn.Description = description
		}
		if columnType, ok := attrs["type"].(string); ok {
			datasetColumn.Type = columnType
		}
		columns = append(columns, datasetColumn)
	}
	return columns
}

// flattenDataFactoryStructureColumns maps the dataset `structure` value
// returned by the Data Factory API back into the []interface{} shape used by
// the `schema_column` attribute in state.
//
// The API models structure as an untyped value; a non-slice input yields nil,
// and non-map elements inside the slice are skipped.
func flattenDataFactoryStructureColumns(input interface{}) []interface{} {
	columns, ok := input.([]interface{})
	if !ok {
		return columns
	}

	output := make([]interface{}, 0, len(columns))
	for _, raw := range columns {
		attrs, isMap := raw.(map[string]interface{})
		if !isMap {
			continue
		}

		flattened := map[string]interface{}{}
		for _, key := range []string{"name", "type", "description"} {
			if value := attrs[key]; value != nil {
				flattened[key] = value
			}
		}
		output = append(output, flattened)
	}
	return output
}
1 change: 1 addition & 0 deletions azurerm/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -222,6 +222,7 @@ func Provider() terraform.ResourceProvider {
"azurerm_container_service": resourceArmContainerService(),
"azurerm_cosmosdb_account": resourceArmCosmosDBAccount(),
"azurerm_data_factory": resourceArmDataFactory(),
"azurerm_data_factory_dataset_mysql": resourceArmDataFactoryDatasetMySQL(),
"azurerm_data_factory_dataset_sql_server_table": resourceArmDataFactoryDatasetSQLServerTable(),
"azurerm_data_factory_linked_service_mysql": resourceArmDataFactoryLinkedServiceMySQL(),
"azurerm_data_factory_linked_service_postgresql": resourceArmDataFactoryLinkedServicePostgreSQL(),
Expand Down
315 changes: 315 additions & 0 deletions azurerm/resource_arm_data_factory_dataset_mysql.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,315 @@
package azurerm

import (
"fmt"
"log"
"regexp"

"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

// resourceArmDataFactoryDatasetMySQL defines the azurerm_data_factory_dataset_mysql
// resource: a MySQL table dataset that lives inside an Azure Data Factory and
// references an existing MySQL linked service.
func resourceArmDataFactoryDatasetMySQL() *schema.Resource {
	return &schema.Resource{
		Create: resourceArmDataFactoryDatasetMySQLCreateOrUpdate,
		Read:   resourceArmDataFactoryDatasetMySQLRead,
		Update: resourceArmDataFactoryDatasetMySQLCreateOrUpdate,
		Delete: resourceArmDataFactoryDatasetMySQLDelete,

		// Datasets are addressed by their full Azure resource ID, so the
		// default passthrough importer is sufficient.
		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"name": {
				Type:         schema.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName,
			},

			// Data Factory names: alphanumeric groups separated by single
			// hyphens, no leading/trailing hyphen (per the linked naming rules).
			"data_factory_name": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
				ValidateFunc: validation.StringMatch(
					regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`),
					`Invalid name for Data Factory, see https://docs.microsoft.com/en-us/azure/data-factory/naming-rules`,
				),
			},

			"resource_group_name": resourceGroupNameSchema(),

			// Name of the linked service this dataset reads through.
			"linked_service_name": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: validate.NoEmptyStrings,
			},

			"table_name": {
				Type:         schema.TypeString,
				Optional:     true,
				ValidateFunc: validate.NoEmptyStrings,
			},

			"parameters": {
				Type:     schema.TypeMap,
				Optional: true,
			},

			"description": {
				Type:         schema.TypeString,
				Optional:     true,
				ValidateFunc: validate.NoEmptyStrings,
			},

			"annotations": {
				Type:     schema.TypeList,
				Optional: true,
				Elem: &schema.Schema{
					Type: schema.TypeString,
				},
			},

			"folder": {
				Type:         schema.TypeString,
				Optional:     true,
				ValidateFunc: validate.NoEmptyStrings,
			},

			"additional_properties": {
				Type:     schema.TypeMap,
				Optional: true,
			},

			// Optional column definitions mapped to the dataset's `structure`
			// property in the Data Factory API.
			"schema_column": {
				Type:     schema.TypeList,
				Optional: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"name": {
							Type:         schema.TypeString,
							Required:     true,
							ValidateFunc: validate.NoEmptyStrings,
						},
						// Case-sensitive list of column types accepted by the
						// Data Factory service.
						"type": {
							Type:     schema.TypeString,
							Optional: true,
							ValidateFunc: validation.StringInSlice([]string{
								"Byte",
								"Byte[]",
								"Boolean",
								"Date",
								"DateTime",
								"DateTimeOffset",
								"Decimal",
								"Double",
								"Guid",
								"Int16",
								"Int32",
								"Int64",
								"Single",
								"String",
								"TimeSpan",
							}, false),
						},
						"description": {
							Type:         schema.TypeString,
							Optional:     true,
							ValidateFunc: validate.NoEmptyStrings,
						},
					},
				},
			},
		},
	}
}

// resourceArmDataFactoryDatasetMySQLCreateOrUpdate creates or updates a MySQL
// dataset inside a Data Factory. The Data Factory API exposes a single
// upsert-style CreateOrUpdate operation, so the same function backs both the
// Create and Update resource operations.
func resourceArmDataFactoryDatasetMySQLCreateOrUpdate(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*ArmClient).dataFactoryDatasetClient
	ctx := meta.(*ArmClient).StopContext

	name := d.Get("name").(string)
	dataFactoryName := d.Get("data_factory_name").(string)
	resourceGroup := d.Get("resource_group_name").(string)

	// When import-on-exists is enforced, refuse to create over a dataset that
	// already exists in Azure so the user imports it into state instead.
	if requireResourcesToBeImported && d.IsNewResource() {
		existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
		if err != nil {
			if !utils.ResponseWasNotFound(existing.Response) {
				return fmt.Errorf("Error checking for presence of existing Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
			}
		}

		if existing.ID != nil && *existing.ID != "" {
			return tf.ImportAsExistsError("azurerm_data_factory_dataset_mysql", *existing.ID)
		}
	}

	mysqlDatasetProperties := datafactory.RelationalTableDatasetTypeProperties{
		TableName: d.Get("table_name").(string),
	}

	linkedServiceName := d.Get("linked_service_name").(string)
	linkedServiceType := "LinkedServiceReference"
	linkedService := &datafactory.LinkedServiceReference{
		ReferenceName: &linkedServiceName,
		Type:          &linkedServiceType,
	}

	description := d.Get("description").(string)
	mysqlTableset := datafactory.RelationalTableDataset{
		RelationalTableDatasetTypeProperties: &mysqlDatasetProperties,
		LinkedServiceName:                    linkedService,
		Description:                          &description,
	}

	if v, ok := d.GetOk("folder"); ok {
		name := v.(string)
		mysqlTableset.Folder = &datafactory.DatasetFolder{
			Name: &name,
		}
	}

	if v, ok := d.GetOk("parameters"); ok {
		mysqlTableset.Parameters = expandDataFactoryParameters(v.(map[string]interface{}))
	}

	if v, ok := d.GetOk("annotations"); ok {
		annotations := v.([]interface{})
		mysqlTableset.Annotations = &annotations
	}

	if v, ok := d.GetOk("additional_properties"); ok {
		mysqlTableset.AdditionalProperties = v.(map[string]interface{})
	}

	if v, ok := d.GetOk("schema_column"); ok {
		mysqlTableset.Structure = expandDataFactoryDatasetStructure(v.([]interface{}))
	}

	datasetType := string(datafactory.TypeRelationalTable)
	dataset := datafactory.DatasetResource{
		Properties: &mysqlTableset,
		Type:       &datasetType,
	}

	if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, dataset, ""); err != nil {
		return fmt.Errorf("Error creating/updating Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
	}

	// Re-read the dataset to obtain its fully-qualified resource ID.
	resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
	if err != nil {
		return fmt.Errorf("Error retrieving Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
	}

	if resp.ID == nil {
		// Bug fix: the previous message formatted `err` with %s here, but err
		// is always nil on this path (the error case returned just above),
		// which rendered as "%!s(<nil>)".
		return fmt.Errorf("Cannot read Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): ID was nil", name, dataFactoryName, resourceGroup)
	}

	d.SetId(*resp.ID)

	return resourceArmDataFactoryDatasetMySQLRead(d, meta)
}

// resourceArmDataFactoryDatasetMySQLRead refreshes Terraform state for a MySQL
// dataset from the Data Factory API, clearing the resource ID when the dataset
// no longer exists in Azure.
func resourceArmDataFactoryDatasetMySQLRead(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*ArmClient).dataFactoryDatasetClient
	ctx := meta.(*ArmClient).StopContext

	id, err := parseAzureResourceID(d.Id())
	if err != nil {
		return err
	}

	resourceGroup := id.ResourceGroup
	dataFactoryName := id.Path["factories"]
	name := id.Path["datasets"]

	resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
	if err != nil {
		// A 404 means the dataset was deleted out-of-band; drop it from state.
		if utils.ResponseWasNotFound(resp.Response) {
			d.SetId("")
			return nil
		}
		return fmt.Errorf("Error retrieving Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
	}

	d.Set("name", resp.Name)
	d.Set("resource_group_name", resourceGroup)
	d.Set("data_factory_name", dataFactoryName)

	relationalTable, ok := resp.Properties.AsRelationalTableDataset()
	if !ok {
		return fmt.Errorf("Error classifiying Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeRelationalTable, *resp.Type)
	}

	d.Set("additional_properties", relationalTable.AdditionalProperties)

	if relationalTable.Description != nil {
		d.Set("description", relationalTable.Description)
	}

	if err := d.Set("parameters", flattenDataFactoryParameters(relationalTable.Parameters)); err != nil {
		return fmt.Errorf("Error setting `parameters`: %+v", err)
	}

	if err := d.Set("annotations", flattenDataFactoryAnnotations(relationalTable.Annotations)); err != nil {
		return fmt.Errorf("Error setting `annotations`: %+v", err)
	}

	if linkedService := relationalTable.LinkedServiceName; linkedService != nil && linkedService.ReferenceName != nil {
		d.Set("linked_service_name", linkedService.ReferenceName)
	}

	// TableName is modelled as interface{} by the SDK; only flatten it when it
	// is actually a string.
	if props := relationalTable.RelationalTableDatasetTypeProperties; props != nil {
		if tableName, isString := props.TableName.(string); isString {
			d.Set("table_name", tableName)
		} else {
			log.Printf("[DEBUG] Skipping `table_name` since it's not a string")
		}
	}

	if folder := relationalTable.Folder; folder != nil && folder.Name != nil {
		d.Set("folder", folder.Name)
	}

	if err := d.Set("schema_column", flattenDataFactoryStructureColumns(relationalTable.Structure)); err != nil {
		return fmt.Errorf("Error setting `schema_column`: %+v", err)
	}

	return nil
}

// resourceArmDataFactoryDatasetMySQLDelete removes the MySQL dataset from its
// Data Factory. A not-found response is treated as success so deletion is
// idempotent.
func resourceArmDataFactoryDatasetMySQLDelete(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*ArmClient).dataFactoryDatasetClient
	ctx := meta.(*ArmClient).StopContext

	id, err := parseAzureResourceID(d.Id())
	if err != nil {
		return err
	}

	resourceGroup := id.ResourceGroup
	dataFactoryName := id.Path["factories"]
	name := id.Path["datasets"]

	resp, err := client.Delete(ctx, resourceGroup, dataFactoryName, name)
	if err != nil && !utils.ResponseWasNotFound(resp) {
		return fmt.Errorf("Error deleting Data Factory Dataset MySQL %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
	}

	return nil
}
Loading