diff --git a/.changelog/36072.txt b/.changelog/36072.txt new file mode 100644 index 000000000000..514301656df4 --- /dev/null +++ b/.changelog/36072.txt @@ -0,0 +1,27 @@ +```release-note:bug +resource/aws_datasync_location_azure_blob: Fix missing `container_url` attribute value and bad `subdirectory` attribute value from state read/refresh +``` + +```release-note:bug +resource/aws_datasync_location_efs: Fix missing `efs_file_system_arn` attribute value from state read/refresh +``` + +```release-note:bug +resource/aws_datasync_location_nfs: Fix missing `server_hostname` attribute value from state read/refresh +``` + +```release-note:bug +resource/aws_datasync_location_s3: Fix missing `s3_bucket_arn` attribute value from state read/refresh +``` + +```release-note:bug +resource/aws_datasync_location_smb: Fix missing `server_hostname` attribute value from state read/refresh +``` + +```release-note:enhancement +resource/aws_datasync_location_hdfs: Add `kerberos_keytab_base64` and `kerberos_krb5_conf_base64` arguments +``` + +```release-note:bug +resource/aws_datasync_location_hdfs: Mark `qop_configuration` as Computed +``` \ No newline at end of file diff --git a/internal/flex/flex.go b/internal/flex/flex.go index f1c6c69b7bf1..fe211e3a3538 100644 --- a/internal/flex/flex.go +++ b/internal/flex/flex.go @@ -13,6 +13,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" tfmaps "github.com/hashicorp/terraform-provider-aws/internal/maps" tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) const ( @@ -350,6 +351,11 @@ func StringToIntValue(v *string) int { return i } +// StringValueToBase64String converts a string to a Go base64 string pointer. +func StringValueToBase64String(v string) *string { + return aws.String(itypes.Base64EncodeOnce([]byte(v))) +} + // StringValueToInt64 converts a string to a Go int64 pointer. // Invalid integer strings are converted to 0. 
func StringValueToInt64(v string) *int64 { diff --git a/internal/service/autoscaling/launch_configuration.go b/internal/service/autoscaling/launch_configuration.go index fcb120248006..0b34279510fc 100644 --- a/internal/service/autoscaling/launch_configuration.go +++ b/internal/service/autoscaling/launch_configuration.go @@ -6,7 +6,6 @@ package autoscaling import ( // nosemgrep:ci.semgrep.aws.multiple-service-imports "context" "crypto/sha1" - "encoding/base64" "encoding/hex" "fmt" "log" @@ -26,10 +25,11 @@ import ( // nosemgrep:ci.semgrep.aws.multiple-service-imports "github.com/hashicorp/terraform-provider-aws/internal/flex" tfec2 "github.com/hashicorp/terraform-provider-aws/internal/service/ec2" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" ) -// @SDKResource("aws_launch_configuration") +// @SDKResource("aws_launch_configuration", name="Launch Configuration") func ResourceLaunchConfiguration() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLaunchConfigurationCreate, @@ -302,15 +302,7 @@ func ResourceLaunchConfiguration() *schema.Resource { Optional: true, ForceNew: true, ConflictsWith: []string{"user_data"}, - ValidateFunc: func(v interface{}, name string) (warns []string, errs []error) { - s := v.(string) - if !verify.IsBase64Encoded([]byte(s)) { - errs = append(errs, fmt.Errorf( - "%s: must be base64-encoded", name, - )) - } - return - }, + ValidateFunc: verify.ValidBase64String, }, }, } @@ -363,7 +355,7 @@ func resourceLaunchConfigurationCreate(ctx context.Context, d *schema.ResourceDa } if v, ok := d.GetOk("user_data"); ok { - input.UserData = aws.String(verify.Base64Encode([]byte(v.(string)))) + input.UserData = flex.StringValueToBase64String(v.(string)) } else if v, ok := d.GetOk("user_data_base64"); ok { input.UserData = aws.String(v.(string)) } @@ -805,8 +797,7 @@ func 
userDataHashSum(userData string) string { // Check whether the user_data is not Base64 encoded. // Always calculate hash of base64 decoded value since we // check against double-encoding when setting it. - v, err := base64.StdEncoding.DecodeString(userData) - + v, err := itypes.Base64Decode(userData) if err != nil { v = []byte(userData) } diff --git a/internal/service/autoscaling/service_package_gen.go b/internal/service/autoscaling/service_package_gen.go index 8abf78465873..75dcf6be0c31 100644 --- a/internal/service/autoscaling/service_package_gen.go +++ b/internal/service/autoscaling/service_package_gen.go @@ -77,6 +77,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka { Factory: ResourceLaunchConfiguration, TypeName: "aws_launch_configuration", + Name: "Launch Configuration", }, } } diff --git a/internal/service/cognitoidp/user_pool_ui_customization.go b/internal/service/cognitoidp/user_pool_ui_customization.go index dd4009f5a7d1..16f2b8f6ce41 100644 --- a/internal/service/cognitoidp/user_pool_ui_customization.go +++ b/internal/service/cognitoidp/user_pool_ui_customization.go @@ -5,7 +5,6 @@ package cognitoidp import ( "context" - "encoding/base64" "log" "time" @@ -95,7 +94,7 @@ func resourceUserPoolUICustomizationPut(ctx context.Context, d *schema.ResourceD } if v, ok := d.GetOk("image_file"); ok { - v, err := base64.StdEncoding.DecodeString(v.(string)) + v, err := itypes.Base64Decode(v.(string)) if err != nil { return sdkdiag.AppendFromErr(diags, err) } diff --git a/internal/service/datasync/exports_test.go b/internal/service/datasync/exports_test.go new file mode 100644 index 000000000000..5b172eef7ef2 --- /dev/null +++ b/internal/service/datasync/exports_test.go @@ -0,0 +1,33 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package datasync + +// Exports for use in tests only. 
+var ( + ResourceLocationAzureBlob = resourceLocationAzureBlob + ResourceLocationEFS = resourceLocationEFS + ResourceLocationFSxLustreFileSystem = resourceLocationFSxLustreFileSystem + ResourceLocationFSxONTAPFileSystem = resourceLocationFSxONTAPFileSystem + ResourceLocationFSxOpenZFSFileSystem = resourceLocationFSxOpenZFSFileSystem + ResourceLocationFSxWindowsFileSystem = resourceLocationFSxWindowsFileSystem + ResourceLocationHDFS = resourceLocationHDFS + ResourceLocationNFS = resourceLocationNFS + ResourceLocationObjectStorage = resourceLocationObjectStorage + ResourceLocationS3 = resourceLocationS3 + ResourceLocationSMB = resourceLocationSMB + ResourceTask = resourceTask + + FindLocationAzureBlobByARN = findLocationAzureBlobByARN + FindLocationEFSByARN = findLocationEFSByARN + FindLocationFSxLustreByARN = findLocationFSxLustreByARN + FindLocationFSxONTAPByARN = findLocationFSxONTAPByARN + FindLocationFSxOpenZFSByARN = findLocationFSxOpenZFSByARN + FindLocationFSxWindowsByARN = findLocationFSxWindowsByARN + FindLocationHDFSByARN = findLocationHDFSByARN + FindLocationNFSByARN = findLocationNFSByARN + FindLocationObjectStorageByARN = findLocationObjectStorageByARN + FindLocationS3ByARN = findLocationS3ByARN + FindLocationSMBByARN = findLocationSMBByARN + FindTaskByARN = findTaskByARN +) diff --git a/internal/service/datasync/location_azure_blob.go b/internal/service/datasync/location_azure_blob.go index 605f01d07422..385ba0998fca 100644 --- a/internal/service/datasync/location_azure_blob.go +++ b/internal/service/datasync/location_azure_blob.go @@ -5,6 +5,7 @@ package datasync import ( "context" + "fmt" "log" "strings" @@ -26,7 +27,7 @@ import ( // @SDKResource("aws_datasync_location_azure_blob", name="Location Microsoft Azure Blob Storage") // @Tags(identifierAttribute="id") -func ResourceLocationAzureBlob() *schema.Resource { +func resourceLocationAzureBlob() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationAzureBlobCreate, 
ReadWithoutTimeout: resourceLocationAzureBlobRead, @@ -154,7 +155,7 @@ func resourceLocationAzureBlobRead(ctx context.Context, d *schema.ResourceData, var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationAzureBlobByARN(ctx, conn, d.Id()) + output, err := findLocationAzureBlobByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location Microsoft Azure Blob Storage (%s) not found, removing from state", d.Id()) @@ -167,19 +168,25 @@ func resourceLocationAzureBlobRead(ctx context.Context, d *schema.ResourceData, } uri := aws.StringValue(output.LocationUri) + accountHostName, err := globalIDFromLocationURI(aws.StringValue(output.LocationUri)) + if err != nil { + return sdkdiag.AppendFromErr(diags, err) + } subdirectory, err := subdirectoryFromLocationURI(uri) if err != nil { return sdkdiag.AppendFromErr(diags, err) } + containerName := subdirectory[:strings.IndexAny(subdirectory[1:], "/")+1] + containerURL := fmt.Sprintf("https://%s%s", accountHostName, containerName) d.Set("access_tier", output.AccessTier) d.Set("agent_arns", aws.StringValueSlice(output.AgentArns)) d.Set("arn", output.LocationArn) d.Set("authentication_type", output.AuthenticationType) d.Set("blob_type", output.BlobType) - d.Set("container_url", d.Get("container_url")) + d.Set("container_url", containerURL) d.Set("sas_configuration", d.Get("sas_configuration")) - d.Set("subdirectory", subdirectory) + d.Set("subdirectory", subdirectory[strings.IndexAny(subdirectory[1:], "/")+1:]) d.Set("uri", uri) return diags @@ -248,7 +255,7 @@ func resourceLocationAzureBlobDelete(ctx context.Context, d *schema.ResourceData return diags } -func FindLocationAzureBlobByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationAzureBlobOutput, error) { +func findLocationAzureBlobByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationAzureBlobOutput, 
error) { input := &datasync.DescribeLocationAzureBlobInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_azure_blob_test.go b/internal/service/datasync/location_azure_blob_test.go index 30b7465aeb65..80032142df5d 100644 --- a/internal/service/datasync/location_azure_blob_test.go +++ b/internal/service/datasync/location_azure_blob_test.go @@ -41,10 +41,10 @@ func TestAccDataSyncLocationAzureBlob_basic(t *testing.T) { acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "datasync", regexache.MustCompile(`location/loc-.+`)), resource.TestCheckResourceAttr(resourceName, "authentication_type", "SAS"), resource.TestCheckResourceAttr(resourceName, "blob_type", "BLOCK"), - resource.TestCheckResourceAttr(resourceName, "container_url", "https://example.com/path"), + resource.TestCheckResourceAttr(resourceName, "container_url", "https://myaccount.blob.core.windows.net/mycontainer"), resource.TestCheckResourceAttr(resourceName, "sas_configuration.#", "1"), resource.TestCheckResourceAttrSet(resourceName, "sas_configuration.0.token"), - resource.TestCheckResourceAttr(resourceName, "subdirectory", "/path/"), + resource.TestCheckResourceAttr(resourceName, "subdirectory", "/myvdir1/myvdir2/"), resource.TestCheckResourceAttr(resourceName, "tags.%", "0"), resource.TestMatchResourceAttr(resourceName, "uri", regexache.MustCompile(`^azure-blob://.+/`)), ), @@ -53,7 +53,7 @@ func TestAccDataSyncLocationAzureBlob_basic(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"container_url", "sas_configuration"}, + ImportStateVerifyIgnore: []string{"sas_configuration"}, }, }, }) @@ -107,7 +107,7 @@ func TestAccDataSyncLocationAzureBlob_tags(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"container_url", "sas_configuration"}, + ImportStateVerifyIgnore: []string{"sas_configuration"}, }, { Config: 
testAccLocationAzureBlobConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), @@ -151,10 +151,10 @@ func TestAccDataSyncLocationAzureBlob_update(t *testing.T) { acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "datasync", regexache.MustCompile(`location/loc-.+`)), resource.TestCheckResourceAttr(resourceName, "authentication_type", "SAS"), resource.TestCheckResourceAttr(resourceName, "blob_type", "BLOCK"), - resource.TestCheckResourceAttr(resourceName, "container_url", "https://example.com/path"), + resource.TestCheckResourceAttr(resourceName, "container_url", "https://myaccount.blob.core.windows.net/mycontainer"), resource.TestCheckResourceAttr(resourceName, "sas_configuration.#", "1"), resource.TestCheckResourceAttrSet(resourceName, "sas_configuration.0.token"), - resource.TestCheckResourceAttr(resourceName, "subdirectory", "/path/"), + resource.TestCheckResourceAttr(resourceName, "subdirectory", "/myvdir1/myvdir2/"), resource.TestCheckResourceAttr(resourceName, "tags.%", "0"), resource.TestMatchResourceAttr(resourceName, "uri", regexache.MustCompile(`^azure-blob://.+/`)), ), @@ -168,10 +168,10 @@ func TestAccDataSyncLocationAzureBlob_update(t *testing.T) { acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "datasync", regexache.MustCompile(`location/loc-.+`)), resource.TestCheckResourceAttr(resourceName, "authentication_type", "SAS"), resource.TestCheckResourceAttr(resourceName, "blob_type", "BLOCK"), - resource.TestCheckResourceAttr(resourceName, "container_url", "https://example.com/path"), + resource.TestCheckResourceAttr(resourceName, "container_url", "https://myaccount.blob.core.windows.net/mycontainer"), resource.TestCheckResourceAttr(resourceName, "sas_configuration.#", "1"), resource.TestCheckResourceAttrSet(resourceName, "sas_configuration.0.token"), - resource.TestCheckResourceAttr(resourceName, "subdirectory", "/path/"), + resource.TestCheckResourceAttr(resourceName, "subdirectory", "/"), 
resource.TestCheckResourceAttr(resourceName, "tags.%", "0"), resource.TestMatchResourceAttr(resourceName, "uri", regexache.MustCompile(`^azure-blob://.+/`)), ), @@ -241,7 +241,8 @@ func testAccLocationAzureBlobConfig_basic(rName string) string { resource "aws_datasync_location_azure_blob" "test" { agent_arns = [aws_datasync_agent.test.arn] authentication_type = "SAS" - container_url = "https://example.com/path" + container_url = "https://myaccount.blob.core.windows.net/mycontainer" + subdirectory = "/myvdir1/myvdir2" sas_configuration { token = "sp=r&st=2023-12-20T14:54:52Z&se=2023-12-20T22:54:52Z&spr=https&sv=2021-06-08&sr=c&sig=aBBKDWQvyuVcTPH9EBp%%2FXTI9E%%2F%%2Fmq171%%2BZU178wcwqU%%3D" @@ -255,7 +256,7 @@ func testAccLocationAzureBlobConfig_tags1(rName, key1, value1 string) string { resource "aws_datasync_location_azure_blob" "test" { agent_arns = [aws_datasync_agent.test.arn] authentication_type = "SAS" - container_url = "https://example.com/path" + container_url = "https://myaccount.blob.core.windows.net/mycontainer" sas_configuration { token = "sp=r&st=2023-12-20T14:54:52Z&se=2023-12-20T22:54:52Z&spr=https&sv=2021-06-08&sr=c&sig=aBBKDWQvyuVcTPH9EBp%%2FXTI9E%%2F%%2Fmq171%%2BZU178wcwqU%%3D" @@ -273,7 +274,7 @@ func testAccLocationAzureBlobConfig_tags2(rName, key1, value1, key2, value2 stri resource "aws_datasync_location_azure_blob" "test" { agent_arns = [aws_datasync_agent.test.arn] authentication_type = "SAS" - container_url = "https://example.com/path" + container_url = "https://myaccount.blob.core.windows.net/mycontainer" sas_configuration { token = "sp=r&st=2023-12-20T14:54:52Z&se=2023-12-20T22:54:52Z&spr=https&sv=2021-06-08&sr=c&sig=aBBKDWQvyuVcTPH9EBp%%2FXTI9E%%2F%%2Fmq171%%2BZU178wcwqU%%3D" @@ -293,7 +294,8 @@ resource "aws_datasync_location_azure_blob" "test" { access_tier = "COOL" agent_arns = [aws_datasync_agent.test.arn] authentication_type = "SAS" - container_url = "https://example.com/path" + container_url = 
"https://myaccount.blob.core.windows.net/mycontainer" + subdirectory = "/" sas_configuration { token = "sp=r&st=2023-12-20T14:54:52Z&se=2023-12-20T22:54:52Z&spr=https&sv=2021-06-08&sr=c&sig=aBBKDWQvyuVcTPH9EBp%%2FXTI9E%%2F%%2Fmq171%%2BZU178wcwqU%%3D" diff --git a/internal/service/datasync/location_efs.go b/internal/service/datasync/location_efs.go index 9f85450dcb35..c2810e7bb748 100644 --- a/internal/service/datasync/location_efs.go +++ b/internal/service/datasync/location_efs.go @@ -5,10 +5,12 @@ package datasync import ( "context" + "fmt" "log" "strings" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/arn" "github.com/aws/aws-sdk-go/service/datasync" "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -26,7 +28,7 @@ import ( // @SDKResource("aws_datasync_location_efs", name="Location EFS") // @Tags(identifierAttribute="id") -func ResourceLocationEFS() *schema.Resource { +func resourceLocationEFS() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationEFSCreate, ReadWithoutTimeout: resourceLocationEFSRead, @@ -157,7 +159,7 @@ func resourceLocationEFSRead(ctx context.Context, d *schema.ResourceData, meta i var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationEFSByARN(ctx, conn, d.Id()) + output, err := findLocationEFSByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location EFS (%s) not found, removing from state", d.Id()) @@ -170,16 +172,27 @@ func resourceLocationEFSRead(ctx context.Context, d *schema.ResourceData, meta i } uri := aws.StringValue(output.LocationUri) + globalID, err := globalIDFromLocationURI(uri) + if err != nil { + return sdkdiag.AppendFromErr(diags, err) + } subdirectory, err := subdirectoryFromLocationURI(uri) if err != nil { return sdkdiag.AppendFromErr(diags, err) } + locationARN, err := arn.Parse(d.Id()) + if err != nil { 
+ return sdkdiag.AppendFromErr(diags, err) + } + globalIDParts := strings.Split(globalID, ".") // Global ID format for EFS location is <region>.<efs-file-system-id> d.Set("access_point_arn", output.AccessPointArn) d.Set("arn", output.LocationArn) if err := d.Set("ec2_config", flattenEC2Config(output.Ec2Config)); err != nil { return sdkdiag.AppendErrorf(diags, "setting ec2_config: %s", err) } + efsFileSystemARN := fmt.Sprintf("arn:%s:elasticfilesystem:%s:%s:file-system/%s", locationARN.Partition, globalIDParts[0], locationARN.AccountID, globalIDParts[1]) + d.Set("efs_file_system_arn", efsFileSystemARN) d.Set("file_system_access_role_arn", output.FileSystemAccessRoleArn) d.Set("in_transit_encryption", output.InTransitEncryption) d.Set("subdirectory", subdirectory) @@ -216,7 +229,7 @@ func resourceLocationEFSDelete(ctx context.Context, d *schema.ResourceData, meta return diags } -func FindLocationEFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationEfsOutput, error) { +func findLocationEFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationEfsOutput, error) { input := &datasync.DescribeLocationEfsInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_efs_test.go b/internal/service/datasync/location_efs_test.go index f53b84b62f27..1933e0e5a176 100644 --- a/internal/service/datasync/location_efs_test.go +++ b/internal/service/datasync/location_efs_test.go @@ -49,10 +49,9 @@ func TestAccDataSyncLocationEFS_basic(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"efs_file_system_arn"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -79,10 +78,9 @@ func TestAccDataSyncLocationEFS_accessPointARN(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"efs_file_system_arn"}, +
ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -133,10 +131,9 @@ func TestAccDataSyncLocationEFS_subdirectory(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"efs_file_system_arn"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -163,10 +160,9 @@ func TestAccDataSyncLocationEFS_tags(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"efs_file_system_arn"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccLocationEFSConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), diff --git a/internal/service/datasync/location_fsx_lustre_file_system.go b/internal/service/datasync/location_fsx_lustre_file_system.go index cac05875e6b8..f2e999413698 100644 --- a/internal/service/datasync/location_fsx_lustre_file_system.go +++ b/internal/service/datasync/location_fsx_lustre_file_system.go @@ -28,7 +28,7 @@ import ( // @SDKResource("aws_datasync_location_fsx_lustre_file_system", name="Location FSx for Lustre File System") // @Tags(identifierAttribute="id") -func ResourceLocationFSxLustreFileSystem() *schema.Resource { +func resourceLocationFSxLustreFileSystem() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationFSxLustreFileSystemCreate, ReadWithoutTimeout: resourceLocationFSxLustreFileSystemRead, @@ -127,7 +127,7 @@ func resourceLocationFSxLustreFileSystemRead(ctx context.Context, d *schema.Reso var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationFSxLustreByARN(ctx, conn, d.Id()) + output, err := findLocationFSxLustreByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location FSx for Lustre File System (%s) not found, removing 
from state", d.Id()) @@ -183,7 +183,7 @@ func resourceLocationFSxLustreFileSystemDelete(ctx context.Context, d *schema.Re return diags } -func FindLocationFSxLustreByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxLustreOutput, error) { +func findLocationFSxLustreByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxLustreOutput, error) { input := &datasync.DescribeLocationFsxLustreInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_fsx_ontap_file_system.go b/internal/service/datasync/location_fsx_ontap_file_system.go index 20263884cf2f..a07f9f01f465 100644 --- a/internal/service/datasync/location_fsx_ontap_file_system.go +++ b/internal/service/datasync/location_fsx_ontap_file_system.go @@ -28,7 +28,7 @@ import ( // @SDKResource("aws_datasync_location_fsx_ontap_file_system", name="Location FSx for NetApp ONTAP File System") // @Tags(identifierAttribute="id") -func ResourceLocationFSxONTAPFileSystem() *schema.Resource { +func resourceLocationFSxONTAPFileSystem() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationFSxONTAPFileSystemCreate, ReadWithoutTimeout: resourceLocationFSxONTAPFileSystemRead, @@ -223,7 +223,7 @@ func resourceLocationFSxONTAPFileSystemRead(ctx context.Context, d *schema.Resou var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationFSxONTAPByARN(ctx, conn, d.Id()) + output, err := findLocationFSxONTAPByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location FSx for NetApp ONTAP File System (%s) not found, removing from state", d.Id()) @@ -291,7 +291,7 @@ func resourceLocationFSxONTAPFileSystemDelete(ctx context.Context, d *schema.Res return diags } -func FindLocationFSxONTAPByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxOntapOutput, error) { +func 
findLocationFSxONTAPByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxOntapOutput, error) { input := &datasync.DescribeLocationFsxOntapInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_fsx_openzfs_file_system.go b/internal/service/datasync/location_fsx_openzfs_file_system.go index cb7bf5c2f723..dc0bdc21d83f 100644 --- a/internal/service/datasync/location_fsx_openzfs_file_system.go +++ b/internal/service/datasync/location_fsx_openzfs_file_system.go @@ -28,7 +28,7 @@ import ( // @SDKResource("aws_datasync_location_fsx_openzfs_file_system", name="Location FSx for OpenZFS File System") // @Tags(identifierAttribute="id") -func ResourceLocationFSxOpenZFSFileSystem() *schema.Resource { +func resourceLocationFSxOpenZFSFileSystem() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationFSxOpenZFSFileSystemCreate, ReadWithoutTimeout: resourceLocationFSxOpenZFSFileSystemRead, @@ -164,7 +164,7 @@ func resourceLocationFSxOpenZFSFileSystemRead(ctx context.Context, d *schema.Res var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationFSxOpenZFSByARN(ctx, conn, d.Id()) + output, err := findLocationFSxOpenZFSByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location FSx for OpenZFS File System (%s) not found, removing from state", d.Id()) @@ -223,7 +223,7 @@ func resourceLocationFSxOpenZFSFileSystemDelete(ctx context.Context, d *schema.R return diags } -func FindLocationFSxOpenZFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxOpenZfsOutput, error) { +func findLocationFSxOpenZFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxOpenZfsOutput, error) { input := &datasync.DescribeLocationFsxOpenZfsInput{ LocationArn: aws.String(arn), } diff --git 
a/internal/service/datasync/location_fsx_windows_file_system.go b/internal/service/datasync/location_fsx_windows_file_system.go index a59e8033ba38..bb1599b6e000 100644 --- a/internal/service/datasync/location_fsx_windows_file_system.go +++ b/internal/service/datasync/location_fsx_windows_file_system.go @@ -28,7 +28,7 @@ import ( // @SDKResource("aws_datasync_location_fsx_windows_file_system", name="Location FSx for Windows File Server File System") // @Tags(identifierAttribute="id") -func ResourceLocationFSxWindowsFileSystem() *schema.Resource { +func resourceLocationFSxWindowsFileSystem() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationFSxWindowsFileSystemCreate, ReadWithoutTimeout: resourceLocationFSxWindowsFileSystemRead, @@ -151,7 +151,7 @@ func resourceLocationFSxWindowsFileSystemRead(ctx context.Context, d *schema.Res var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationFSxWindowsByARN(ctx, conn, d.Id()) + output, err := findLocationFSxWindowsByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location FSx for Windows File Server File System (%s) not found, removing from state", d.Id()) @@ -209,7 +209,7 @@ func resourceLocationFSxWindowsFileSystemDelete(ctx context.Context, d *schema.R return diags } -func FindLocationFSxWindowsByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxWindowsOutput, error) { +func findLocationFSxWindowsByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationFsxWindowsOutput, error) { input := &datasync.DescribeLocationFsxWindowsInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_hdfs.go b/internal/service/datasync/location_hdfs.go index 9cd22982f18e..8fcd72d8b7e0 100644 --- a/internal/service/datasync/location_hdfs.go +++ b/internal/service/datasync/location_hdfs.go @@ -20,13 +20,14 
@@ import ( "github.com/hashicorp/terraform-provider-aws/internal/flex" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" ) // @SDKResource("aws_datasync_location_hdfs", name="Location HDFS") // @Tags(identifierAttribute="id") -func ResourceLocationHDFS() *schema.Resource { +func resourceLocationHDFS() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationHDFSCreate, ReadWithoutTimeout: resourceLocationHDFSRead, @@ -65,12 +66,26 @@ func ResourceLocationHDFS() *schema.Resource { ), }, "kerberos_keytab": { - Type: schema.TypeString, - Optional: true, + Type: schema.TypeString, + Optional: true, + ConflictsWith: []string{"kerberos_keytab_base64"}, + }, + "kerberos_keytab_base64": { + Type: schema.TypeString, + Optional: true, + ConflictsWith: []string{"kerberos_keytab"}, + ValidateFunc: verify.ValidBase64String, }, "kerberos_krb5_conf": { - Type: schema.TypeString, - Optional: true, + Type: schema.TypeString, + Optional: true, + ConflictsWith: []string{"kerberos_krb5_conf_base64"}, + }, + "kerberos_krb5_conf_base64": { + Type: schema.TypeString, + Optional: true, + ConflictsWith: []string{"kerberos_krb5_conf"}, + ValidateFunc: verify.ValidBase64String, }, "kerberos_principal": { Type: schema.TypeString, @@ -104,17 +119,20 @@ func ResourceLocationHDFS() *schema.Resource { "qop_configuration": { Type: schema.TypeList, Optional: true, + Computed: true, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "data_transfer_protection": { Type: schema.TypeString, Optional: true, + Computed: true, ValidateFunc: validation.StringInSlice(datasync.HdfsDataTransferProtection_Values(), false), }, "rpc_protection": { Type: schema.TypeString, Optional: true, + Computed: true, 
ValidateFunc: validation.StringInSlice(datasync.HdfsRpcProtection_Values(), false), }, }, @@ -176,10 +194,24 @@ func resourceLocationHDFSCreate(ctx context.Context, d *schema.ResourceData, met if v, ok := d.GetOk("kerberos_keytab"); ok { input.KerberosKeytab = []byte(v.(string)) + } else if v, ok := d.GetOk("kerberos_keytab_base64"); ok { + v := v.(string) + b, err := itypes.Base64Decode(v) + if err != nil { + b = []byte(v) + } + input.KerberosKeytab = b } if v, ok := d.GetOk("kerberos_krb5_conf"); ok { input.KerberosKrb5Conf = []byte(v.(string)) + } else if v, ok := d.GetOk("kerberos_krb5_conf_base64"); ok { + v := v.(string) + b, err := itypes.Base64Decode(v) + if err != nil { + b = []byte(v) + } + input.KerberosKrb5Conf = b } if v, ok := d.GetOk("kerberos_principal"); ok { @@ -217,7 +249,7 @@ func resourceLocationHDFSRead(ctx context.Context, d *schema.ResourceData, meta var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationHDFSByARN(ctx, conn, d.Id()) + output, err := findLocationHDFSByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location HDFS (%s) not found, removing from state", d.Id()) @@ -276,12 +308,30 @@ func resourceLocationHDFSUpdate(ctx context.Context, d *schema.ResourceData, met input.BlockSize = aws.Int64(int64(d.Get("block_size").(int))) } - if d.HasChange("kerberos_keytab") { - input.KerberosKeytab = []byte(d.Get("kerberos_keytab").(string)) + if d.HasChanges("kerberos_keytab", "kerberos_keytab_base64") { + if v, ok := d.GetOk("kerberos_keytab"); ok { + input.KerberosKeytab = []byte(v.(string)) + } else if v, ok := d.GetOk("kerberos_keytab_base64"); ok { + v := v.(string) + b, err := itypes.Base64Decode(v) + if err != nil { + b = []byte(v) + } + input.KerberosKeytab = b + } } - if d.HasChange("kerberos_krb5_conf") { - input.KerberosKrb5Conf = []byte(d.Get("kerberos_krb5_conf").(string)) + if d.HasChanges("kerberos_krb5_conf", 
"kerberos_krb5_conf_base64") { + if v, ok := d.GetOk("kerberos_krb5_conf"); ok { + input.KerberosKrb5Conf = []byte(v.(string)) + } else if v, ok := d.GetOk("kerberos_krb5_conf_base64"); ok { + v := v.(string) + b, err := itypes.Base64Decode(v) + if err != nil { + b = []byte(v) + } + input.KerberosKrb5Conf = b + } } if d.HasChange("kerberos_principal") { @@ -342,7 +392,7 @@ func resourceLocationHDFSDelete(ctx context.Context, d *schema.ResourceData, met return diags } -func FindLocationHDFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationHdfsOutput, error) { +func findLocationHDFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationHdfsOutput, error) { input := &datasync.DescribeLocationHdfsInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_hdfs_test.go b/internal/service/datasync/location_hdfs_test.go index 4ea64896cfc9..aa78232ce6e0 100644 --- a/internal/service/datasync/location_hdfs_test.go +++ b/internal/service/datasync/location_hdfs_test.go @@ -34,18 +34,26 @@ func TestAccDataSyncLocationHDFS_basic(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccLocationHDFSConfig_basic(rName), - Check: resource.ComposeTestCheckFunc( + Check: resource.ComposeAggregateTestCheckFunc( testAccCheckLocationHDFSExists(ctx, resourceName, &v), - acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "datasync", regexache.MustCompile(`location/loc-.+`)), resource.TestCheckResourceAttr(resourceName, "agent_arns.#", "1"), + acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "datasync", regexache.MustCompile(`location/loc-.+`)), + resource.TestCheckResourceAttr(resourceName, "authentication_type", "SIMPLE"), + resource.TestCheckResourceAttr(resourceName, "block_size", "134217728"), + resource.TestCheckNoResourceAttr(resourceName, "kerberos_keytab"), + resource.TestCheckNoResourceAttr(resourceName, "kerberos_keytab_base64"), + 
resource.TestCheckNoResourceAttr(resourceName, "kerberos_krb5_conf"), + resource.TestCheckNoResourceAttr(resourceName, "kerberos_krb5_conf_base64"), + resource.TestCheckResourceAttr(resourceName, "kerberos_principal", ""), + resource.TestCheckResourceAttr(resourceName, "kms_key_provider_uri", ""), resource.TestCheckResourceAttr(resourceName, "name_node.#", "1"), resource.TestCheckTypeSetElemNestedAttrs(resourceName, "name_node.*", map[string]string{ "port": "80", }), - resource.TestCheckResourceAttr(resourceName, "authentication_type", "SIMPLE"), - resource.TestCheckResourceAttr(resourceName, "simple_user", rName), - resource.TestCheckResourceAttr(resourceName, "block_size", "134217728"), + resource.TestCheckResourceAttr(resourceName, "qop_configuration.#", "0"), resource.TestCheckResourceAttr(resourceName, "replication_factor", "3"), + resource.TestCheckResourceAttr(resourceName, "simple_user", rName), + resource.TestCheckResourceAttr(resourceName, "subdirectory", "/"), resource.TestCheckResourceAttr(resourceName, "tags.%", "0"), resource.TestMatchResourceAttr(resourceName, "uri", regexache.MustCompile(`^hdfs://.+/`)), ), @@ -76,7 +84,6 @@ func TestAccDataSyncLocationHDFS_disappears(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckLocationHDFSExists(ctx, resourceName, &v), acctest.CheckResourceDisappears(ctx, acctest.Provider, tfdatasync.ResourceLocationHDFS(), resourceName), - acctest.CheckResourceDisappears(ctx, acctest.Provider, tfdatasync.ResourceLocationHDFS(), resourceName), ), ExpectNonEmptyPlan: true, }, @@ -130,6 +137,57 @@ func TestAccDataSyncLocationHDFS_tags(t *testing.T) { }) } +func TestAccDataSyncLocationHDFS_kerberos(t *testing.T) { + ctx := acctest.Context(t) + var v datasync.DescribeLocationHdfsOutput + resourceName := "aws_datasync_location_hdfs.test" + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + principal := acctest.RandomEmailAddress(acctest.RandomDomainName()) + + resource.ParallelTest(t, resource.TestCase{ 
+ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.DataSyncServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckLocationHDFSDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccLocationHDFSConfig_kerberos(rName, principal), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckLocationHDFSExists(ctx, resourceName, &v), + resource.TestCheckResourceAttr(resourceName, "agent_arns.#", "1"), + acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "datasync", regexache.MustCompile(`location/loc-.+`)), + resource.TestCheckResourceAttr(resourceName, "authentication_type", "KERBEROS"), + resource.TestCheckResourceAttr(resourceName, "block_size", "134217728"), + resource.TestCheckNoResourceAttr(resourceName, "kerberos_keytab"), + resource.TestCheckResourceAttrSet(resourceName, "kerberos_keytab_base64"), + resource.TestCheckResourceAttrSet(resourceName, "kerberos_krb5_conf"), + resource.TestCheckNoResourceAttr(resourceName, "kerberos_krb5_conf_base64"), + resource.TestCheckResourceAttr(resourceName, "kerberos_principal", principal), + resource.TestCheckResourceAttr(resourceName, "kms_key_provider_uri", ""), + resource.TestCheckResourceAttr(resourceName, "name_node.#", "1"), + resource.TestCheckTypeSetElemNestedAttrs(resourceName, "name_node.*", map[string]string{ + "port": "80", + }), + resource.TestCheckResourceAttr(resourceName, "qop_configuration.#", "1"), + resource.TestCheckResourceAttr(resourceName, "replication_factor", "3"), + resource.TestCheckResourceAttr(resourceName, "subdirectory", "/"), + resource.TestCheckResourceAttr(resourceName, "tags.%", "0"), + resource.TestMatchResourceAttr(resourceName, "uri", regexache.MustCompile(`^hdfs://.+/`)), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + "kerberos_keytab_base64", + "kerberos_krb5_conf", + }, + 
}, + }, + }) +} + func testAccCheckLocationHDFSDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { conn := acctest.Provider.Meta().(*conns.AWSClient).DataSyncConn(ctx) @@ -239,3 +297,21 @@ resource "aws_datasync_location_hdfs" "test" { } `, rName, key1, value1, key2, value2)) } + +func testAccLocationHDFSConfig_kerberos(rName, principal string) string { + return acctest.ConfigCompose(testAccLocationHDFSConfig_base(rName), fmt.Sprintf(` +resource "aws_datasync_location_hdfs" "test" { + agent_arns = [aws_datasync_agent.test.arn] + authentication_type = "KERBEROS" + + name_node { + hostname = aws_instance.test.private_dns + port = 80 + } + + kerberos_principal = %[1]q + kerberos_keytab_base64 = filebase64("test-fixtures/keytab.krb") + kerberos_krb5_conf = file("test-fixtures/krb5.conf") +} +`, principal)) +} diff --git a/internal/service/datasync/location_nfs.go b/internal/service/datasync/location_nfs.go index 904bd8cb1be6..4f55987eb62f 100644 --- a/internal/service/datasync/location_nfs.go +++ b/internal/service/datasync/location_nfs.go @@ -26,7 +26,7 @@ import ( // @SDKResource("aws_datasync_location_nfs", name="Location NFS") // @Tags(identifierAttribute="id") -func ResourceLocationNFS() *schema.Resource { +func resourceLocationNFS() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationNFSCreate, ReadWithoutTimeout: resourceLocationNFSRead, @@ -140,7 +140,7 @@ func resourceLocationNFSRead(ctx context.Context, d *schema.ResourceData, meta i var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationNFSByARN(ctx, conn, d.Id()) + output, err := findLocationNFSByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location NFS (%s) not found, removing from state", d.Id()) @@ -153,6 +153,10 @@ func resourceLocationNFSRead(ctx context.Context, d *schema.ResourceData, meta i } uri := 
aws.StringValue(output.LocationUri) + serverHostName, err := globalIDFromLocationURI(uri) + if err != nil { + return sdkdiag.AppendFromErr(diags, err) + } subdirectory, err := subdirectoryFromLocationURI(uri) if err != nil { return sdkdiag.AppendFromErr(diags, err) @@ -165,6 +169,7 @@ func resourceLocationNFSRead(ctx context.Context, d *schema.ResourceData, meta i if err := d.Set("on_prem_config", flattenOnPremConfig(output.OnPremConfig)); err != nil { return sdkdiag.AppendErrorf(diags, "setting on_prem_config: %s", err) } + d.Set("server_hostname", serverHostName) d.Set("subdirectory", subdirectory) d.Set("uri", uri) @@ -216,7 +221,7 @@ func resourceLocationNFSDelete(ctx context.Context, d *schema.ResourceData, meta return diags } -func FindLocationNFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationNfsOutput, error) { +func findLocationNFSByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationNfsOutput, error) { input := &datasync.DescribeLocationNfsInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_nfs_test.go b/internal/service/datasync/location_nfs_test.go index 64470dcc57f2..15eb279b4b20 100644 --- a/internal/service/datasync/location_nfs_test.go +++ b/internal/service/datasync/location_nfs_test.go @@ -48,10 +48,9 @@ func TestAccDataSyncLocationNFS_basic(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"server_hostname"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -77,10 +76,9 @@ func TestAccDataSyncLocationNFS_mountOptions(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"server_hostname"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccLocationNFSConfig_mountOptions(rName, 
"NFS4_1"), @@ -138,10 +136,9 @@ func TestAccDataSyncLocationNFS_AgentARNs_multiple(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"server_hostname"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -167,10 +164,9 @@ func TestAccDataSyncLocationNFS_subdirectory(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"server_hostname"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccLocationNFSConfig_subdirectory(rName, "/subdirectory2/"), @@ -204,10 +200,9 @@ func TestAccDataSyncLocationNFS_tags(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"server_hostname"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccLocationNFSConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), diff --git a/internal/service/datasync/location_object_storage.go b/internal/service/datasync/location_object_storage.go index 18cc783cd5b0..83dc71d2b4bc 100644 --- a/internal/service/datasync/location_object_storage.go +++ b/internal/service/datasync/location_object_storage.go @@ -27,7 +27,7 @@ import ( // @SDKResource("aws_datasync_location_object_storage", name="Location Object Storage") // @Tags(identifierAttribute="id") -func ResourceLocationObjectStorage() *schema.Resource { +func resourceLocationObjectStorage() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationObjectStorageCreate, ReadWithoutTimeout: resourceLocationObjectStorageRead, @@ -155,7 +155,7 @@ func resourceLocationObjectStorageRead(ctx context.Context, d *schema.ResourceDa var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := 
FindLocationObjectStorageByARN(ctx, conn, d.Id()) + output, err := findLocationObjectStorageByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location Object Storage (%s) not found, removing from state", d.Id()) @@ -250,7 +250,7 @@ func resourceLocationObjectStorageDelete(ctx context.Context, d *schema.Resource return diags } -func FindLocationObjectStorageByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationObjectStorageOutput, error) { +func findLocationObjectStorageByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationObjectStorageOutput, error) { input := &datasync.DescribeLocationObjectStorageInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_s3.go b/internal/service/datasync/location_s3.go index e4211f2271e4..936f7d52b61d 100644 --- a/internal/service/datasync/location_s3.go +++ b/internal/service/datasync/location_s3.go @@ -5,10 +5,12 @@ package datasync import ( "context" + "fmt" "log" "strings" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/arn" "github.com/aws/aws-sdk-go/service/datasync" "github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -26,7 +28,7 @@ import ( // @SDKResource("aws_datasync_location_s3", name="Location S3") // @Tags(identifierAttribute="id") -func ResourceLocationS3() *schema.Resource { +func resourceLocationS3() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationS3Create, ReadWithoutTimeout: resourceLocationS3Read, @@ -159,7 +161,7 @@ func resourceLocationS3Read(ctx context.Context, d *schema.ResourceData, meta in var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationS3ByARN(ctx, conn, d.Id()) + output, err := findLocationS3ByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { 
log.Printf("[WARN] DataSync Location S3 (%s) not found, removing from state", d.Id()) @@ -172,13 +174,23 @@ func resourceLocationS3Read(ctx context.Context, d *schema.ResourceData, meta in } uri := aws.StringValue(output.LocationUri) + s3BucketName, err := globalIDFromLocationURI(aws.StringValue(output.LocationUri)) + if err != nil { + return sdkdiag.AppendFromErr(diags, err) + } subdirectory, err := subdirectoryFromLocationURI(aws.StringValue(output.LocationUri)) if err != nil { return sdkdiag.AppendFromErr(diags, err) } + locationARN, err := arn.Parse(d.Id()) + if err != nil { + return sdkdiag.AppendFromErr(diags, err) + } d.Set("agent_arns", aws.StringValueSlice(output.AgentArns)) d.Set("arn", output.LocationArn) + s3BucketArn := fmt.Sprintf("arn:%s:s3:::%s", locationARN.Partition, s3BucketName) + d.Set("s3_bucket_arn", s3BucketArn) if err := d.Set("s3_config", flattenS3Config(output.S3Config)); err != nil { return sdkdiag.AppendErrorf(diags, "setting s3_config: %s", err) } @@ -217,7 +229,7 @@ func resourceLocationS3Delete(ctx context.Context, d *schema.ResourceData, meta return diags } -func FindLocationS3ByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationS3Output, error) { +func findLocationS3ByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationS3Output, error) { input := &datasync.DescribeLocationS3Input{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_s3_test.go b/internal/service/datasync/location_s3_test.go index 3ae18effd005..9fc082383158 100644 --- a/internal/service/datasync/location_s3_test.go +++ b/internal/service/datasync/location_s3_test.go @@ -50,10 +50,9 @@ func TestAccDataSyncLocationS3_basic(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"s3_bucket_arn"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ 
-87,10 +86,9 @@ func TestAccDataSyncLocationS3_storageClass(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"s3_bucket_arn"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -141,10 +139,9 @@ func TestAccDataSyncLocationS3_tags(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"s3_bucket_arn"}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, { Config: testAccLocationS3Config_tags2(rName, "key1", "value1updated", "key2", "value2"), diff --git a/internal/service/datasync/location_smb.go b/internal/service/datasync/location_smb.go index ba7dcd526a23..c68d10f80941 100644 --- a/internal/service/datasync/location_smb.go +++ b/internal/service/datasync/location_smb.go @@ -25,7 +25,7 @@ import ( // @SDKResource("aws_datasync_location_smb", name="Location SMB") // @Tags(identifierAttribute="id") -func ResourceLocationSMB() *schema.Resource { +func resourceLocationSMB() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceLocationSMBCreate, ReadWithoutTimeout: resourceLocationSMBRead, @@ -149,7 +149,7 @@ func resourceLocationSMBRead(ctx context.Context, d *schema.ResourceData, meta i var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindLocationSMBByARN(ctx, conn, d.Id()) + output, err := findLocationSMBByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Location SMB (%s) not found, removing from state", d.Id()) @@ -162,6 +162,10 @@ func resourceLocationSMBRead(ctx context.Context, d *schema.ResourceData, meta i } uri := aws.StringValue(output.LocationUri) + serverHostName, err := globalIDFromLocationURI(uri) + if err != nil { + return sdkdiag.AppendFromErr(diags, err) + } subdirectory, err := 
subdirectoryFromLocationURI(aws.StringValue(output.LocationUri)) if err != nil { return sdkdiag.AppendFromErr(diags, err) @@ -173,6 +177,7 @@ func resourceLocationSMBRead(ctx context.Context, d *schema.ResourceData, meta i if err := d.Set("mount_options", flattenSMBMountOptions(output.MountOptions)); err != nil { return sdkdiag.AppendErrorf(diags, "setting mount_options: %s", err) } + d.Set("server_hostname", serverHostName) d.Set("subdirectory", subdirectory) d.Set("uri", uri) d.Set("user", output.User) @@ -228,7 +233,7 @@ func resourceLocationSMBDelete(ctx context.Context, d *schema.ResourceData, meta return diags } -func FindLocationSMBByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationSmbOutput, error) { +func findLocationSMBByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeLocationSmbOutput, error) { input := &datasync.DescribeLocationSmbInput{ LocationArn: aws.String(arn), } diff --git a/internal/service/datasync/location_smb_test.go b/internal/service/datasync/location_smb_test.go index 504667d014a1..df4f78345616 100644 --- a/internal/service/datasync/location_smb_test.go +++ b/internal/service/datasync/location_smb_test.go @@ -49,7 +49,7 @@ func TestAccDataSyncLocationSMB_basic(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password", "server_hostname"}, + ImportStateVerifyIgnore: []string{"password"}, }, { Config: testAccLocationSMBConfig_basic(rName, "/test2/"), @@ -116,7 +116,7 @@ func TestAccDataSyncLocationSMB_tags(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"password", "server_hostname"}, + ImportStateVerifyIgnore: []string{"password"}, }, { Config: testAccLocationSMBConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), diff --git a/internal/service/datasync/service_package_gen.go 
b/internal/service/datasync/service_package_gen.go index b0e13b9ed2e6..e83a4b11901f 100644 --- a/internal/service/datasync/service_package_gen.go +++ b/internal/service/datasync/service_package_gen.go @@ -38,7 +38,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationAzureBlob, + Factory: resourceLocationAzureBlob, TypeName: "aws_datasync_location_azure_blob", Name: "Location Microsoft Azure Blob Storage", Tags: &types.ServicePackageResourceTags{ @@ -46,7 +46,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationEFS, + Factory: resourceLocationEFS, TypeName: "aws_datasync_location_efs", Name: "Location EFS", Tags: &types.ServicePackageResourceTags{ @@ -54,7 +54,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationFSxLustreFileSystem, + Factory: resourceLocationFSxLustreFileSystem, TypeName: "aws_datasync_location_fsx_lustre_file_system", Name: "Location FSx for Lustre File System", Tags: &types.ServicePackageResourceTags{ @@ -62,7 +62,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationFSxONTAPFileSystem, + Factory: resourceLocationFSxONTAPFileSystem, TypeName: "aws_datasync_location_fsx_ontap_file_system", Name: "Location FSx for NetApp ONTAP File System", Tags: &types.ServicePackageResourceTags{ @@ -70,7 +70,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationFSxOpenZFSFileSystem, + Factory: resourceLocationFSxOpenZFSFileSystem, TypeName: "aws_datasync_location_fsx_openzfs_file_system", Name: "Location FSx for OpenZFS File System", Tags: &types.ServicePackageResourceTags{ @@ -78,7 +78,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationFSxWindowsFileSystem, + 
Factory: resourceLocationFSxWindowsFileSystem, TypeName: "aws_datasync_location_fsx_windows_file_system", Name: "Location FSx for Windows File Server File System", Tags: &types.ServicePackageResourceTags{ @@ -86,7 +86,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationHDFS, + Factory: resourceLocationHDFS, TypeName: "aws_datasync_location_hdfs", Name: "Location HDFS", Tags: &types.ServicePackageResourceTags{ @@ -94,7 +94,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationNFS, + Factory: resourceLocationNFS, TypeName: "aws_datasync_location_nfs", Name: "Location NFS", Tags: &types.ServicePackageResourceTags{ @@ -102,7 +102,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationObjectStorage, + Factory: resourceLocationObjectStorage, TypeName: "aws_datasync_location_object_storage", Name: "Location Object Storage", Tags: &types.ServicePackageResourceTags{ @@ -110,7 +110,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationS3, + Factory: resourceLocationS3, TypeName: "aws_datasync_location_s3", Name: "Location S3", Tags: &types.ServicePackageResourceTags{ @@ -118,7 +118,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceLocationSMB, + Factory: resourceLocationSMB, TypeName: "aws_datasync_location_smb", Name: "Location SMB", Tags: &types.ServicePackageResourceTags{ @@ -126,7 +126,7 @@ func (p *servicePackage) SDKResources(ctx context.Context) []*types.ServicePacka }, }, { - Factory: ResourceTask, + Factory: resourceTask, TypeName: "aws_datasync_task", Name: "Task", Tags: &types.ServicePackageResourceTags{ diff --git a/internal/service/datasync/sweep.go b/internal/service/datasync/sweep.go index 39aeba02857d..38698a52362d 100644 --- 
a/internal/service/datasync/sweep.go +++ b/internal/service/datasync/sweep.go @@ -169,7 +169,7 @@ func sweepTasks(region string) error { } for _, v := range page.Tasks { - r := ResourceTask() + r := resourceTask() d := r.Data(nil) d.SetId(aws.StringValue(v.TaskArn)) diff --git a/internal/service/datasync/task.go b/internal/service/datasync/task.go index 767e40a0af6f..9b21a6f515c2 100644 --- a/internal/service/datasync/task.go +++ b/internal/service/datasync/task.go @@ -27,7 +27,7 @@ import ( // @SDKResource("aws_datasync_task", name="Task") // @Tags(identifierAttribute="id") -func ResourceTask() *schema.Resource { +func resourceTask() *schema.Resource { return &schema.Resource{ CreateWithoutTimeout: resourceTaskCreate, ReadWithoutTimeout: resourceTaskRead, @@ -358,7 +358,7 @@ func resourceTaskRead(ctx context.Context, d *schema.ResourceData, meta interfac var diags diag.Diagnostics conn := meta.(*conns.AWSClient).DataSyncConn(ctx) - output, err := FindTaskByARN(ctx, conn, d.Id()) + output, err := findTaskByARN(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] DataSync Task (%s) not found, removing from state", d.Id()) @@ -459,7 +459,7 @@ func resourceTaskDelete(ctx context.Context, d *schema.ResourceData, meta interf return diags } -func FindTaskByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeTaskOutput, error) { +func findTaskByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*datasync.DescribeTaskOutput, error) { input := &datasync.DescribeTaskInput{ TaskArn: aws.String(arn), } @@ -486,7 +486,7 @@ func FindTaskByARN(ctx context.Context, conn *datasync.DataSync, arn string) (*d func statusTask(ctx context.Context, conn *datasync.DataSync, arn string) retry.StateRefreshFunc { return func() (interface{}, string, error) { - output, err := FindTaskByARN(ctx, conn, arn) + output, err := findTaskByARN(ctx, conn, arn) if tfresource.NotFound(err) { return nil, "", nil diff --git 
a/internal/service/datasync/test-fixtures/keytab.krb b/internal/service/datasync/test-fixtures/keytab.krb new file mode 100644 index 000000000000..6c315ab75348 Binary files /dev/null and b/internal/service/datasync/test-fixtures/keytab.krb differ diff --git a/internal/service/datasync/test-fixtures/krb5.conf b/internal/service/datasync/test-fixtures/krb5.conf new file mode 100644 index 000000000000..9c68cac8007c --- /dev/null +++ b/internal/service/datasync/test-fixtures/krb5.conf @@ -0,0 +1,37 @@ +[libdefaults] + default_realm = ATHENA.MIT.EDU + default_tkt_enctypes = des3-hmac-sha1 des-cbc-crc + default_tgs_enctypes = des3-hmac-sha1 des-cbc-crc + dns_lookup_kdc = true + dns_lookup_realm = false + +[realms] + ATHENA.MIT.EDU = { + kdc = kerberos.mit.edu + kdc = kerberos-1.mit.edu + kdc = kerberos-2.mit.edu:750 + admin_server = kerberos.mit.edu + master_kdc = kerberos.mit.edu + default_domain = mit.edu + } + EXAMPLE.COM = { + kdc = kerberos.example.com + kdc = kerberos-1.example.com + admin_server = kerberos.example.com + } + +[domain_realm] + .mit.edu = ATHENA.MIT.EDU + mit.edu = ATHENA.MIT.EDU + +[capaths] + ATHENA.MIT.EDU = { + EXAMPLE.COM = . + } + EXAMPLE.COM = { + ATHENA.MIT.EDU = . + } + +[logging] + kdc = SYSLOG:INFO + admin_server = FILE=/var/kadm5.log diff --git a/internal/service/datasync/uri.go b/internal/service/datasync/uri.go index f99c702f0769..1947bcd471d0 100644 --- a/internal/service/datasync/uri.go +++ b/internal/service/datasync/uri.go @@ -16,6 +16,26 @@ var ( s3OutpostsAccessPointARNResourcePattern = regexache.MustCompile(`^outpost/.*/accesspoint/.*?(/.*)$`) ) +// globalIDFromLocationURI extracts the global ID from a location URI. 
+// https://docs.aws.amazon.com/datasync/latest/userguide/API_LocationListEntry.html#DataSync-Type-LocationListEntry-LocationUri +func globalIDFromLocationURI(uri string) (string, error) { + submatches := locationURIPattern.FindStringSubmatch(uri) + + if len(submatches) != 3 { + return "", fmt.Errorf("location URI (%s) does not match pattern %q", uri, locationURIPattern) + } + + globalIDAndSubdir := submatches[2] + + submatches = locationURIGlobalIDAndSubdirPattern.FindStringSubmatch(globalIDAndSubdir) + + if len(submatches) != 3 { + return "", fmt.Errorf("location URI global ID and subdirectory (%s) does not match pattern %q", globalIDAndSubdir, locationURIGlobalIDAndSubdirPattern) + } + + return submatches[1], nil +} + // subdirectoryFromLocationURI extracts the subdirectory from a location URI. // https://docs.aws.amazon.com/datasync/latest/userguide/API_LocationListEntry.html#DataSync-Type-LocationListEntry-LocationUri func subdirectoryFromLocationURI(uri string) (string, error) { diff --git a/internal/service/dms/certificate.go b/internal/service/dms/certificate.go index 436bc00510c0..f7a071a249ce 100644 --- a/internal/service/dms/certificate.go +++ b/internal/service/dms/certificate.go @@ -5,7 +5,6 @@ package dms import ( "context" - "encoding/base64" "log" "github.com/YakDriver/regexache" @@ -20,6 +19,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -90,11 +90,11 @@ func resourceCertificateCreate(ctx context.Context, d *schema.ResourceData, meta } if v, ok := d.GetOk("certificate_wallet"); ok { - certWallet, err := base64.StdEncoding.DecodeString(v.(string)) + v, err := itypes.Base64Decode(v.(string)) if err 
!= nil { return sdkdiag.AppendFromErr(diags, err) } - input.CertificateWallet = certWallet + input.CertificateWallet = v } _, err := conn.ImportCertificateWithContext(ctx, input) @@ -167,7 +167,7 @@ func resourceCertificateSetState(d *schema.ResourceData, cert *dms.Certificate) d.Set("certificate_pem", cert.CertificatePem) } if cert.CertificateWallet != nil && len(cert.CertificateWallet) != 0 { - d.Set("certificate_wallet", verify.Base64Encode(cert.CertificateWallet)) + d.Set("certificate_wallet", itypes.Base64EncodeOnce(cert.CertificateWallet)) } } diff --git a/internal/service/dms/certificate_data_source.go b/internal/service/dms/certificate_data_source.go index 0c60e7e2df45..59a332ea6b9d 100644 --- a/internal/service/dms/certificate_data_source.go +++ b/internal/service/dms/certificate_data_source.go @@ -14,7 +14,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" - "github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKDataSource("aws_dms_certificate") @@ -95,7 +95,7 @@ func dataSourceCertificateRead(ctx context.Context, d *schema.ResourceData, meta d.Set("certificate_id", out.CertificateIdentifier) d.Set("certificate_pem", out.CertificatePem) if len(out.CertificateWallet) != 0 { - d.Set("certificate_wallet", verify.Base64Encode(out.CertificateWallet)) + d.Set("certificate_wallet", itypes.Base64EncodeOnce(out.CertificateWallet)) } d.Set("key_length", out.KeyLength) d.Set("signing_algorithm", out.SigningAlgorithm) diff --git a/internal/service/dynamodb/table_item.go b/internal/service/dynamodb/table_item.go index 344807ef2d9b..be663ee9ffa1 100644 --- a/internal/service/dynamodb/table_item.go +++ b/internal/service/dynamodb/table_item.go @@ -19,6 +19,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" 
"github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" ) @@ -273,12 +274,12 @@ func buildTableItemID(tableName string, hashKey string, rangeKey string, attrs m id := []string{tableName, hashKey} if hashVal, ok := attrs[hashKey]; ok { - id = append(id, verify.Base64Encode(hashVal.B)) + id = append(id, itypes.Base64EncodeOnce(hashVal.B)) id = append(id, aws.StringValue(hashVal.S)) id = append(id, aws.StringValue(hashVal.N)) } if rangeVal, ok := attrs[rangeKey]; ok && rangeKey != "" { - id = append(id, rangeKey, verify.Base64Encode(rangeVal.B)) + id = append(id, rangeKey, itypes.Base64EncodeOnce(rangeVal.B)) id = append(id, aws.StringValue(rangeVal.S)) id = append(id, aws.StringValue(rangeVal.N)) } diff --git a/internal/service/dynamodb/table_item_data_source.go b/internal/service/dynamodb/table_item_data_source.go index 2863fe6738be..63fd248ddf4f 100644 --- a/internal/service/dynamodb/table_item_data_source.go +++ b/internal/service/dynamodb/table_item_data_source.go @@ -15,7 +15,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" - "github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -113,7 +113,7 @@ func buildTableItemDataSourceID(tableName string, attrs map[string]*dynamodb.Att id := []string{tableName} for key, element := range attrs { - id = append(id, key, verify.Base64Encode(element.B)) + id = append(id, key, itypes.Base64EncodeOnce(element.B)) id = append(id, aws.StringValue(element.S)) id = append(id, aws.StringValue(element.N)) } diff --git 
a/internal/service/ec2/ec2_instance.go b/internal/service/ec2/ec2_instance.go index 2d07e7c93b53..3a16e3c85ba8 100644 --- a/internal/service/ec2/ec2_instance.go +++ b/internal/service/ec2/ec2_instance.go @@ -7,7 +7,6 @@ import ( "bytes" "context" "crypto/sha1" - "encoding/base64" "encoding/hex" "errors" "fmt" @@ -35,6 +34,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/flex" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -821,15 +821,7 @@ func ResourceInstance() *schema.Resource { Optional: true, Computed: true, ConflictsWith: []string{"user_data"}, - ValidateFunc: func(v interface{}, name string) (warns []string, errs []error) { - s := v.(string) - if !verify.IsBase64Encoded([]byte(s)) { - errs = append(errs, fmt.Errorf( - "%s: must be base64-encoded", name, - )) - } - return - }, + ValidateFunc: verify.ValidBase64String, }, "user_data_replace_on_change": { Type: schema.TypeBool, @@ -1775,19 +1767,16 @@ func resourceInstanceUpdate(ctx context.Context, d *schema.ResourceData, meta in // Otherwise, you must provide base64-encoded text". if d.HasChange("user_data") { - log.Printf("[INFO] Modifying user data %s", d.Id()) - - // Decode so the AWS SDK doesn't double encode - userData, err := base64.StdEncoding.DecodeString(d.Get("user_data").(string)) + // Decode so the AWS SDK doesn't double encode. 
+ v, err := itypes.Base64Decode(d.Get("user_data").(string)) if err != nil { - log.Printf("[DEBUG] Instance (%s) user_data not base64 decoded", d.Id()) - userData = []byte(d.Get("user_data").(string)) + v = []byte(d.Get("user_data").(string)) } input := &ec2.ModifyInstanceAttributeInput{ InstanceId: aws.String(d.Id()), UserData: &ec2.BlobAttributeValue{ - Value: userData, + Value: v, }, } @@ -1797,20 +1786,17 @@ func resourceInstanceUpdate(ctx context.Context, d *schema.ResourceData, meta in } if d.HasChange("user_data_base64") { - log.Printf("[INFO] Modifying user data base64 %s", d.Id()) - // Schema validation technically ensures the data is Base64 encoded. - // Decode so the AWS SDK doesn't double encode - userData, err := base64.StdEncoding.DecodeString(d.Get("user_data_base64").(string)) + // Decode so the AWS SDK doesn't double encode. + v, err := itypes.Base64Decode(d.Get("user_data_base64").(string)) if err != nil { - log.Printf("[DEBUG] Instance (%s) user_data_base64 not base64 decoded", d.Id()) - userData = []byte(d.Get("user_data_base64").(string)) + v = []byte(d.Get("user_data_base64").(string)) } input := &ec2.ModifyInstanceAttributeInput{ InstanceId: aws.String(d.Id()), UserData: &ec2.BlobAttributeValue{ - Value: userData, + Value: v, }, } @@ -2943,7 +2929,7 @@ func buildInstanceOpts(ctx context.Context, d *schema.ResourceData, meta interfa userDataBase64 := d.Get("user_data_base64").(string) if userData != "" { - opts.UserData64 = aws.String(verify.Base64Encode([]byte(userData))) + opts.UserData64 = flex.StringValueToBase64String(userData) } else if userDataBase64 != "" { opts.UserData64 = aws.String(userDataBase64) } @@ -3299,13 +3285,13 @@ func waitInstanceStopped(ctx context.Context, conn *ec2.EC2, id string, timeout return nil, err } -func userDataHashSum(user_data string) string { +func userDataHashSum(userData string) string { // Check whether the user_data is not Base64 encoded. 
// Always calculate hash of base64 decoded value since we - // check against double-encoding when setting it - v, base64DecodeError := base64.StdEncoding.DecodeString(user_data) - if base64DecodeError != nil { - v = []byte(user_data) + // check against double-encoding when setting it. + v, err := itypes.Base64Decode(userData) + if err != nil { + v = []byte(userData) } hash := sha1.Sum(v) diff --git a/internal/service/ec2/ec2_spot_fleet_request.go b/internal/service/ec2/ec2_spot_fleet_request.go index 67f6254ccc39..2aa806f1550b 100644 --- a/internal/service/ec2/ec2_spot_fleet_request.go +++ b/internal/service/ec2/ec2_spot_fleet_request.go @@ -1253,7 +1253,7 @@ func buildSpotFleetLaunchSpecification(ctx context.Context, d map[string]interfa } if v, ok := d["user_data"]; ok { - opts.UserData = aws.String(verify.Base64Encode([]byte(v.(string)))) + opts.UserData = flex.StringValueToBase64String(v.(string)) } if v, ok := d["key_name"]; ok && v != "" { diff --git a/internal/service/ecr/authorization_token_data_source.go b/internal/service/ecr/authorization_token_data_source.go index 77c5c94f0518..9b51cc323198 100644 --- a/internal/service/ecr/authorization_token_data_source.go +++ b/internal/service/ecr/authorization_token_data_source.go @@ -5,7 +5,6 @@ package ecr import ( "context" - "encoding/base64" "log" "strings" "time" @@ -16,6 +15,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKDataSource("aws_ecr_authorization_token") @@ -71,7 +71,7 @@ func dataSourceAuthorizationTokenRead(ctx context.Context, d *schema.ResourceDat authorizationToken := aws.StringValue(authorizationData.AuthorizationToken) expiresAt := aws.TimeValue(authorizationData.ExpiresAt).Format(time.RFC3339) proxyEndpoint := aws.StringValue(authorizationData.ProxyEndpoint) - 
authBytes, err := base64.URLEncoding.DecodeString(authorizationToken) + authBytes, err := itypes.Base64Decode(authorizationToken) if err != nil { d.SetId("") return sdkdiag.AppendErrorf(diags, "decoding ECR authorization token: %s", err) diff --git a/internal/service/ecrpublic/authorization_token_data_source.go b/internal/service/ecrpublic/authorization_token_data_source.go index 720cc3a23119..bea36a2acb1c 100644 --- a/internal/service/ecrpublic/authorization_token_data_source.go +++ b/internal/service/ecrpublic/authorization_token_data_source.go @@ -5,7 +5,6 @@ package ecrpublic import ( "context" - "encoding/base64" "strings" "time" @@ -15,6 +14,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKDataSource("aws_ecrpublic_authorization_token") @@ -59,7 +59,7 @@ func dataSourceAuthorizationTokenRead(ctx context.Context, d *schema.ResourceDat authorizationData := out.AuthorizationData authorizationToken := aws.StringValue(authorizationData.AuthorizationToken) expiresAt := aws.TimeValue(authorizationData.ExpiresAt).Format(time.RFC3339) - authBytes, err := base64.URLEncoding.DecodeString(authorizationToken) + authBytes, err := itypes.Base64Decode(authorizationToken) if err != nil { return sdkdiag.AppendErrorf(diags, "decoding ECR Public authorization token: %s", err) } diff --git a/internal/service/ecrpublic/repository.go b/internal/service/ecrpublic/repository.go index 9696fe6f1f08..3808b0d71a65 100644 --- a/internal/service/ecrpublic/repository.go +++ b/internal/service/ecrpublic/repository.go @@ -5,7 +5,6 @@ package ecrpublic import ( "context" - "encoding/base64" "fmt" "log" "time" @@ -23,6 +22,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/flex" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" 
"github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -356,8 +356,7 @@ func expandRepositoryCatalogData(tfMap map[string]interface{}) *ecrpublic.Reposi } if v, ok := tfMap["logo_image_blob"].(string); ok && len(v) > 0 { - data, _ := base64.StdEncoding.DecodeString(v) - repositoryCatalogDataInput.LogoImageBlob = data + repositoryCatalogDataInput.LogoImageBlob = itypes.MustBase64Decode(v) } if v, ok := tfMap["operating_systems"].(*schema.Set); ok { diff --git a/internal/service/iam/access_key.go b/internal/service/iam/access_key.go index 9238c4cb8456..13d331820896 100644 --- a/internal/service/iam/access_key.go +++ b/internal/service/iam/access_key.go @@ -7,7 +7,6 @@ import ( "context" "crypto/hmac" "crypto/sha256" - "encoding/base64" "fmt" "log" "time" @@ -23,6 +22,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" tfslices "github.com/hashicorp/terraform-provider-aws/internal/slices" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKResource("aws_iam_access_key", name="Access Key") @@ -128,7 +128,7 @@ func resourceAccessKeyCreate(ctx context.Context, d *schema.ResourceData, meta i return sdkdiag.AppendErrorf(diags, "CreateAccessKey response did not contain a Secret Access Key as expected") } - sesSMTPPasswordV4, err := SessmTPPasswordFromSecretKeySigV4(createResp.AccessKey.SecretAccessKey, meta.(*conns.AWSClient).Region) + sesSMTPPasswordV4, err := sesSMTPPasswordFromSecretKeySigV4(createResp.AccessKey.SecretAccessKey, meta.(*conns.AWSClient).Region) if err != nil { return sdkdiag.AppendErrorf(diags, "getting SES SigV4 SMTP Password from Secret Access Key: %s", err) } @@ -336,7 +336,7 @@ func hmacSignature(key []byte, value []byte) ([]byte, 
error) { return h.Sum(nil), nil } -func SessmTPPasswordFromSecretKeySigV4(key *string, region string) (string, error) { +func sesSMTPPasswordFromSecretKeySigV4(key *string, region string) (string, error) { if key == nil { return "", nil } @@ -346,7 +346,7 @@ func SessmTPPasswordFromSecretKeySigV4(key *string, region string) (string, erro terminal := []byte("aws4_request") message := []byte("SendRawEmail") - rawSig, err := hmacSignature([]byte("AWS4"+*key), date) + rawSig, err := hmacSignature([]byte("AWS4"+aws.StringValue(key)), date) if err != nil { return "", err } @@ -367,5 +367,5 @@ func SessmTPPasswordFromSecretKeySigV4(key *string, region string) (string, erro versionedSig := make([]byte, 0, len(rawSig)+1) versionedSig = append(versionedSig, version) versionedSig = append(versionedSig, rawSig...) - return base64.StdEncoding.EncodeToString(versionedSig), nil + return itypes.Base64Encode(versionedSig), nil } diff --git a/internal/service/iam/access_key_test.go b/internal/service/iam/access_key_test.go index b27be71e75b0..0bc4beb935e0 100644 --- a/internal/service/iam/access_key_test.go +++ b/internal/service/iam/access_key_test.go @@ -309,7 +309,7 @@ func TestSESSMTPPasswordFromSecretKeySigV4(t *testing.T) { } for _, tc := range cases { - actual, err := tfiam.SessmTPPasswordFromSecretKeySigV4(&tc.Input, tc.Region) + actual, err := tfiam.SESSMTPPasswordFromSecretKeySigV4(&tc.Input, tc.Region) if err != nil { t.Fatalf("unexpected error: %s", err) } diff --git a/internal/service/iam/encryption.go b/internal/service/iam/encryption.go index eb3cca3b7254..39f5ccd5396f 100644 --- a/internal/service/iam/encryption.go +++ b/internal/service/iam/encryption.go @@ -4,10 +4,10 @@ package iam import ( - "encoding/base64" "fmt" "strings" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/vault/helper/pgpkeys" ) @@ -38,5 +38,5 @@ func encryptValue(encryptionKey, value, description string) (string, string, 
err return "", "", fmt.Errorf("encrypting %s: %w", description, err) } - return fingerprints[0], base64.StdEncoding.EncodeToString(encryptedValue[0]), nil + return fingerprints[0], itypes.Base64Encode(encryptedValue[0]), nil } diff --git a/internal/service/iam/exports_test.go b/internal/service/iam/exports_test.go index 47be0c257fa9..85e0bd74d9e4 100644 --- a/internal/service/iam/exports_test.go +++ b/internal/service/iam/exports_test.go @@ -49,4 +49,5 @@ var ( FindSSHPublicKeyByThreePartKey = findSSHPublicKeyByThreePartKey FindUserByName = findUserByName FindVirtualMFADeviceBySerialNumber = findVirtualMFADeviceBySerialNumber + SESSMTPPasswordFromSecretKeySigV4 = sesSMTPPasswordFromSecretKeySigV4 ) diff --git a/internal/service/imagebuilder/image_recipe.go b/internal/service/imagebuilder/image_recipe.go index fce38e8a381d..fbccc745c8ae 100644 --- a/internal/service/imagebuilder/image_recipe.go +++ b/internal/service/imagebuilder/image_recipe.go @@ -5,7 +5,6 @@ package imagebuilder import ( "context" - "fmt" "log" "strconv" @@ -220,15 +219,7 @@ func ResourceImageRecipe() *schema.Resource { Computed: true, ValidateFunc: validation.All( validation.StringLenBetween(1, 21847), - func(v interface{}, name string) (warns []string, errs []error) { - s := v.(string) - if !verify.IsBase64Encoded([]byte(s)) { - errs = append(errs, fmt.Errorf( - "%s: must be base64-encoded", name, - )) - } - return - }, + verify.ValidBase64String, ), }, "version": { diff --git a/internal/service/imagebuilder/image_recipe_test.go b/internal/service/imagebuilder/image_recipe_test.go index bf3975e55cba..999aae2bdb99 100644 --- a/internal/service/imagebuilder/image_recipe_test.go +++ b/internal/service/imagebuilder/image_recipe_test.go @@ -18,7 +18,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/acctest" "github.com/hashicorp/terraform-provider-aws/internal/conns" tfimagebuilder "github.com/hashicorp/terraform-provider-aws/internal/service/imagebuilder" - 
"github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -702,7 +702,7 @@ func TestAccImageBuilderImageRecipe_userDataBase64(t *testing.T) { Config: testAccImageRecipeConfig_userDataBase64(rName), Check: resource.ComposeTestCheckFunc( testAccCheckImageRecipeExists(ctx, resourceName), - resource.TestCheckResourceAttr(resourceName, "user_data_base64", verify.Base64Encode([]byte("hello world"))), + resource.TestCheckResourceAttr(resourceName, "user_data_base64", itypes.Base64EncodeOnce([]byte("hello world"))), ), }, { diff --git a/internal/service/kafkaconnect/worker_configuration.go b/internal/service/kafkaconnect/worker_configuration.go index e39cf1252993..03a8d522d092 100644 --- a/internal/service/kafkaconnect/worker_configuration.go +++ b/internal/service/kafkaconnect/worker_configuration.go @@ -5,7 +5,6 @@ package kafkaconnect import ( "context" - "encoding/base64" "log" "github.com/aws/aws-sdk-go/aws" @@ -14,8 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + "github.com/hashicorp/terraform-provider-aws/internal/flex" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" - "github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKResource("aws_mskconnect_worker_configuration") @@ -73,7 +73,7 @@ func resourceWorkerConfigurationCreate(ctx context.Context, d *schema.ResourceDa name := d.Get("name").(string) input := &kafkaconnect.CreateWorkerConfigurationInput{ Name: aws.String(name), - PropertiesFileContent: aws.String(verify.Base64Encode([]byte(d.Get("properties_file_content").(string)))), + PropertiesFileContent: 
flex.StringValueToBase64String(d.Get("properties_file_content").(string)), } if v, ok := d.GetOk("description"); ok { @@ -125,11 +125,10 @@ func resourceWorkerConfigurationRead(ctx context.Context, d *schema.ResourceData } func decodePropertiesFileContent(content string) string { - result, err := base64.StdEncoding.DecodeString(content) - + v, err := itypes.Base64Decode(content) if err != nil { return content } - return string(result) + return string(v) } diff --git a/internal/service/kms/ciphertext.go b/internal/service/kms/ciphertext.go index 1936e75c306d..1a15eb68d81d 100644 --- a/internal/service/kms/ciphertext.go +++ b/internal/service/kms/ciphertext.go @@ -5,8 +5,6 @@ package kms import ( "context" - "encoding/base64" - "log" "time" "github.com/aws/aws-sdk-go/aws" @@ -16,6 +14,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKResource("aws_kms_ciphertext") @@ -26,29 +25,26 @@ func ResourceCiphertext() *schema.Resource { DeleteWithoutTimeout: schema.NoopContext, Schema: map[string]*schema.Schema{ - "plaintext": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Sensitive: true, - }, - - "key_id": { + "ciphertext_blob": { Type: schema.TypeString, - Required: true, - ForceNew: true, + Computed: true, }, - "context": { Type: schema.TypeMap, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, ForceNew: true, }, - - "ciphertext_blob": { + "key_id": { Type: schema.TypeString, - Computed: true, + Required: true, + ForceNew: true, + }, + "plaintext": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Sensitive: true, }, }, } @@ -58,26 +54,25 @@ func resourceCiphertextCreate(ctx context.Context, d *schema.ResourceData, meta var diags diag.Diagnostics conn := meta.(*conns.AWSClient).KMSConn(ctx) - 
//lintignore:R017 // Allow legacy unstable ID usage in managed resource - d.SetId(time.Now().UTC().String()) - keyID := d.Get("key_id").(string) - req := &kms.EncryptInput{ + input := &kms.EncryptInput{ KeyId: aws.String(d.Get("key_id").(string)), Plaintext: []byte(d.Get("plaintext").(string)), } - if ec := d.Get("context"); ec != nil { - req.EncryptionContext = flex.ExpandStringMap(ec.(map[string]interface{})) + if v, ok := d.GetOk("context"); ok && len(v.(map[string]interface{})) > 0 { + input.EncryptionContext = flex.ExpandStringMap(v.(map[string]interface{})) } - log.Printf("[DEBUG] KMS encrypting with KMS Key: %s", keyID) - resp, err := conn.EncryptWithContext(ctx, req) + output, err := conn.EncryptWithContext(ctx, input) + if err != nil { return sdkdiag.AppendErrorf(diags, "encrypting with KMS Key (%s): %s", keyID, err) } - d.Set("ciphertext_blob", base64.StdEncoding.EncodeToString(resp.CiphertextBlob)) + //lintignore:R017 // Allow legacy unstable ID usage in managed resource + d.SetId(time.Now().UTC().String()) + d.Set("ciphertext_blob", itypes.Base64Encode(output.CiphertextBlob)) return diags } diff --git a/internal/service/kms/ciphertext_data_source.go b/internal/service/kms/ciphertext_data_source.go index facd994d0de0..b611b717ac04 100644 --- a/internal/service/kms/ciphertext_data_source.go +++ b/internal/service/kms/ciphertext_data_source.go @@ -5,8 +5,6 @@ package kms import ( "context" - "encoding/base64" - "log" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/kms" @@ -15,6 +13,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKDataSource("aws_kms_ciphertext") @@ -23,26 +22,23 @@ func DataSourceCiphertext() *schema.Resource { ReadWithoutTimeout: dataSourceCiphertextRead, Schema: map[string]*schema.Schema{ - 
"plaintext": { - Type: schema.TypeString, - Required: true, - Sensitive: true, - }, - - "key_id": { + "ciphertext_blob": { Type: schema.TypeString, - Required: true, + Computed: true, }, - "context": { Type: schema.TypeMap, Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, }, - - "ciphertext_blob": { + "key_id": { Type: schema.TypeString, - Computed: true, + Required: true, + }, + "plaintext": { + Type: schema.TypeString, + Required: true, + Sensitive: true, }, }, } @@ -53,24 +49,23 @@ func dataSourceCiphertextRead(ctx context.Context, d *schema.ResourceData, meta conn := meta.(*conns.AWSClient).KMSConn(ctx) keyID := d.Get("key_id").(string) - req := &kms.EncryptInput{ + input := &kms.EncryptInput{ KeyId: aws.String(keyID), Plaintext: []byte(d.Get("plaintext").(string)), } - if ec := d.Get("context"); ec != nil { - req.EncryptionContext = flex.ExpandStringMap(ec.(map[string]interface{})) + if v, ok := d.GetOk("context"); ok && len(v.(map[string]interface{})) > 0 { + input.EncryptionContext = flex.ExpandStringMap(v.(map[string]interface{})) } - log.Printf("[DEBUG] KMS encrypting with KMS Key: %s", keyID) - resp, err := conn.EncryptWithContext(ctx, req) + output, err := conn.EncryptWithContext(ctx, input) + if err != nil { return sdkdiag.AppendErrorf(diags, "encrypting with KMS Key (%s): %s", keyID, err) } - d.SetId(aws.StringValue(resp.KeyId)) - - d.Set("ciphertext_blob", base64.StdEncoding.EncodeToString(resp.CiphertextBlob)) + d.SetId(aws.StringValue(output.KeyId)) + d.Set("ciphertext_blob", itypes.Base64Encode(output.CiphertextBlob)) return diags } diff --git a/internal/service/kms/external_key.go b/internal/service/kms/external_key.go index f1b0d8ede029..eaa3e1c54c85 100644 --- a/internal/service/kms/external_key.go +++ b/internal/service/kms/external_key.go @@ -9,7 +9,6 @@ import ( "crypto/rsa" "crypto/sha256" "crypto/x509" - "encoding/base64" "fmt" "log" "time" @@ -27,6 +26,7 @@ import ( 
"github.com/hashicorp/terraform-provider-aws/internal/logging" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -365,24 +365,21 @@ func importExternalKeyMaterial(ctx context.Context, conn *kms.KMS, keyID, keyMat return fmt.Errorf("getting parameters for import: %w", err) } - output := outputRaw.(*kms.GetParametersForImportOutput) - - keyMaterial, err := base64.StdEncoding.DecodeString(keyMaterialBase64) - + keyMaterial, err := itypes.Base64Decode(keyMaterialBase64) if err != nil { - return fmt.Errorf("Base64 decoding key material: %w", err) + return err } - publicKey, err := x509.ParsePKIXPublicKey(output.PublicKey) + output := outputRaw.(*kms.GetParametersForImportOutput) + publicKey, err := x509.ParsePKIXPublicKey(output.PublicKey) if err != nil { - return fmt.Errorf("parsing public key: %w", err) + return fmt.Errorf("parsing public key (PKIX): %w", err) } encryptedKeyMaterial, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, publicKey.(*rsa.PublicKey), keyMaterial, []byte{}) - if err != nil { - return fmt.Errorf("encrypting key material: %w", err) + return fmt.Errorf("encrypting key material (RSA-OAEP): %w", err) } input := &kms.ImportKeyMaterialInput{ @@ -394,9 +391,8 @@ func importExternalKeyMaterial(ctx context.Context, conn *kms.KMS, keyID, keyMat if validTo != "" { t, err := time.Parse(time.RFC3339, validTo) - if err != nil { - return fmt.Errorf("parsing valid_to timestamp: %w", err) + return err } input.ExpirationModel = aws.String(kms.ExpirationModelTypeKeyMaterialExpires) diff --git a/internal/service/kms/external_key_test.go b/internal/service/kms/external_key_test.go index 3f7cba659027..3d938d8fdfaa 100644 --- a/internal/service/kms/external_key_test.go +++ 
b/internal/service/kms/external_key_test.go @@ -226,6 +226,7 @@ func TestAccKMSExternalKey_enabled(t *testing.T) { ImportStateVerifyIgnore: []string{ "bypass_policy_lockout_safety_check", "deletion_window_in_days", + "key_material_base64", }, }, { @@ -275,6 +276,7 @@ func TestAccKMSExternalKey_keyMaterialBase64(t *testing.T) { ImportStateVerifyIgnore: []string{ "bypass_policy_lockout_safety_check", "deletion_window_in_days", + "key_material_base64", }, }, { @@ -464,6 +466,7 @@ func TestAccKMSExternalKey_validTo(t *testing.T) { ImportStateVerifyIgnore: []string{ "bypass_policy_lockout_safety_check", "deletion_window_in_days", + "key_material_base64", }, }, { diff --git a/internal/service/kms/public_key_data_source.go b/internal/service/kms/public_key_data_source.go index 045fd5dd6ac8..17cd5c8feb9f 100644 --- a/internal/service/kms/public_key_data_source.go +++ b/internal/service/kms/public_key_data_source.go @@ -5,7 +5,6 @@ package kms import ( "context" - "encoding/base64" "encoding/pem" "github.com/aws/aws-sdk-go/aws" @@ -15,6 +14,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKDataSource("aws_kms_public_key") @@ -90,7 +90,7 @@ func dataSourcePublicKeyRead(ctx context.Context, d *schema.ResourceData, meta i d.Set("arn", output.KeyId) d.Set("customer_master_key_spec", output.CustomerMasterKeySpec) d.Set("key_usage", output.KeyUsage) - d.Set("public_key", base64.StdEncoding.EncodeToString(output.PublicKey)) + d.Set("public_key", itypes.Base64Encode(output.PublicKey)) d.Set("public_key_pem", string(pem.EncodeToMemory(&pem.Block{ Type: "PUBLIC KEY", Bytes: output.PublicKey, diff --git a/internal/service/kms/secrets_data_source.go b/internal/service/kms/secrets_data_source.go index 1fec9a54ef12..50e2052bdc11 100644 --- 
a/internal/service/kms/secrets_data_source.go +++ b/internal/service/kms/secrets_data_source.go @@ -5,7 +5,6 @@ package kms import ( "context" - "encoding/base64" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/kms" @@ -15,6 +14,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" ) // @SDKDataSource("aws_kms_secrets") @@ -81,8 +81,7 @@ func dataSourceSecretsRead(ctx context.Context, d *schema.ResourceData, meta int name := secret["name"].(string) // base64 decode the payload - payload, err := base64.StdEncoding.DecodeString(secret["payload"].(string)) - + payload, err := itypes.Base64Decode(secret["payload"].(string)) if err != nil { return sdkdiag.AppendErrorf(diags, "invalid base64 value for secret (%s): %s", name, err) } diff --git a/internal/service/lightsail/key_pair.go b/internal/service/lightsail/key_pair.go index 166355d51549..b0777cca1a36 100644 --- a/internal/service/lightsail/key_pair.go +++ b/internal/service/lightsail/key_pair.go @@ -5,7 +5,6 @@ package lightsail import ( "context" - "encoding/base64" "fmt" "log" "strings" @@ -19,6 +18,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/vault/helper/pgpkeys" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" @@ -249,5 +249,5 @@ func encryptValue(encryptionKey, value, description string) (string, string, err return "", "", fmt.Errorf("encrypting %s: %w", description, err) } - return fingerprints[0], 
base64.StdEncoding.EncodeToString(encryptedValue[0]), nil + return fingerprints[0], itypes.Base64Encode(encryptedValue[0]), nil } diff --git a/internal/service/mq/configuration.go b/internal/service/mq/configuration.go index 8a390a946f16..2e7a21b08141 100644 --- a/internal/service/mq/configuration.go +++ b/internal/service/mq/configuration.go @@ -20,6 +20,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" + "github.com/hashicorp/terraform-provider-aws/internal/flex" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/internal/verify" @@ -132,7 +133,7 @@ func resourceConfigurationCreate(ctx context.Context, d *schema.ResourceData, me if v, ok := d.GetOk("data"); ok { input := &mq.UpdateConfigurationInput{ ConfigurationId: aws.String(d.Id()), - Data: aws.String(base64.StdEncoding.EncodeToString([]byte(v.(string)))), + Data: flex.StringValueToBase64String(v.(string)), } if v, ok := d.GetOk("description"); ok { diff --git a/internal/service/s3/bucket_object.go b/internal/service/s3/bucket_object.go index fe0d296ce847..853f7f1780db 100644 --- a/internal/service/s3/bucket_object.go +++ b/internal/service/s3/bucket_object.go @@ -10,7 +10,6 @@ package s3 import ( "bytes" "context" - "encoding/base64" "fmt" "io" "log" @@ -32,6 +31,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/service/kms" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" "github.com/mitchellh/go-homedir" @@ -402,7 +402,7 @@ func 
resourceBucketObjectUpload(ctx context.Context, d *schema.ResourceData, met content := v.(string) // We can't do streaming decoding here (with base64.NewDecoder) because // the AWS SDK requires an io.ReadSeeker but a base64 decoder can't seek. - contentRaw, err := base64.StdEncoding.DecodeString(content) + contentRaw, err := itypes.Base64Decode(content) if err != nil { return sdkdiag.AppendErrorf(diags, "decoding content_base64: %s", err) } diff --git a/internal/service/s3/object.go b/internal/service/s3/object.go index 606a9a43dcc6..1322fdcc0295 100644 --- a/internal/service/s3/object.go +++ b/internal/service/s3/object.go @@ -6,7 +6,6 @@ package s3 import ( "bytes" "context" - "encoding/base64" "fmt" "io" "log" @@ -34,6 +33,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/service/kms" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" "github.com/hashicorp/terraform-provider-aws/names" "github.com/mitchellh/go-homedir" @@ -481,17 +481,15 @@ func resourceObjectUpload(ctx context.Context, d *schema.ResourceData, meta inte } }() } else if v, ok := d.GetOk("content"); ok { - content := v.(string) - body = bytes.NewReader([]byte(content)) + body = strings.NewReader(v.(string)) } else if v, ok := d.GetOk("content_base64"); ok { - content := v.(string) // We can't do streaming decoding here (with base64.NewDecoder) because // the AWS SDK requires an io.ReadSeeker but a base64 decoder can't seek. 
- contentRaw, err := base64.StdEncoding.DecodeString(content) + v, err := itypes.Base64Decode(v.(string)) if err != nil { - return sdkdiag.AppendErrorf(diags, "decoding content_base64: %s", err) + return sdkdiag.AppendFromErr(diags, err) } - body = bytes.NewReader(contentRaw) + body = bytes.NewReader(v) } else { body = bytes.NewReader([]byte{}) } diff --git a/internal/service/sagemaker/notebook_instance_lifecycle_configuration_test.go b/internal/service/sagemaker/notebook_instance_lifecycle_configuration_test.go index 6adca00bc626..cbb6825e7a20 100644 --- a/internal/service/sagemaker/notebook_instance_lifecycle_configuration_test.go +++ b/internal/service/sagemaker/notebook_instance_lifecycle_configuration_test.go @@ -16,7 +16,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-aws/internal/acctest" "github.com/hashicorp/terraform-provider-aws/internal/conns" - "github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -77,8 +77,8 @@ func TestAccSageMakerNotebookInstanceLifecycleConfiguration_update(t *testing.T) Check: resource.ComposeTestCheckFunc( testAccCheckNotebookInstanceLifecycleConfigurationExists(ctx, resourceName, &lifecycleConfig), - resource.TestCheckResourceAttr(resourceName, "on_create", verify.Base64Encode([]byte("echo bla"))), - resource.TestCheckResourceAttr(resourceName, "on_start", verify.Base64Encode([]byte("echo blub"))), + resource.TestCheckResourceAttr(resourceName, "on_create", itypes.Base64EncodeOnce([]byte("echo bla"))), + resource.TestCheckResourceAttr(resourceName, "on_start", itypes.Base64EncodeOnce([]byte("echo blub"))), ), }, { diff --git a/internal/service/secretsmanager/secret_version.go b/internal/service/secretsmanager/secret_version.go index a7c6b94c4fba..3a41145cff5c 100644 --- a/internal/service/secretsmanager/secret_version.go +++ 
b/internal/service/secretsmanager/secret_version.go @@ -5,7 +5,6 @@ package secretsmanager import ( "context" - "encoding/base64" "fmt" "log" "strings" @@ -22,6 +21,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/flex" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/internal/verify" ) @@ -93,8 +93,7 @@ func resourceSecretVersionCreate(ctx context.Context, d *schema.ResourceData, me if v, ok := d.GetOk("secret_binary"); ok { var err error - input.SecretBinary, err = base64.StdEncoding.DecodeString(v.(string)) - + input.SecretBinary, err = itypes.Base64Decode(v.(string)) if err != nil { return sdkdiag.AppendFromErr(diags, err) } @@ -148,7 +147,7 @@ func resourceSecretVersionRead(ctx context.Context, d *schema.ResourceData, meta } d.Set("arn", output.ARN) - d.Set("secret_binary", verify.Base64Encode(output.SecretBinary)) + d.Set("secret_binary", itypes.Base64EncodeOnce(output.SecretBinary)) d.Set("secret_id", secretID) d.Set("secret_string", output.SecretString) d.Set("version_id", output.VersionId) diff --git a/internal/service/secretsmanager/secret_version_test.go b/internal/service/secretsmanager/secret_version_test.go index 582f48de82d9..607644b53aa2 100644 --- a/internal/service/secretsmanager/secret_version_test.go +++ b/internal/service/secretsmanager/secret_version_test.go @@ -17,7 +17,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" tfsecretsmanager "github.com/hashicorp/terraform-provider-aws/internal/service/secretsmanager" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" - "github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -71,7 +71,7 @@ func 
TestAccSecretsManagerSecretVersion_base64Binary(t *testing.T) { Config: testAccSecretVersionConfig_binary(rName), Check: resource.ComposeTestCheckFunc( testAccCheckSecretVersionExists(ctx, resourceName, &version), - resource.TestCheckResourceAttr(resourceName, "secret_binary", verify.Base64Encode([]byte("test-binary"))), + resource.TestCheckResourceAttr(resourceName, "secret_binary", itypes.Base64EncodeOnce([]byte("test-binary"))), resource.TestCheckResourceAttrSet(resourceName, "version_id"), resource.TestCheckResourceAttr(resourceName, "version_stages.#", "1"), resource.TestCheckTypeSetElemAttr(resourceName, "version_stages.*", "AWSCURRENT"), diff --git a/internal/service/sesv2/email_identity.go b/internal/service/sesv2/email_identity.go index 9a0e1ddef4f6..43809770c7ad 100644 --- a/internal/service/sesv2/email_identity.go +++ b/internal/service/sesv2/email_identity.go @@ -68,15 +68,7 @@ func ResourceEmailIdentity() *schema.Resource { RequiredWith: []string{"dkim_signing_attributes.0.domain_signing_selector"}, ValidateFunc: validation.All( validation.StringLenBetween(1, 20480), - func(v interface{}, name string) (warns []string, errs []error) { - s := v.(string) - if !verify.IsBase64Encoded([]byte(s)) { - errs = append(errs, fmt.Errorf( - "%s: must be base64-encoded", name, - )) - } - return - }, + verify.ValidBase64String, ), }, "domain_signing_selector": { diff --git a/internal/service/sesv2/email_identity_test.go b/internal/service/sesv2/email_identity_test.go index 801a73626156..27f1a5c524b6 100644 --- a/internal/service/sesv2/email_identity_test.go +++ b/internal/service/sesv2/email_identity_test.go @@ -18,7 +18,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/create" tfsesv2 "github.com/hashicorp/terraform-provider-aws/internal/service/sesv2" - "github.com/hashicorp/terraform-provider-aws/internal/verify" + itypes "github.com/hashicorp/terraform-provider-aws/internal/types" 
"github.com/hashicorp/terraform-provider-aws/names" ) @@ -194,10 +194,10 @@ func TestAccSESV2EmailIdentity_domainSigning(t *testing.T) { rName := acctest.RandomDomainName() resourceName := "aws_sesv2_email_identity.test" - key1 := verify.Base64Encode([]byte(acctest.TLSRSAPrivateKeyPEM(t, 2048))) + key1 := itypes.Base64EncodeOnce([]byte(acctest.TLSRSAPrivateKeyPEM(t, 2048))) selector1 := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - key2 := verify.Base64Encode([]byte(acctest.TLSRSAPrivateKeyPEM(t, 2048))) + key2 := itypes.Base64EncodeOnce([]byte(acctest.TLSRSAPrivateKeyPEM(t, 2048))) selector2 := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resource.ParallelTest(t, resource.TestCase{ diff --git a/internal/types/base64.go b/internal/types/base64.go new file mode 100644 index 000000000000..72240ad88605 --- /dev/null +++ b/internal/types/base64.go @@ -0,0 +1,38 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package types + +import ( + "encoding/base64" + + "github.com/hashicorp/terraform-provider-aws/internal/errs" +) + +func Base64Decode(s string) ([]byte, error) { + return base64.StdEncoding.DecodeString(s) +} + +func MustBase64Decode(s string) []byte { + return errs.Must(Base64Decode(s)) +} + +func Base64Encode(blob []byte) string { + return base64.StdEncoding.EncodeToString(blob) +} + +// Base64EncodeOnce encodes the input blob using base64.StdEncoding.EncodeToString. +// If the blob is already base64 encoded, return the original input unchanged. +func Base64EncodeOnce(blob []byte) string { + if s := string(blob); IsBase64Encoded(s) { + return s + } + + return Base64Encode(blob) +} + +// IsBase64Encoded checks if the input string is base64 encoded. 
+func IsBase64Encoded(s string) bool { + _, err := base64.StdEncoding.DecodeString(s) + return err == nil +} diff --git a/internal/types/base64_test.go b/internal/types/base64_test.go new file mode 100644 index 000000000000..826731c4a93e --- /dev/null +++ b/internal/types/base64_test.go @@ -0,0 +1,42 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package types + +import "testing" + +func TestBase64EncodeOnce(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + blob []byte + s string + }{ + {[]byte("data should be encoded"), "ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA=="}, + {[]byte("ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA=="), "ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA=="}, + } { + s := Base64EncodeOnce(tc.blob) + if got, want := s, tc.s; got != want { + t.Errorf("Base64EncodeOnce(%q) = %v, want %v", tc.blob, got, want) + } + } +} + +func TestIsBase64Encoded(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + s string + valid bool + }{ + {"ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA==", true}, + {"ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA==%%", false}, + {"123456789012", true}, + {"", true}, + } { + ok := IsBase64Encoded(tc.s) + if got, want := ok, tc.valid; got != want { + t.Errorf("IsBase64Encoded(%q) = %v, want %v", tc.s, got, want) + } + } +} diff --git a/internal/verify/base64.go b/internal/verify/base64.go deleted file mode 100644 index 9927d226da94..000000000000 --- a/internal/verify/base64.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package verify - -import ( - "encoding/base64" -) - -// Base64Encode encodes data if the input isn't already encoded using base64.StdEncoding.EncodeToString. -// If the input is already base64 encoded, return the original input unchanged. 
-func Base64Encode(data []byte) string { - // Check whether the data is already Base64 encoded; don't double-encode - if IsBase64Encoded(data) { - return string(data) - } - // data has not been encoded encode and return - return base64.StdEncoding.EncodeToString(data) -} - -func IsBase64Encoded(data []byte) bool { - _, err := base64.StdEncoding.DecodeString(string(data)) - return err == nil -} diff --git a/internal/verify/base64_test.go b/internal/verify/base64_test.go deleted file mode 100644 index 7e5284db0ba2..000000000000 --- a/internal/verify/base64_test.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package verify - -import ( - "testing" -) - -func TestBase64Encode(t *testing.T) { - t.Parallel() - - for _, tt := range base64encodingTests { - out := Base64Encode(tt.in) - if out != tt.out { - t.Errorf("Base64Encode(%s) => %s, want %s", tt.in, out, tt.out) - } - } -} - -var base64encodingTests = []struct { - in []byte - out string -}{ - // normal encoding case - {[]byte("data should be encoded"), "ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA=="}, - // base64 encoded input should result in no change of output - {[]byte("ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA=="), "ZGF0YSBzaG91bGQgYmUgZW5jb2RlZA=="}, -} diff --git a/internal/verify/validate.go b/internal/verify/validate.go index 509b72dfee75..9bf6284dec89 100644 --- a/internal/verify/validate.go +++ b/internal/verify/validate.go @@ -142,7 +142,7 @@ func ValidAccountID(v interface{}, k string) (ws []string, errors []error) { func ValidBase64String(v interface{}, k string) (ws []string, errors []error) { value := v.(string) - if !IsBase64Encoded([]byte(value)) { + if !itypes.IsBase64Encoded(value) { errors = append(errors, fmt.Errorf( "%q (%q) must be base64-encoded", k, value)) diff --git a/website/docs/r/datasync_location_azure_blob.html.markdown b/website/docs/r/datasync_location_azure_blob.html.markdown index 4960595020cf..84b3604e3b5a 100644 --- 
a/website/docs/r/datasync_location_azure_blob.html.markdown +++ b/website/docs/r/datasync_location_azure_blob.html.markdown @@ -18,7 +18,7 @@ Manages a Microsoft Azure Blob Storage Location within AWS DataSync. resource "aws_datasync_location_azure_blob" "example" { agent_arns = [aws_datasync_agent.example.arn] authentication_type = "SAS" - container_url = "https://example.com/path" + container_url = "https://myaccount.blob.core.windows.net/mycontainer" sas_configuration { token = "sp=r&st=2023-12-20T14:54:52Z&se=2023-12-20T22:54:52Z&spr=https&sv=2021-06-08&sr=c&sig=aBBKDWQvyuVcTPH9EBp%2FXTI9E%2F%2Fmq171%2BZU178wcwqU%3D" diff --git a/website/docs/r/datasync_location_hdfs.html.markdown b/website/docs/r/datasync_location_hdfs.html.markdown index 78ee108fca3f..1d9273564352 100644 --- a/website/docs/r/datasync_location_hdfs.html.markdown +++ b/website/docs/r/datasync_location_hdfs.html.markdown @@ -27,21 +27,41 @@ resource "aws_datasync_location_hdfs" "example" { } ``` +### Kerberos Authentication + +```terraform +resource "aws_datasync_location_hdfs" "example" { + agent_arns = [aws_datasync_agent.example.arn] + authentication_type = "KERBEROS" + + name_node { + hostname = aws_instance.example.private_dns + port = 80 + } + + kerberos_principal = "user@example.com" + kerberos_keytab_base64 = filebase64("user.keytab") + kerberos_krb5_conf = file("krb5.conf") +} +``` + ## Argument Reference This resource supports the following arguments: * `agent_arns` - (Required) A list of DataSync Agent ARNs with which this location will be associated. * `authentication_type` - (Required) The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`. -* `name_node` - (Required) The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. 
See configuration below. -* `simple_user` - (Optional) The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required. * `block_size` - (Optional) The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB). -* `replication_factor` - (Optional) The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes. -* `kerberos_keytab` - (Optional) The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. If `KERBEROS` is specified for `authentication_type`, this parameter is required. -* `kerberos_krb5_conf` - (Optional) The krb5.conf file that contains the Kerberos configuration information. If `KERBEROS` is specified for `authentication_type`, this parameter is required. +* `kerberos_keytab` - (Optional) The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required. +* `kerberos_keytab_base64` - (Optional) Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required. +* `kerberos_krb5_conf` - (Optional) The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required. +* `kerberos_krb5_conf_base64` - (Optional) Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. 
If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required. * `kerberos_principal` - (Optional) The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required. * `kms_key_provider_uri` - (Optional) The URI of the HDFS cluster's Key Management Server (KMS). +* `name_node` - (Required) The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below. * `qop_configuration` - (Optional) The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. +* `replication_factor` - (Optional) The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes. +* `simple_user` - (Optional) The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required. * `subdirectory` - (Optional) A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /. * `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.