Commit 252bc3e

Documentation note on aws_s3_bucket_object keys with leading '/'. (ha…

Kit Ewbank authored and nywilken committed Oct 17, 2019
1 parent abd539e commit 252bc3e

Showing 3 changed files with 168 additions and 0 deletions.
164 changes: 164 additions & 0 deletions aws/data_source_aws_s3_bucket_object_test.go
@@ -255,6 +255,101 @@ func TestAccDataSourceAWSS3BucketObject_ObjectLockLegalHoldOn(t *testing.T) {
})
}

func TestAccDataSourceAWSS3BucketObject_LeadingSlash(t *testing.T) {
var rObj s3.GetObjectOutput
var dsObj1, dsObj2, dsObj3 s3.GetObjectOutput
resourceName := "aws_s3_bucket_object.object"
dataSourceName1 := "data.aws_s3_bucket_object.obj1"
dataSourceName2 := "data.aws_s3_bucket_object.obj2"
dataSourceName3 := "data.aws_s3_bucket_object.obj3"
rInt := acctest.RandInt()
resourceOnlyConf, conf := testAccAWSDataSourceS3ObjectConfig_leadingSlash(rInt)

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
PreventPostDestroyRefresh: true,
Steps: []resource.TestStep{
{
Config: resourceOnlyConf,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSS3BucketObjectExists(resourceName, &rObj),
),
},
{
Config: conf,
Check: resource.ComposeTestCheckFunc(
testAccCheckAwsS3ObjectDataSourceExists(dataSourceName1, &dsObj1),
resource.TestCheckResourceAttr(dataSourceName1, "content_length", "3"),
resource.TestCheckResourceAttr(dataSourceName1, "content_type", "text/plain"),
resource.TestCheckResourceAttr(dataSourceName1, "etag", "a6105c0a611b41b08f1209506350279e"),
resource.TestMatchResourceAttr(dataSourceName1, "last_modified",
regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
resource.TestCheckResourceAttr(dataSourceName1, "body", "yes"),
testAccCheckAwsS3ObjectDataSourceExists(dataSourceName2, &dsObj2),
resource.TestCheckResourceAttr(dataSourceName2, "content_length", "3"),
resource.TestCheckResourceAttr(dataSourceName2, "content_type", "text/plain"),
resource.TestCheckResourceAttr(dataSourceName2, "etag", "a6105c0a611b41b08f1209506350279e"),
resource.TestMatchResourceAttr(dataSourceName2, "last_modified",
regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
resource.TestCheckResourceAttr(dataSourceName2, "body", "yes"),
testAccCheckAwsS3ObjectDataSourceExists(dataSourceName3, &dsObj3),
resource.TestCheckResourceAttr(dataSourceName3, "content_length", "3"),
resource.TestCheckResourceAttr(dataSourceName3, "content_type", "text/plain"),
resource.TestCheckResourceAttr(dataSourceName3, "etag", "a6105c0a611b41b08f1209506350279e"),
resource.TestMatchResourceAttr(dataSourceName3, "last_modified",
regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
resource.TestCheckResourceAttr(dataSourceName3, "body", "yes"),
),
},
},
})
}

func TestAccDataSourceAWSS3BucketObject_MultipleSlashes(t *testing.T) {
var rObj1, rObj2 s3.GetObjectOutput
var dsObj1, dsObj2, dsObj3 s3.GetObjectOutput
resourceName1 := "aws_s3_bucket_object.object1"
resourceName2 := "aws_s3_bucket_object.object2"
dataSourceName1 := "data.aws_s3_bucket_object.obj1"
dataSourceName2 := "data.aws_s3_bucket_object.obj2"
dataSourceName3 := "data.aws_s3_bucket_object.obj3"
rInt := acctest.RandInt()
resourceOnlyConf, conf := testAccAWSDataSourceS3ObjectConfig_multipleSlashes(rInt)

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
PreventPostDestroyRefresh: true,
Steps: []resource.TestStep{
{
Config: resourceOnlyConf,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSS3BucketObjectExists(resourceName1, &rObj1),
testAccCheckAWSS3BucketObjectExists(resourceName2, &rObj2),
),
},
{
Config: conf,
Check: resource.ComposeTestCheckFunc(
testAccCheckAwsS3ObjectDataSourceExists(dataSourceName1, &dsObj1),
resource.TestCheckResourceAttr(dataSourceName1, "content_length", "3"),
resource.TestCheckResourceAttr(dataSourceName1, "content_type", "text/plain"),
resource.TestCheckResourceAttr(dataSourceName1, "body", "yes"),
testAccCheckAwsS3ObjectDataSourceExists(dataSourceName2, &dsObj2),
resource.TestCheckResourceAttr(dataSourceName2, "content_length", "3"),
resource.TestCheckResourceAttr(dataSourceName2, "content_type", "text/plain"),
resource.TestCheckResourceAttr(dataSourceName2, "body", "yes"),
testAccCheckAwsS3ObjectDataSourceExists(dataSourceName3, &dsObj3),
resource.TestCheckResourceAttr(dataSourceName3, "content_length", "2"),
resource.TestCheckResourceAttr(dataSourceName3, "content_type", "text/plain"),
resource.TestCheckResourceAttr(dataSourceName3, "body", "no"),
),
},
},
})
}

func testAccCheckAwsS3ObjectDataSourceExists(n string, obj *s3.GetObjectOutput) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
@@ -456,3 +551,72 @@ data "aws_s3_bucket_object" "obj" {

return resources, both
}

func testAccAWSDataSourceS3ObjectConfig_leadingSlash(randInt int) (string, string) {
resources := fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
bucket = "tf-object-test-bucket-%d"
}
resource "aws_s3_bucket_object" "object" {
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "//tf-testing-obj-%d-readable"
content = "yes"
content_type = "text/plain"
}
`, randInt, randInt)

both := fmt.Sprintf(`%s
data "aws_s3_bucket_object" "obj1" {
bucket = "tf-object-test-bucket-%d"
key = "tf-testing-obj-%d-readable"
}
data "aws_s3_bucket_object" "obj2" {
bucket = "tf-object-test-bucket-%d"
key = "/tf-testing-obj-%d-readable"
}
data "aws_s3_bucket_object" "obj3" {
bucket = "tf-object-test-bucket-%d"
key = "//tf-testing-obj-%d-readable"
}
`, resources, randInt, randInt, randInt, randInt, randInt, randInt)

return resources, both
}

func testAccAWSDataSourceS3ObjectConfig_multipleSlashes(randInt int) (string, string) {
resources := fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
bucket = "tf-object-test-bucket-%d"
}
resource "aws_s3_bucket_object" "object1" {
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "first//second///third//"
content = "yes"
content_type = "text/plain"
}
# Without a trailing slash.
resource "aws_s3_bucket_object" "object2" {
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "/first////second/third"
content = "no"
content_type = "text/plain"
}
`, randInt)

both := fmt.Sprintf(`%s
data "aws_s3_bucket_object" "obj1" {
bucket = "tf-object-test-bucket-%d"
key = "first/second/third/"
}
data "aws_s3_bucket_object" "obj2" {
bucket = "tf-object-test-bucket-%d"
key = "first//second///third//"
}
data "aws_s3_bucket_object" "obj3" {
bucket = "tf-object-test-bucket-%d"
key = "first/second/third"
}
`, resources, randInt, randInt, randInt)

return resources, both
}
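
For reference, here is a minimal sketch of the key equivalence these acceptance tests exercise. `normalizeKey` is a hypothetical helper written only for this note, not the provider's actual implementation: it drops leading `/`s and collapses repeated `/`s, which is the behaviour the documentation notes below describe.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// normalizeKey is a hypothetical helper used only to illustrate the
// equivalence the tests above check; it is not the provider's code.
// Leading '/'s are dropped and runs of '/'s collapse to a single '/'.
func normalizeKey(key string) string {
	key = strings.TrimLeft(key, "/")
	return regexp.MustCompile(`/+`).ReplaceAllString(key, "/")
}

func main() {
	keys := []string{
		"//tf-testing-obj-readable", // resource key in the LeadingSlash test (random suffix omitted)
		"/tf-testing-obj-readable",  // data source obj2
		"tf-testing-obj-readable",   // data source obj1
		"first//second///third//",   // resource object1 in the MultipleSlashes test
		"first/second/third/",       // data source obj1
		"/first////second/third",    // resource object2 (no trailing slash)
	}
	for _, key := range keys {
		fmt.Printf("%-28q -> %q\n", key, normalizeKey(key))
	}
}
```

Running this shows that `//tf-testing-obj-readable`, `/tf-testing-obj-readable`, and `tf-testing-obj-readable` all normalize to the same key, matching the three data source lookups asserted in the tests.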
2 changes: 2 additions & 0 deletions website/docs/d/s3_bucket_object.html.markdown
@@ -85,3 +85,5 @@ In addition to all arguments above, the following attributes are exported:
* `version_id` - The latest version ID of the object returned.
* `website_redirect_location` - If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.
* `tags` - A mapping of tags assigned to the object.

-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so the values `/index.html` and `index.html` refer to the same S3 object, as do `first//second///third//` and `first/second/third/`.
2 changes: 2 additions & 0 deletions website/docs/r/s3_bucket_object.html.markdown
@@ -145,6 +145,8 @@ Default is `false`. This value should be set to `true` only if the bucket has S3

If no content is provided through `source`, `content` or `content_base64`, then the object will be empty.

-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so the values `/index.html` and `index.html` refer to the same S3 object, as do `first//second///third//` and `first/second/third/`.

## Attributes Reference

The following attributes are exported
