From 6995f2402e0647f2927307e6df420bf5878dcdc3 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Fri, 10 May 2024 12:09:53 -0500 Subject: [PATCH 01/71] skaff --- .../service/rekognition/stream_processor.go | 819 ++++++++++++++++++ .../rekognition/stream_processor_test.go | 332 +++++++ ...rekognition_stream_processor.html.markdown | 69 ++ 3 files changed, 1220 insertions(+) create mode 100644 internal/service/rekognition/stream_processor.go create mode 100644 internal/service/rekognition/stream_processor_test.go create mode 100644 website/docs/r/rekognition_stream_processor.html.markdown diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go new file mode 100644 index 000000000000..30dcb7ae1ee5 --- /dev/null +++ b/internal/service/rekognition/stream_processor.go @@ -0,0 +1,819 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package rekognition +// **PLEASE DELETE THIS AND ALL TIP COMMENTS BEFORE SUBMITTING A PR FOR REVIEW!** +// +// TIP: ==== INTRODUCTION ==== +// Thank you for trying the skaff tool! +// +// You have opted to include these helpful comments. They all include "TIP:" +// to help you find and remove them when you're done with them. +// +// While some aspects of this file are customized to your input, the +// scaffold tool does *not* look at the AWS API and ensure it has correct +// function, structure, and variable names. It makes guesses based on +// commonalities. You will need to make significant adjustments. +// +// In other words, as generated, this is a rough outline of the work you will +// need to do. If something doesn't make sense for your situation, get rid of +// it. + +import ( + // TIP: ==== IMPORTS ==== + // This is a common set of imports but not customized to your code since + // your code hasn't been written yet. Make sure you, your IDE, or + // goimports -w fixes these imports. 
+ // + // The provider linter wants your imports to be in two groups: first, + // standard library (i.e., "fmt" or "strings"), second, everything else. + // + // Also, AWS Go SDK v2 may handle nested structures differently than v1, + // using the services/rekognition/types package. If so, you'll + // need to import types and reference the nested types, e.g., as + // awstypes.. + "context" + "errors" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/rekognition" + awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" + "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/errs" + "github.com/hashicorp/terraform-provider-aws/internal/framework" + "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + "github.com/hashicorp/terraform-provider-aws/internal/tfresource" + "github.com/hashicorp/terraform-provider-aws/names" +) +// TIP: ==== FILE STRUCTURE ==== +// All resources should follow this basic outline. Improve this resource's +// maintainability by sticking to it. +// +// 1. Package declaration +// 2. Imports +// 3. 
Main resource struct with schema method +// 4. Create, read, update, delete methods (in that order) +// 5. Other functions (flatteners, expanders, waiters, finders, etc.) + +// Function annotations are used for resource registration to the Provider. DO NOT EDIT. +// @FrameworkResource("aws_rekognition_stream_processor", name="Stream Processor") +func newResourceStreamProcessor(_ context.Context) (resource.ResourceWithConfigure, error) { + r := &resourceStreamProcessor{} + + // TIP: ==== CONFIGURABLE TIMEOUTS ==== + // Users can configure timeout lengths but you need to use the times they + // provide. Access the timeout they configure (or the defaults) using, + // e.g., r.CreateTimeout(ctx, plan.Timeouts) (see below). The times here are + // the defaults if they don't configure timeouts. + r.SetDefaultCreateTimeout(30 * time.Minute) + r.SetDefaultUpdateTimeout(30 * time.Minute) + r.SetDefaultDeleteTimeout(30 * time.Minute) + + return r, nil +} + +const ( + ResNameStreamProcessor = "Stream Processor" +) + +type resourceStreamProcessor struct { + framework.ResourceWithConfigure + framework.WithTimeouts +} + +func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = "aws_rekognition_stream_processor" +} + +// TIP: ==== SCHEMA ==== +// In the schema, add each of the attributes in snake case (e.g., +// delete_automated_backups). +// +// Formatting rules: +// * Alphabetize attributes to make them easier to find. +// * Do not add a blank line between attributes. +// +// Attribute basics: +// * If a user can provide a value ("configure a value") for an +// attribute (e.g., instances = 5), we call the attribute an +// "argument." +// * You change the way users interact with attributes using: +// - Required +// - Optional +// - Computed +// * There are only four valid combinations: +// +// 1. Required only - the user must provide a value +// Required: true, +// +// 2. 
Optional only - the user can configure or omit a value; do not +// use Default or DefaultFunc +// Optional: true, +// +// 3. Computed only - the provider can provide a value but the user +// cannot, i.e., read-only +// Computed: true, +// +// 4. Optional AND Computed - the provider or user can provide a value; +// use this combination if you are using Default +// Optional: true, +// Computed: true, +// +// You will typically find arguments in the input struct +// (e.g., CreateDBInstanceInput) for the create operation. Sometimes +// they are only in the input struct (e.g., ModifyDBInstanceInput) for +// the modify operation. +// +// For more about schema options, visit +// https://developer.hashicorp.com/terraform/plugin/framework/handling-data/schemas?page=schemas +func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "arn": framework.ARNAttributeComputedOnly(), + "description": schema.StringAttribute{ + Optional: true, + }, + "id": framework.IDAttribute(), + "name": schema.StringAttribute{ + Required: true, + // TIP: ==== PLAN MODIFIERS ==== + // Plan modifiers were introduced with Plugin-Framework to provide a mechanism + // for adjusting planned changes prior to apply. The planmodifier subpackage + // provides built-in modifiers for many common use cases such as + // requiring replacement on a value change ("ForceNew: true" in Plugin-SDK + // resources). 
+ // + // See more: + // https://developer.hashicorp.com/terraform/plugin/framework/resources/plan-modification + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "type": schema.StringAttribute{ + Required: true, + }, + }, + Blocks: map[string]schema.Block{ + "complex_argument": schema.ListNestedBlock{ + // TIP: ==== LIST VALIDATORS ==== + // List and set validators take the place of MaxItems and MinItems in + // Plugin-Framework based resources. Use listvalidator.SizeAtLeast(1) to + // make a nested object required. Similar to Plugin-SDK, complex objects + // can be represented as lists or sets with listvalidator.SizeAtMost(1). + // + // For a complete mapping of Plugin-SDK to Plugin-Framework schema fields, + // see: + // https://developer.hashicorp.com/terraform/plugin/framework/migrating/attributes-blocks/blocks + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + }, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "nested_required": schema.StringAttribute{ + Required: true, + }, + "nested_computed": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + }, + }, + }, + "timeouts": timeouts.Block(ctx, timeouts.Opts{ + Create: true, + Update: true, + Delete: true, + }), + }, + } +} + +func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + // TIP: ==== RESOURCE CREATE ==== + // Generally, the Create function should do the following things. Make + // sure there is a good reason if you don't do one of these. + // + // 1. Get a client connection to the relevant service + // 2. Fetch the plan + // 3. Populate a create input structure + // 4. Call the AWS create/put function + // 5. 
Using the output from the create function, set the minimum arguments + // and attributes for the Read function to work, as well as any computed + // only attributes. + // 6. Use a waiter to wait for create to complete + // 7. Save the request plan to response state + + // TIP: -- 1. Get a client connection to the relevant service + conn := r.Meta().RekognitionClient(ctx) + + // TIP: -- 2. Fetch the plan + var plan resourceStreamProcessorData + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + // TIP: -- 3. Populate a create input structure + in := &rekognition.CreateStreamProcessorInput{ + // TIP: Mandatory or fields that will always be present can be set when + // you create the Input structure. (Replace these with real fields.) + StreamProcessorName: aws.String(plan.Name.ValueString()), + StreamProcessorType: aws.String(plan.Type.ValueString()), + } + + if !plan.Description.IsNull() { + // TIP: Optional fields should be set based on whether or not they are + // used. + in.Description = aws.String(plan.Description.ValueString()) + } + if !plan.ComplexArgument.IsNull() { + // TIP: Use an expander to assign a complex argument. The elements must be + // deserialized into the appropriate struct before being passed to the expander. + var tfList []complexArgumentData + resp.Diagnostics.Append(plan.ComplexArgument.ElementsAs(ctx, &tfList, false)...) + if resp.Diagnostics.HasError() { + return + } + + in.ComplexArgument = expandComplexArgument(tfList) + } + + // TIP: -- 4. Call the AWS create function + out, err := conn.CreateStreamProcessor(ctx, in) + if err != nil { + // TIP: Since ID has not been set yet, you cannot use plan.ID.String() + // in error messages at this point. 
+ resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionCreating, ResNameStreamProcessor, plan.Name.String(), err), + err.Error(), + ) + return + } + if out == nil || out.StreamProcessor == nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionCreating, ResNameStreamProcessor, plan.Name.String(), nil), + errors.New("empty output").Error(), + ) + return + } + + // TIP: -- 5. Using the output from the create function, set the minimum attributes + plan.ARN = flex.StringToFramework(ctx, out.StreamProcessor.Arn) + plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) + + // TIP: -- 6. Use a waiter to wait for create to complete + createTimeout := r.CreateTimeout(ctx, plan.Timeouts) + _, err = waitStreamProcessorCreated(ctx, conn, plan.ID.ValueString(), createTimeout) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.Name.String(), err), + err.Error(), + ) + return + } + + // TIP: -- 7. Save the request plan to response state + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) +} + +func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + // TIP: ==== RESOURCE READ ==== + // Generally, the Read function should do the following things. Make + // sure there is a good reason if you don't do one of these. + // + // 1. Get a client connection to the relevant service + // 2. Fetch the state + // 3. Get the resource from AWS + // 4. Remove resource from state if it is not found + // 5. Set the arguments and attributes + // 6. Set the state + + // TIP: -- 1. Get a client connection to the relevant service + conn := r.Meta().RekognitionClient(ctx) + + // TIP: -- 2. Fetch the state + var state resourceStreamProcessorData + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) 
+ if resp.Diagnostics.HasError() { + return + } + + // TIP: -- 3. Get the resource from AWS using an API Get, List, or Describe- + // type function, or, better yet, using a finder. + out, err := findStreamProcessorByID(ctx, conn, state.ID.ValueString()) + // TIP: -- 4. Remove resource from state if it is not found + if tfresource.NotFound(err) { + resp.State.RemoveResource(ctx) + return + } + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.ID.String(), err), + err.Error(), + ) + return + } + + // TIP: -- 5. Set the arguments and attributes + // + // For simple data types (i.e., schema.StringAttribute, schema.BoolAttribute, + // schema.Int64Attribute, and schema.Float64Attribue), simply setting the + // appropriate data struct field is sufficient. The flex package implements + // helpers for converting between Go and Plugin-Framework types seamlessly. No + // error or nil checking is necessary. + // + // However, there are some situations where more handling is needed such as + // complex data types (e.g., schema.ListAttribute, schema.SetAttribute). In + // these cases the flatten function may have a diagnostics return value, which + // should be appended to resp.Diagnostics. + state.ARN = flex.StringToFramework(ctx, out.Arn) + state.ID = flex.StringToFramework(ctx, out.StreamProcessorId) + state.Name = flex.StringToFramework(ctx, out.StreamProcessorName) + state.Type = flex.StringToFramework(ctx, out.StreamProcessorType) + + // TIP: Setting a complex type. + complexArgument, d := flattenComplexArgument(ctx, out.ComplexArgument) + resp.Diagnostics.Append(d...) + state.ComplexArgument = complexArgument + + // TIP: -- 6. Set the state + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) 
+} + +func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + // TIP: ==== RESOURCE UPDATE ==== + // Not all resources have Update functions. There are a few reasons: + // a. The AWS API does not support changing a resource + // b. All arguments have RequiresReplace() plan modifiers + // c. The AWS API uses a create call to modify an existing resource + // + // In the cases of a. and b., the resource will not have an update method + // defined. In the case of c., Update and Create can be refactored to call + // the same underlying function. + // + // The rest of the time, there should be an Update function and it should + // do the following things. Make sure there is a good reason if you don't + // do one of these. + // + // 1. Get a client connection to the relevant service + // 2. Fetch the plan and state + // 3. Populate a modify input structure and check for changes + // 4. Call the AWS modify/update function + // 5. Use a waiter to wait for update to complete + // 6. Save the request plan to response state + // TIP: -- 1. Get a client connection to the relevant service + conn := r.Meta().RekognitionClient(ctx) + + // TIP: -- 2. Fetch the plan + var plan, state resourceStreamProcessorData + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + // TIP: -- 3. Populate a modify input structure and check for changes + if !plan.Name.Equal(state.Name) || + !plan.Description.Equal(state.Description) || + !plan.ComplexArgument.Equal(state.ComplexArgument) || + !plan.Type.Equal(state.Type) { + + in := &rekognition.UpdateStreamProcessorInput{ + // TIP: Mandatory or fields that will always be present can be set when + // you create the Input structure. (Replace these with real fields.) 
+ StreamProcessorId: aws.String(plan.ID.ValueString()), + StreamProcessorName: aws.String(plan.Name.ValueString()), + StreamProcessorType: aws.String(plan.Type.ValueString()), + } + + if !plan.Description.IsNull() { + // TIP: Optional fields should be set based on whether or not they are + // used. + in.Description = aws.String(plan.Description.ValueString()) + } + if !plan.ComplexArgument.IsNull() { + // TIP: Use an expander to assign a complex argument. The elements must be + // deserialized into the appropriate struct before being passed to the expander. + var tfList []complexArgumentData + resp.Diagnostics.Append(plan.ComplexArgument.ElementsAs(ctx, &tfList, false)...) + if resp.Diagnostics.HasError() { + return + } + + in.ComplexArgument = expandComplexArgument(tfList) + } + + // TIP: -- 4. Call the AWS modify/update function + out, err := conn.UpdateStreamProcessor(ctx, in) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.ID.String(), err), + err.Error(), + ) + return + } + if out == nil || out.StreamProcessor == nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.ID.String(), nil), + errors.New("empty output").Error(), + ) + return + } + + // TIP: Using the output from the update function, re-set any computed attributes + plan.ARN = flex.StringToFramework(ctx, out.StreamProcessor.Arn) + plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) + } + + + // TIP: -- 5. 
Use a waiter to wait for update to complete + updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) + _, err := waitStreamProcessorUpdated(ctx, conn, plan.ID.ValueString(), updateTimeout) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForUpdate, ResNameStreamProcessor, plan.ID.String(), err), + err.Error(), + ) + return + } + + + // TIP: -- 6. Save the request plan to response state + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + // TIP: ==== RESOURCE DELETE ==== + // Most resources have Delete functions. There are rare situations + // where you might not need a delete: + // a. The AWS API does not provide a way to delete the resource + // b. The point of your resource is to perform an action (e.g., reboot a + // server) and deleting serves no purpose. + // + // The Delete function should do the following things. Make sure there + // is a good reason if you don't do one of these. + // + // 1. Get a client connection to the relevant service + // 2. Fetch the state + // 3. Populate a delete input structure + // 4. Call the AWS delete function + // 5. Use a waiter to wait for delete to complete + // TIP: -- 1. Get a client connection to the relevant service + conn := r.Meta().RekognitionClient(ctx) + + // TIP: -- 2. Fetch the state + var state resourceStreamProcessorData + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + // TIP: -- 3. Populate a delete input structure + in := &rekognition.DeleteStreamProcessorInput{ + StreamProcessorId: aws.String(state.ID.ValueString()), + } + + // TIP: -- 4. Call the AWS delete function + _, err := conn.DeleteStreamProcessor(ctx, in) + // TIP: On rare occassions, the API returns a not found error after deleting a + // resource. 
If that happens, we don't want it to show up as an error. + if err != nil { + if errs.IsA[*awstypes.ResourceNotFoundException](err) { + return + } + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.ID.String(), err), + err.Error(), + ) + return + } + + // TIP: -- 5. Use a waiter to wait for delete to complete + deleteTimeout := r.DeleteTimeout(ctx, state.Timeouts) + _, err = waitStreamProcessorDeleted(ctx, conn, state.ID.ValueString(), deleteTimeout) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.ID.String(), err), + err.Error(), + ) + return + } +} + +// TIP: ==== TERRAFORM IMPORTING ==== +// If Read can get all the information it needs from the Identifier +// (i.e., path.Root("id")), you can use the PassthroughID importer. Otherwise, +// you'll need a custom import function. +// +// See more: +// https://developer.hashicorp.com/terraform/plugin/framework/resources/import +func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + + +// TIP: ==== STATUS CONSTANTS ==== +// Create constants for states and statuses if the service does not +// already have suitable constants. We prefer that you use the constants +// provided in the service if available (e.g., awstypes.StatusInProgress). +const ( + statusChangePending = "Pending" + statusDeleting = "Deleting" + statusNormal = "Normal" + statusUpdated = "Updated" +) + +// TIP: ==== WAITERS ==== +// Some resources of some services have waiters provided by the AWS API. +// Unless they do not work properly, use them rather than defining new ones +// here. +// +// Sometimes we define the wait, status, and find functions in separate +// files, wait.go, status.go, and find.go. 
Follow the pattern set out in the +// service and define these where it makes the most sense. +// +// If these functions are used in the _test.go file, they will need to be +// exported (i.e., capitalized). +// +// You will need to adjust the parameters and names to fit the service. +func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*awstypes.StreamProcessor, error) { + stateConf := &retry.StateChangeConf{ + Pending: []string{}, + Target: []string{statusNormal}, + Refresh: statusStreamProcessor(ctx, conn, id), + Timeout: timeout, + NotFoundChecks: 20, + ContinuousTargetOccurence: 2, + } + + outputRaw, err := stateConf.WaitForStateContext(ctx) + if out, ok := outputRaw.(*rekognition.StreamProcessor); ok { + return out, err + } + + return nil, err +} + +// TIP: It is easier to determine whether a resource is updated for some +// resources than others. The best case is a status flag that tells you when +// the update has been fully realized. Other times, you can check to see if a +// key resource argument is updated to a new value or not. +func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*awstypes.StreamProcessor, error) { + stateConf := &retry.StateChangeConf{ + Pending: []string{statusChangePending}, + Target: []string{statusUpdated}, + Refresh: statusStreamProcessor(ctx, conn, id), + Timeout: timeout, + NotFoundChecks: 20, + ContinuousTargetOccurence: 2, + } + + outputRaw, err := stateConf.WaitForStateContext(ctx) + if out, ok := outputRaw.(*rekognition.StreamProcessor); ok { + return out, err + } + + return nil, err +} + +// TIP: A deleted waiter is almost like a backwards created waiter. There may +// be additional pending states, however. 
+func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*awstypes.StreamProcessor, error) { + stateConf := &retry.StateChangeConf{ + Pending: []string{statusDeleting, statusNormal}, + Target: []string{}, + Refresh: statusStreamProcessor(ctx, conn, id), + Timeout: timeout, + } + + outputRaw, err := stateConf.WaitForStateContext(ctx) + if out, ok := outputRaw.(*rekognition.StreamProcessor); ok { + return out, err + } + + return nil, err +} + +// TIP: ==== STATUS ==== +// The status function can return an actual status when that field is +// available from the API (e.g., out.Status). Otherwise, you can use custom +// statuses to communicate the states of the resource. +// +// Waiters consume the values returned by status functions. Design status so +// that it can be reused by a create, update, and delete waiter, if possible. +func statusStreamProcessor(ctx context.Context, conn *rekognition.Client, id string) retry.StateRefreshFunc { + return func() (interface{}, string, error) { + out, err := findStreamProcessorByID(ctx, conn, id) + if tfresource.NotFound(err) { + return nil, "", nil + } + + if err != nil { + return nil, "", err + } + + return out, aws.ToString(out.Status), nil + } +} + +// TIP: ==== FINDERS ==== +// The find function is not strictly necessary. You could do the API +// request from the status function. However, we have found that find often +// comes in handy in other places besides the status function. As a result, it +// is good practice to define it separately. 
+func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, id string) (*awstypes.StreamProcessor, error) { + in := &rekognition.GetStreamProcessorInput{ + Id: aws.String(id), + } + + out, err := conn.GetStreamProcessor(ctx, in) + if err != nil { + if errs.IsA[*awstypes.ResourceNotFoundException](err) { + return nil, &retry.NotFoundError{ + LastError: err, + LastRequest: in, + } + } + + return nil, err + } + + if out == nil || out.StreamProcessor == nil { + return nil, tfresource.NewEmptyResultError(in) + } + + return out.StreamProcessor, nil +} + +// TIP: ==== FLEX ==== +// Flatteners and expanders ("flex" functions) help handle complex data +// types. Flatteners take an API data type and return the equivalent Plugin-Framework +// type. In other words, flatteners translate from AWS -> Terraform. +// +// On the other hand, expanders take a Terraform data structure and return +// something that you can send to the AWS API. In other words, expanders +// translate from Terraform -> AWS. +// +// See more: +// https://hashicorp.github.io/terraform-provider-aws/data-handling-and-conversion/ +func flattenComplexArgument(ctx context.Context, apiObject *awstypes.ComplexArgument) (types.List, diag.Diagnostics) { + var diags diag.Diagnostics + elemType := types.ObjectType{AttrTypes: complexArgumentAttrTypes} + + if apiObject == nil { + return types.ListNull(elemType), diags + } + + obj := map[string]attr.Value{ + "nested_required": flex.StringValueToFramework(ctx, apiObject.NestedRequired), + "nested_optional": flex.StringValueToFramework(ctx, apiObject.NestedOptional), + } + objVal, d := types.ObjectValue(complexArgumentAttrTypes, obj) + diags.Append(d...) + + listVal, d := types.ListValue(elemType, []attr.Value{objVal}) + diags.Append(d...) + + return listVal, diags +} + +// TIP: Often the AWS API will return a slice of structures in response to a +// request for information. 
Sometimes you will have set criteria (e.g., the ID) +// that means you'll get back a one-length slice. This plural function works +// brilliantly for that situation too. +func flattenComplexArguments(ctx context.Context, apiObjects []*awstypes.ComplexArgument) (types.List, diag.Diagnostics) { + var diags diag.Diagnostics + elemType := types.ObjectType{AttrTypes: complexArgumentAttrTypes} + + if len(apiObjects) == 0 { + return types.ListNull(elemType), diags + } + + elems := []attr.Value{} + for _, apiObject := range apiObjects { + if apiObject == nil { + continue + } + + obj := map[string]attr.Value{ + "nested_required": flex.StringValueToFramework(ctx, apiObject.NestedRequired), + "nested_optional": flex.StringValueToFramework(ctx, apiObject.NestedOptional), + } + objVal, d := types.ObjectValue(complexArgumentAttrTypes, obj) + diags.Append(d...) + + elems = append(elems, objVal) + } + + listVal, d := types.ListValue(elemType, elems) + diags.Append(d...) + + return listVal, diags +} + +// TIP: Remember, as mentioned above, expanders take a Terraform data structure +// and return something that you can send to the AWS API. In other words, +// expanders translate from Terraform -> AWS. +// +// See more: +// https://hashicorp.github.io/terraform-provider-aws/data-handling-and-conversion/ +func expandComplexArgument(tfList []complexArgumentData) *awstypes.ComplexArgument { + if len(tfList) == 0 { + return nil + } + + tfObj := tfList[0] + apiObject := &awstypes.ComplexArgument{ + NestedRequired: aws.String(tfObj.NestedRequired.ValueString()), + } + if !tfObj.NestedOptional.IsNull() { + apiObject.NestedOptional = aws.String(tfObj.NestedOptional.ValueString()) + } + + return apiObject +} + +// TIP: Even when you have a list with max length of 1, this plural function +// works brilliantly. However, if the AWS API takes a structure rather than a +// slice of structures, you will not need it. 
+func expandComplexArguments(tfList []complexArgumentData) []*rekognition.ComplexArgument { + // TIP: The AWS API can be picky about whether you send a nil or zero- + // length for an argument that should be cleared. For example, in some + // cases, if you send a nil value, the AWS API interprets that as "make no + // changes" when what you want to say is "remove everything." Sometimes + // using a zero-length list will cause an error. + // + // As a result, here are two options. Usually, option 1, nil, will work as + // expected, clearing the field. But, test going from something to nothing + // to make sure it works. If not, try the second option. + // TIP: Option 1: Returning nil for zero-length list + if len(tfList) == 0 { + return nil + } + var apiObject []*awstypes.ComplexArgument + // TIP: Option 2: Return zero-length list for zero-length list. If option 1 does + // not work, after testing going from something to nothing (if that is + // possible), uncomment out the next line and remove option 1. + // + // apiObject := make([]*rekognition.ComplexArgument, 0) + + for _, tfObj := range tfList { + item := &rekognition.ComplexArgument{ + NestedRequired: aws.String(tfObj.NestedRequired.ValueString()), + } + if !tfObj.NestedOptional.IsNull() { + item.NestedOptional = aws.String(tfObj.NestedOptional.ValueString()) + } + + apiObject = append(apiObject, item) + } + + return apiObject +} + +// TIP: ==== DATA STRUCTURES ==== +// With Terraform Plugin-Framework configurations are deserialized into +// Go types, providing type safety without the need for type assertions. +// These structs should match the schema definition exactly, and the `tfsdk` +// tag value should match the attribute name. +// +// Nested objects are represented in their own data struct. These will +// also have a corresponding attribute type mapping for use inside flex +// functions. 
+// +// See more: +// https://developer.hashicorp.com/terraform/plugin/framework/handling-data/accessing-values +type resourceStreamProcessorData struct { + ARN types.String `tfsdk:"arn"` + ComplexArgument types.List `tfsdk:"complex_argument"` + Description types.String `tfsdk:"description"` + ID types.String `tfsdk:"id"` + Name types.String `tfsdk:"name"` + Timeouts timeouts.Value `tfsdk:"timeouts"` + Type types.String `tfsdk:"type"` +} + +type complexArgumentData struct { + NestedRequired types.String `tfsdk:"nested_required"` + NestedOptional types.String `tfsdk:"nested_optional"` +} + +var complexArgumentAttrTypes = map[string]attr.Type{ + "nested_required": types.StringType, + "nested_optional": types.StringType, +} diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go new file mode 100644 index 000000000000..979b5a05d587 --- /dev/null +++ b/internal/service/rekognition/stream_processor_test.go @@ -0,0 +1,332 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package rekognition_test +// **PLEASE DELETE THIS AND ALL TIP COMMENTS BEFORE SUBMITTING A PR FOR REVIEW!** +// +// TIP: ==== INTRODUCTION ==== +// Thank you for trying the skaff tool! +// +// You have opted to include these helpful comments. They all include "TIP:" +// to help you find and remove them when you're done with them. +// +// While some aspects of this file are customized to your input, the +// scaffold tool does *not* look at the AWS API and ensure it has correct +// function, structure, and variable names. It makes guesses based on +// commonalities. You will need to make significant adjustments. +// +// In other words, as generated, this is a rough outline of the work you will +// need to do. If something doesn't make sense for your situation, get rid of +// it. 
+ +import ( + // TIP: ==== IMPORTS ==== + // This is a common set of imports but not customized to your code since + // your code hasn't been written yet. Make sure you, your IDE, or + // goimports -w fixes these imports. + // + // The provider linter wants your imports to be in two groups: first, + // standard library (i.e., "fmt" or "strings"), second, everything else. + // + // Also, AWS Go SDK v2 may handle nested structures differently than v1, + // using the services/rekognition/types package. If so, you'll + // need to import types and reference the nested types, e.g., as + // types.. + "context" + "errors" + "fmt" + "testing" + + "github.com/YakDriver/regexache" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/rekognition" + "github.com/aws/aws-sdk-go-v2/service/rekognition/types" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/errs" + "github.com/hashicorp/terraform-provider-aws/names" + + // TIP: You will often need to import the package that this test file lives + // in. Since it is in the "test" context, it must import the package to use + // any normal context constants, variables, or functions. + tfrekognition "github.com/hashicorp/terraform-provider-aws/internal/service/rekognition" +) + +// TIP: File Structure. The basic outline for all test files should be as +// follows. Improve this resource's maintainability by following this +// outline. +// +// 1. Package declaration (add "_test" since this is a test file) +// 2. Imports +// 3. Unit tests +// 4. Basic test +// 5. Disappears test +// 6. All the other tests +// 7. 
Helper functions (exists, destroy, check, etc.) +// 8. Functions that return Terraform configurations + +// TIP: ==== UNIT TESTS ==== +// This is an example of a unit test. Its name is not prefixed with +// "TestAcc" like an acceptance test. +// +// Unlike acceptance tests, unit tests do not access AWS and are focused on a +// function (or method). Because of this, they are quick and cheap to run. +// +// In designing a resource's implementation, isolate complex bits from AWS bits +// so that they can be tested through a unit test. We encourage more unit tests +// in the provider. +// +// Cut and dry functions using well-used patterns, like typical flatteners and +// expanders, don't need unit testing. However, if they are complex or +// intricate, they should be unit tested. +func TestStreamProcessorExampleUnitTest(t *testing.T) { + t.Parallel() + + testCases := []struct { + TestName string + Input string + Expected string + Error bool + }{ + { + TestName: "empty", + Input: "", + Expected: "", + Error: true, + }, + { + TestName: "descriptive name", + Input: "some input", + Expected: "some output", + Error: false, + }, + { + TestName: "another descriptive name", + Input: "more input", + Expected: "more output", + Error: false, + }, + } + + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.TestName, func(t *testing.T) { + t.Parallel() + got, err := tfrekognition.FunctionFromResource(testCase.Input) + + if err != nil && !testCase.Error { + t.Errorf("got error (%s), expected no error", err) + } + + if err == nil && testCase.Error { + t.Errorf("got (%s) and no error, expected error", got) + } + + if got != testCase.Expected { + t.Errorf("got %s, expected %s", got, testCase.Expected) + } + }) + } +} + +// TIP: ==== ACCEPTANCE TESTS ==== +// This is an example of a basic acceptance test. This should test as much of +// standard functionality of the resource as possible, and test importing, if +// applicable. 
We prefix its name with "TestAcc", the service, and the +// resource name. +// +// Acceptance test access AWS and cost money to run. +func TestAccRekognitionStreamProcessor_basic(t *testing.T) { + ctx := acctest.Context(t) + // TIP: This is a long-running test guard for tests that run longer than + // 300s (5 min) generally. + if testing.Short() { + t.Skip("skipping long-running test in short mode") + } + + var streamprocessor rekognition.DescribeStreamProcessorResponse + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_basic(rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, "auto_minor_version_upgrade", "false"), + resource.TestCheckResourceAttrSet(resourceName, "maintenance_window_start_time.0.day_of_week"), + resource.TestCheckTypeSetElemNestedAttrs(resourceName, "user.*", map[string]string{ + "console_access": "false", + "groups.#": "0", + "username": "Test", + "password": "TestTest1234", + }), + acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "rekognition", regexache.MustCompile(`streamprocessor:+.`)), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"apply_immediately", "user"}, + }, + }, + }) +} + +func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { + ctx := acctest.Context(t) + if testing.Short() { + t.Skip("skipping 
long-running test in short mode") + } + + var streamprocessor rekognition.DescribeStreamProcessorResponse + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_basic(rName, testAccStreamProcessorVersionNewer), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + // TIP: The Plugin-Framework disappears helper is similar to the Plugin-SDK version, + // but expects a new resource factory function as the third argument. To expose this + // private function to the testing package, you may need to add a line like the following + // to exports_test.go: + // + // var ResourceStreamProcessor = newResourceStreamProcessor + acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { + return func(s *terraform.State) error { + conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "aws_rekognition_stream_processor" { + continue + } + + input := &rekognition.DescribeStreamProcessorInput{ + StreamProcessorId: aws.String(rs.Primary.ID), + } + _, err := conn.DescribeStreamProcessor(ctx, &rekognition.DescribeStreamProcessorInput{ + StreamProcessorId: aws.String(rs.Primary.ID), + }) + if errs.IsA[*types.ResourceNotFoundException](err){ + 
return nil + } + if err != nil { + return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) + } + + return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, errors.New("not destroyed")) + } + + return nil + } +} + +func testAccCheckStreamProcessorExists(ctx context.Context, name string, streamprocessor *rekognition.DescribeStreamProcessorResponse) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[name] + if !ok { + return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, name, errors.New("not found")) + } + + if rs.Primary.ID == "" { + return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, name, errors.New("not set")) + } + + conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) + resp, err := conn.DescribeStreamProcessor(ctx, &rekognition.DescribeStreamProcessorInput{ + StreamProcessorId: aws.String(rs.Primary.ID), + }) + + if err != nil { + return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) + } + + *streamprocessor = *resp + + return nil + } +} + +func testAccPreCheck(ctx context.Context, t *testing.T) { + conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) + + input := &rekognition.ListStreamProcessorsInput{} + _, err := conn.ListStreamProcessors(ctx, input) + + if acctest.PreCheckSkipError(err) { + t.Skipf("skipping acceptance testing: %s", err) + } + if err != nil { + t.Fatalf("unexpected PreCheck error: %s", err) + } +} + +func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorResponse) resource.TestCheckFunc { + return func(s *terraform.State) error { + if before, after := 
aws.ToString(before.StreamProcessorId), aws.ToString(after.StreamProcessorId); before != after { + return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(before.StreamProcessorId), errors.New("recreated")) + } + + return nil + } +} + +func testAccStreamProcessorConfig_basic(rName, version string) string { + return fmt.Sprintf(` +resource "aws_security_group" "test" { + name = %[1]q +} + +resource "aws_rekognition_stream_processor" "test" { + stream_processor_name = %[1]q + engine_type = "ActiveRekognition" + engine_version = %[2]q + host_instance_type = "rekognition.t2.micro" + security_groups = [aws_security_group.test.id] + authentication_strategy = "simple" + storage_type = "efs" + + logs { + general = true + } + + user { + username = "Test" + password = "TestTest1234" + } +} +`, rName, version) +} diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown new file mode 100644 index 000000000000..c1db9ef586d4 --- /dev/null +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Rekognition" +layout: "aws" +page_title: "AWS: aws_rekognition_stream_processor" +description: |- + Terraform resource for managing an AWS Rekognition Stream Processor. +--- +` +# Resource: aws_rekognition_stream_processor + +Terraform resource for managing an AWS Rekognition Stream Processor. + +## Example Usage + +### Basic Usage + +```terraform +resource "aws_rekognition_stream_processor" "example" { +} +``` + +## Argument Reference + +The following arguments are required: + +* `example_arg` - (Required) Concise argument description. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. 
+ +The following arguments are optional: + +* `optional_arg` - (Optional) Concise argument description. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Stream Processor. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. +* `example_attribute` - Concise description. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) +* `update` - (Default `180m`) +* `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Rekognition Stream Processor using the `example_id_arg`. For example: + +```terraform +import { + to = aws_rekognition_stream_processor.example + id = "stream_processor-id-12345678" +} +``` + +Using `terraform import`, import Rekognition Stream Processor using the `example_id_arg`. 
For example: + +```console +% terraform import aws_rekognition_stream_processor.example stream_processor-id-12345678 +``` From d5ade846b9bbed1ce1b9e4136d53198f3e332496 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Fri, 10 May 2024 12:41:28 -0500 Subject: [PATCH 02/71] build out resource --- .../service/rekognition/stream_processor.go | 577 ++++-------------- 1 file changed, 126 insertions(+), 451 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 30dcb7ae1ee5..25abcf5e78e7 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -40,15 +40,11 @@ import ( "github.com/aws/aws-sdk-go-v2/service/rekognition" awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" - "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" - "github.com/hashicorp/terraform-plugin-framework/attr" - "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/create" @@ -57,6 +53,8 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/names" + tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" + ) // TIP: ==== FILE 
STRUCTURE ==== // All resources should follow this basic outline. Improve this resource's @@ -98,47 +96,6 @@ func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.Metad resp.TypeName = "aws_rekognition_stream_processor" } -// TIP: ==== SCHEMA ==== -// In the schema, add each of the attributes in snake case (e.g., -// delete_automated_backups). -// -// Formatting rules: -// * Alphabetize attributes to make them easier to find. -// * Do not add a blank line between attributes. -// -// Attribute basics: -// * If a user can provide a value ("configure a value") for an -// attribute (e.g., instances = 5), we call the attribute an -// "argument." -// * You change the way users interact with attributes using: -// - Required -// - Optional -// - Computed -// * There are only four valid combinations: -// -// 1. Required only - the user must provide a value -// Required: true, -// -// 2. Optional only - the user can configure or omit a value; do not -// use Default or DefaultFunc -// Optional: true, -// -// 3. Computed only - the provider can provide a value but the user -// cannot, i.e., read-only -// Computed: true, -// -// 4. Optional AND Computed - the provider or user can provide a value; -// use this combination if you are using Default -// Optional: true, -// Computed: true, -// -// You will typically find arguments in the input struct -// (e.g., CreateDBInstanceInput) for the create operation. Sometimes -// they are only in the input struct (e.g., ModifyDBInstanceInput) for -// the modify operation. 
-// -// For more about schema options, visit -// https://developer.hashicorp.com/terraform/plugin/framework/handling-data/schemas?page=schemas func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ @@ -149,15 +106,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "id": framework.IDAttribute(), "name": schema.StringAttribute{ Required: true, - // TIP: ==== PLAN MODIFIERS ==== - // Plan modifiers were introduced with Plugin-Framework to provide a mechanism - // for adjusting planned changes prior to apply. The planmodifier subpackage - // provides built-in modifiers for many common use cases such as - // requiring replacement on a value change ("ForceNew: true" in Plugin-SDK - // resources). - // - // See more: - // https://developer.hashicorp.com/terraform/plugin/framework/resources/plan-modification PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, @@ -165,41 +113,43 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "type": schema.StringAttribute{ Required: true, }, + names.AttrTags: tftags.TagsAttribute(), + names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), }, - Blocks: map[string]schema.Block{ - "complex_argument": schema.ListNestedBlock{ - // TIP: ==== LIST VALIDATORS ==== - // List and set validators take the place of MaxItems and MinItems in - // Plugin-Framework based resources. Use listvalidator.SizeAtLeast(1) to - // make a nested object required. Similar to Plugin-SDK, complex objects - // can be represented as lists or sets with listvalidator.SizeAtMost(1). 
- // - // For a complete mapping of Plugin-SDK to Plugin-Framework schema fields, - // see: - // https://developer.hashicorp.com/terraform/plugin/framework/migrating/attributes-blocks/blocks - Validators: []validator.List{ - listvalidator.SizeAtMost(1), - }, - NestedObject: schema.NestedBlockObject{ - Attributes: map[string]schema.Attribute{ - "nested_required": schema.StringAttribute{ - Required: true, - }, - "nested_computed": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.UseStateForUnknown(), - }, - }, - }, - }, - }, - "timeouts": timeouts.Block(ctx, timeouts.Opts{ - Create: true, - Update: true, - Delete: true, - }), - }, + // Blocks: map[string]schema.Block{ + // "complex_argument": schema.ListNestedBlock{ + // // TIP: ==== LIST VALIDATORS ==== + // // List and set validators take the place of MaxItems and MinItems in + // // Plugin-Framework based resources. Use listvalidator.SizeAtLeast(1) to + // // make a nested object required. Similar to Plugin-SDK, complex objects + // // can be represented as lists or sets with listvalidator.SizeAtMost(1). 
+ // // + // // For a complete mapping of Plugin-SDK to Plugin-Framework schema fields, + // // see: + // // https://developer.hashicorp.com/terraform/plugin/framework/migrating/attributes-blocks/blocks + // Validators: []validator.List{ + // listvalidator.SizeAtMost(1), + // }, + // NestedObject: schema.NestedBlockObject{ + // Attributes: map[string]schema.Attribute{ + // "nested_required": schema.StringAttribute{ + // Required: true, + // }, + // "nested_computed": schema.StringAttribute{ + // Computed: true, + // PlanModifiers: []planmodifier.String{ + // stringplanmodifier.UseStateForUnknown(), + // }, + // }, + // }, + // }, + // }, + // "timeouts": timeouts.Block(ctx, timeouts.Opts{ + // Create: true, + // Update: true, + // Delete: true, + // }), + // }, } } @@ -230,28 +180,25 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat // TIP: -- 3. Populate a create input structure in := &rekognition.CreateStreamProcessorInput{ - // TIP: Mandatory or fields that will always be present can be set when - // you create the Input structure. (Replace these with real fields.) - StreamProcessorName: aws.String(plan.Name.ValueString()), - StreamProcessorType: aws.String(plan.Type.ValueString()), - } - - if !plan.Description.IsNull() { - // TIP: Optional fields should be set based on whether or not they are - // used. - in.Description = aws.String(plan.Description.ValueString()) - } - if !plan.ComplexArgument.IsNull() { - // TIP: Use an expander to assign a complex argument. The elements must be - // deserialized into the appropriate struct before being passed to the expander. - var tfList []complexArgumentData - resp.Diagnostics.Append(plan.ComplexArgument.ElementsAs(ctx, &tfList, false)...) 
- if resp.Diagnostics.HasError() { - return - } - - in.ComplexArgument = expandComplexArgument(tfList) - } + Name: aws.String(plan.Name.ValueString()), + } + + // if !plan.Description.IsNull() { + // // TIP: Optional fields should be set based on whether or not they are + // // used. + // in.Description = aws.String(plan.Description.ValueString()) + // } + // if !plan.ComplexArgument.IsNull() { + // // TIP: Use an expander to assign a complex argument. The elements must be + // // deserialized into the appropriate struct before being passed to the expander. + // var tfList []complexArgumentData + // resp.Diagnostics.Append(plan.ComplexArgument.ElementsAs(ctx, &tfList, false)...) + // if resp.Diagnostics.HasError() { + // return + // } + + // in.ComplexArgument = expandComplexArgument(tfList) + // } // TIP: -- 4. Call the AWS create function out, err := conn.CreateStreamProcessor(ctx, in) @@ -264,7 +211,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat ) return } - if out == nil || out.StreamProcessor == nil { + if out == nil || out.StreamProcessorArn == nil { resp.Diagnostics.AddError( create.ProblemStandardMessage(names.Rekognition, create.ErrActionCreating, ResNameStreamProcessor, plan.Name.String(), nil), errors.New("empty output").Error(), @@ -273,12 +220,11 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat } // TIP: -- 5. Using the output from the create function, set the minimum attributes - plan.ARN = flex.StringToFramework(ctx, out.StreamProcessor.Arn) - plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) + plan.ARN = flex.StringToFramework(ctx, out.StreamProcessorArn) // TIP: -- 6. 
Use a waiter to wait for create to complete createTimeout := r.CreateTimeout(ctx, plan.Timeouts) - _, err = waitStreamProcessorCreated(ctx, conn, plan.ID.ValueString(), createTimeout) + _, err = waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) if err != nil { resp.Diagnostics.AddError( create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.Name.String(), err), @@ -292,18 +238,6 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat } func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - // TIP: ==== RESOURCE READ ==== - // Generally, the Read function should do the following things. Make - // sure there is a good reason if you don't do one of these. - // - // 1. Get a client connection to the relevant service - // 2. Fetch the state - // 3. Get the resource from AWS - // 4. Remove resource from state if it is not found - // 5. Set the arguments and attributes - // 6. Set the state - - // TIP: -- 1. Get a client connection to the relevant service conn := r.Meta().RekognitionClient(ctx) // TIP: -- 2. Fetch the state @@ -312,10 +246,8 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq if resp.Diagnostics.HasError() { return } - - // TIP: -- 3. Get the resource from AWS using an API Get, List, or Describe- - // type function, or, better yet, using a finder. - out, err := findStreamProcessorByID(ctx, conn, state.ID.ValueString()) + + out, err := findStreamProcessorByID(ctx, conn, state.Name.ValueString()) // TIP: -- 4. 
Remove resource from state if it is not found if tfresource.NotFound(err) { resp.State.RemoveResource(ctx) @@ -323,60 +255,25 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq } if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.Name.String(), err), err.Error(), ) return } - // TIP: -- 5. Set the arguments and attributes - // - // For simple data types (i.e., schema.StringAttribute, schema.BoolAttribute, - // schema.Int64Attribute, and schema.Float64Attribue), simply setting the - // appropriate data struct field is sufficient. The flex package implements - // helpers for converting between Go and Plugin-Framework types seamlessly. No - // error or nil checking is necessary. - // - // However, there are some situations where more handling is needed such as - // complex data types (e.g., schema.ListAttribute, schema.SetAttribute). In - // these cases the flatten function may have a diagnostics return value, which - // should be appended to resp.Diagnostics. - state.ARN = flex.StringToFramework(ctx, out.Arn) - state.ID = flex.StringToFramework(ctx, out.StreamProcessorId) - state.Name = flex.StringToFramework(ctx, out.StreamProcessorName) - state.Type = flex.StringToFramework(ctx, out.StreamProcessorType) + + state.Name = flex.StringToFramework(ctx, out.Name) // TIP: Setting a complex type. - complexArgument, d := flattenComplexArgument(ctx, out.ComplexArgument) - resp.Diagnostics.Append(d...) - state.ComplexArgument = complexArgument + // complexArgument, d := flattenComplexArgument(ctx, out.ComplexArgument) + // resp.Diagnostics.Append(d...) + // state.ComplexArgument = complexArgument // TIP: -- 6. Set the state resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) 
} func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - // TIP: ==== RESOURCE UPDATE ==== - // Not all resources have Update functions. There are a few reasons: - // a. The AWS API does not support changing a resource - // b. All arguments have RequiresReplace() plan modifiers - // c. The AWS API uses a create call to modify an existing resource - // - // In the cases of a. and b., the resource will not have an update method - // defined. In the case of c., Update and Create can be refactored to call - // the same underlying function. - // - // The rest of the time, there should be an Update function and it should - // do the following things. Make sure there is a good reason if you don't - // do one of these. - // - // 1. Get a client connection to the relevant service - // 2. Fetch the plan and state - // 3. Populate a modify input structure and check for changes - // 4. Call the AWS modify/update function - // 5. Use a waiter to wait for update to complete - // 6. Save the request plan to response state - // TIP: -- 1. Get a client connection to the relevant service conn := r.Meta().RekognitionClient(ctx) // TIP: -- 2. Fetch the plan @@ -388,65 +285,46 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } // TIP: -- 3. Populate a modify input structure and check for changes - if !plan.Name.Equal(state.Name) || - !plan.Description.Equal(state.Description) || - !plan.ComplexArgument.Equal(state.ComplexArgument) || - !plan.Type.Equal(state.Type) { + if !plan.Name.Equal(state.Name) { in := &rekognition.UpdateStreamProcessorInput{ // TIP: Mandatory or fields that will always be present can be set when // you create the Input structure. (Replace these with real fields.) 
- StreamProcessorId: aws.String(plan.ID.ValueString()), - StreamProcessorName: aws.String(plan.Name.ValueString()), - StreamProcessorType: aws.String(plan.Type.ValueString()), - } - - if !plan.Description.IsNull() { - // TIP: Optional fields should be set based on whether or not they are - // used. - in.Description = aws.String(plan.Description.ValueString()) - } - if !plan.ComplexArgument.IsNull() { - // TIP: Use an expander to assign a complex argument. The elements must be - // deserialized into the appropriate struct before being passed to the expander. - var tfList []complexArgumentData - resp.Diagnostics.Append(plan.ComplexArgument.ElementsAs(ctx, &tfList, false)...) - if resp.Diagnostics.HasError() { - return - } - - in.ComplexArgument = expandComplexArgument(tfList) + Name: aws.String(plan.Name.ValueString()), } // TIP: -- 4. Call the AWS modify/update function - out, err := conn.UpdateStreamProcessor(ctx, in) + _, err := conn.UpdateStreamProcessor(ctx, in) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.Name.String(), err), err.Error(), ) return } - if out == nil || out.StreamProcessor == nil { - resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.ID.String(), nil), - errors.New("empty output").Error(), - ) - return - } + + // we have to call describe to get the new values + + // if out == nil || out.ResultMetadata == nil { + // resp.Diagnostics.AddError( + // create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.Name.String(), nil), + // errors.New("empty output").Error(), + // ) + // return + // } // TIP: Using the output from the update function, re-set any computed attributes - plan.ARN = 
flex.StringToFramework(ctx, out.StreamProcessor.Arn) - plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) + // plan.ARN = flex.StringToFramework(ctx, out.Arn) + // plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) } // TIP: -- 5. Use a waiter to wait for update to complete updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) - _, err := waitStreamProcessorUpdated(ctx, conn, plan.ID.ValueString(), updateTimeout) + _, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForUpdate, ResNameStreamProcessor, plan.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForUpdate, ResNameStreamProcessor, plan.Name.String(), err), err.Error(), ) return @@ -458,22 +336,6 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - // TIP: ==== RESOURCE DELETE ==== - // Most resources have Delete functions. There are rare situations - // where you might not need a delete: - // a. The AWS API does not provide a way to delete the resource - // b. The point of your resource is to perform an action (e.g., reboot a - // server) and deleting serves no purpose. - // - // The Delete function should do the following things. Make sure there - // is a good reason if you don't do one of these. - // - // 1. Get a client connection to the relevant service - // 2. Fetch the state - // 3. Populate a delete input structure - // 4. Call the AWS delete function - // 5. Use a waiter to wait for delete to complete - // TIP: -- 1. Get a client connection to the relevant service conn := r.Meta().RekognitionClient(ctx) // TIP: -- 2. 
Fetch the state @@ -485,19 +347,18 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet // TIP: -- 3. Populate a delete input structure in := &rekognition.DeleteStreamProcessorInput{ - StreamProcessorId: aws.String(state.ID.ValueString()), + Name: aws.String(state.Name.ValueString()), } - // TIP: -- 4. Call the AWS delete function + _, err := conn.DeleteStreamProcessor(ctx, in) - // TIP: On rare occassions, the API returns a not found error after deleting a - // resource. If that happens, we don't want it to show up as an error. + if err != nil { if errs.IsA[*awstypes.ResourceNotFoundException](err) { return } resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.Name.String(), err), err.Error(), ) return @@ -505,56 +366,27 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet // TIP: -- 5. Use a waiter to wait for delete to complete deleteTimeout := r.DeleteTimeout(ctx, state.Timeouts) - _, err = waitStreamProcessorDeleted(ctx, conn, state.ID.ValueString(), deleteTimeout) + _, err = waitStreamProcessorDeleted(ctx, conn, state.Name.ValueString(), deleteTimeout) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.Name.String(), err), err.Error(), ) return } } -// TIP: ==== TERRAFORM IMPORTING ==== -// If Read can get all the information it needs from the Identifier -// (i.e., path.Root("id")), you can use the PassthroughID importer. Otherwise, -// you'll need a custom import function. 
-// -// See more: -// https://developer.hashicorp.com/terraform/plugin/framework/resources/import func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) } - -// TIP: ==== STATUS CONSTANTS ==== -// Create constants for states and statuses if the service does not -// already have suitable constants. We prefer that you use the constants -// provided in the service if available (e.g., awstypes.StatusInProgress). -const ( - statusChangePending = "Pending" - statusDeleting = "Deleting" - statusNormal = "Normal" - statusUpdated = "Updated" -) - -// TIP: ==== WAITERS ==== -// Some resources of some services have waiters provided by the AWS API. -// Unless they do not work properly, use them rather than defining new ones -// here. -// -// Sometimes we define the wait, status, and find functions in separate -// files, wait.go, status.go, and find.go. Follow the pattern set out in the -// service and define these where it makes the most sense. -// -// If these functions are used in the _test.go file, they will need to be -// exported (i.e., capitalized). -// -// You will need to adjust the parameters and names to fit the service. 
-func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*awstypes.StreamProcessor, error) { +func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ Pending: []string{}, - Target: []string{statusNormal}, + Target: []string{ + string(awstypes.StreamProcessorStatusStarting), + string(awstypes.StreamProcessorStatusRunning), + string(awstypes.StreamProcessorStatusFailed),}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -562,21 +394,20 @@ func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, i } outputRaw, err := stateConf.WaitForStateContext(ctx) - if out, ok := outputRaw.(*rekognition.StreamProcessor); ok { + if out, ok := outputRaw.(*rekognition.DescribeStreamProcessorOutput); ok { return out, err } return nil, err } -// TIP: It is easier to determine whether a resource is updated for some -// resources than others. The best case is a status flag that tells you when -// the update has been fully realized. Other times, you can check to see if a -// key resource argument is updated to a new value or not. 
-func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*awstypes.StreamProcessor, error) { +func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{statusChangePending}, - Target: []string{statusUpdated}, + Pending: []string{string(awstypes.StreamProcessorStatusUpdating)}, + Target: []string{ + string(awstypes.StreamProcessorStatusStarting), + string(awstypes.StreamProcessorStatusRunning), + string(awstypes.StreamProcessorStatusFailed),}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -584,38 +415,36 @@ func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, i } outputRaw, err := stateConf.WaitForStateContext(ctx) - if out, ok := outputRaw.(*rekognition.StreamProcessor); ok { + if out, ok := outputRaw.(*rekognition.DescribeStreamProcessorOutput); ok { return out, err } return nil, err } -// TIP: A deleted waiter is almost like a backwards created waiter. There may -// be additional pending states, however. 
-func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*awstypes.StreamProcessor, error) { +func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{statusDeleting, statusNormal}, + Pending: []string{ + string(awstypes.StreamProcessorStatusStopped), + string(awstypes.StreamProcessorStatusStarting), + string(awstypes.StreamProcessorStatusRunning), + string(awstypes.StreamProcessorStatusFailed), + string(awstypes.StreamProcessorStatusStopping), + string(awstypes.StreamProcessorStatusUpdating), + }, Target: []string{}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, } outputRaw, err := stateConf.WaitForStateContext(ctx) - if out, ok := outputRaw.(*rekognition.StreamProcessor); ok { + if out, ok := outputRaw.(*rekognition.DescribeStreamProcessorOutput); ok { return out, err } return nil, err } -// TIP: ==== STATUS ==== -// The status function can return an actual status when that field is -// available from the API (e.g., out.Status). Otherwise, you can use custom -// statuses to communicate the states of the resource. -// -// Waiters consume the values returned by status functions. Design status so -// that it can be reused by a create, update, and delete waiter, if possible. func statusStreamProcessor(ctx context.Context, conn *rekognition.Client, id string) retry.StateRefreshFunc { return func() (interface{}, string, error) { out, err := findStreamProcessorByID(ctx, conn, id) @@ -627,21 +456,16 @@ func statusStreamProcessor(ctx context.Context, conn *rekognition.Client, id str return nil, "", err } - return out, aws.ToString(out.Status), nil + return out, aws.ToString((*string)(&out.Status)), nil } } -// TIP: ==== FINDERS ==== -// The find function is not strictly necessary. 
You could do the API -// request from the status function. However, we have found that find often -// comes in handy in other places besides the status function. As a result, it -// is good practice to define it separately. -func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, id string) (*awstypes.StreamProcessor, error) { - in := &rekognition.GetStreamProcessorInput{ - Id: aws.String(id), +func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name string) (*rekognition.DescribeStreamProcessorOutput, error) { + in := &rekognition.DescribeStreamProcessorInput{ + Name: aws.String(name), } - out, err := conn.GetStreamProcessor(ctx, in) + out, err := conn.DescribeStreamProcessor(ctx, in) if err != nil { if errs.IsA[*awstypes.ResourceNotFoundException](err) { return nil, &retry.NotFoundError{ @@ -653,167 +477,18 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, id s return nil, err } - if out == nil || out.StreamProcessor == nil { + if out == nil { return nil, tfresource.NewEmptyResultError(in) } - return out.StreamProcessor, nil + return out, nil } -// TIP: ==== FLEX ==== -// Flatteners and expanders ("flex" functions) help handle complex data -// types. Flatteners take an API data type and return the equivalent Plugin-Framework -// type. In other words, flatteners translate from AWS -> Terraform. -// -// On the other hand, expanders take a Terraform data structure and return -// something that you can send to the AWS API. In other words, expanders -// translate from Terraform -> AWS. 
-// -// See more: -// https://hashicorp.github.io/terraform-provider-aws/data-handling-and-conversion/ -func flattenComplexArgument(ctx context.Context, apiObject *awstypes.ComplexArgument) (types.List, diag.Diagnostics) { - var diags diag.Diagnostics - elemType := types.ObjectType{AttrTypes: complexArgumentAttrTypes} - - if apiObject == nil { - return types.ListNull(elemType), diags - } - - obj := map[string]attr.Value{ - "nested_required": flex.StringValueToFramework(ctx, apiObject.NestedRequired), - "nested_optional": flex.StringValueToFramework(ctx, apiObject.NestedOptional), - } - objVal, d := types.ObjectValue(complexArgumentAttrTypes, obj) - diags.Append(d...) - - listVal, d := types.ListValue(elemType, []attr.Value{objVal}) - diags.Append(d...) - return listVal, diags -} - -// TIP: Often the AWS API will return a slice of structures in response to a -// request for information. Sometimes you will have set criteria (e.g., the ID) -// that means you'll get back a one-length slice. This plural function works -// brilliantly for that situation too. -func flattenComplexArguments(ctx context.Context, apiObjects []*awstypes.ComplexArgument) (types.List, diag.Diagnostics) { - var diags diag.Diagnostics - elemType := types.ObjectType{AttrTypes: complexArgumentAttrTypes} - - if len(apiObjects) == 0 { - return types.ListNull(elemType), diags - } - - elems := []attr.Value{} - for _, apiObject := range apiObjects { - if apiObject == nil { - continue - } - - obj := map[string]attr.Value{ - "nested_required": flex.StringValueToFramework(ctx, apiObject.NestedRequired), - "nested_optional": flex.StringValueToFramework(ctx, apiObject.NestedOptional), - } - objVal, d := types.ObjectValue(complexArgumentAttrTypes, obj) - diags.Append(d...) - - elems = append(elems, objVal) - } - - listVal, d := types.ListValue(elemType, elems) - diags.Append(d...) 
- - return listVal, diags -} - -// TIP: Remember, as mentioned above, expanders take a Terraform data structure -// and return something that you can send to the AWS API. In other words, -// expanders translate from Terraform -> AWS. -// -// See more: -// https://hashicorp.github.io/terraform-provider-aws/data-handling-and-conversion/ -func expandComplexArgument(tfList []complexArgumentData) *awstypes.ComplexArgument { - if len(tfList) == 0 { - return nil - } - - tfObj := tfList[0] - apiObject := &awstypes.ComplexArgument{ - NestedRequired: aws.String(tfObj.NestedRequired.ValueString()), - } - if !tfObj.NestedOptional.IsNull() { - apiObject.NestedOptional = aws.String(tfObj.NestedOptional.ValueString()) - } - - return apiObject -} - -// TIP: Even when you have a list with max length of 1, this plural function -// works brilliantly. However, if the AWS API takes a structure rather than a -// slice of structures, you will not need it. -func expandComplexArguments(tfList []complexArgumentData) []*rekognition.ComplexArgument { - // TIP: The AWS API can be picky about whether you send a nil or zero- - // length for an argument that should be cleared. For example, in some - // cases, if you send a nil value, the AWS API interprets that as "make no - // changes" when what you want to say is "remove everything." Sometimes - // using a zero-length list will cause an error. - // - // As a result, here are two options. Usually, option 1, nil, will work as - // expected, clearing the field. But, test going from something to nothing - // to make sure it works. If not, try the second option. - // TIP: Option 1: Returning nil for zero-length list - if len(tfList) == 0 { - return nil - } - var apiObject []*awstypes.ComplexArgument - // TIP: Option 2: Return zero-length list for zero-length list. If option 1 does - // not work, after testing going from something to nothing (if that is - // possible), uncomment out the next line and remove option 1. 
- // - // apiObject := make([]*rekognition.ComplexArgument, 0) - - for _, tfObj := range tfList { - item := &rekognition.ComplexArgument{ - NestedRequired: aws.String(tfObj.NestedRequired.ValueString()), - } - if !tfObj.NestedOptional.IsNull() { - item.NestedOptional = aws.String(tfObj.NestedOptional.ValueString()) - } - - apiObject = append(apiObject, item) - } - - return apiObject -} - -// TIP: ==== DATA STRUCTURES ==== -// With Terraform Plugin-Framework configurations are deserialized into -// Go types, providing type safety without the need for type assertions. -// These structs should match the schema definition exactly, and the `tfsdk` -// tag value should match the attribute name. -// -// Nested objects are represented in their own data struct. These will -// also have a corresponding attribute type mapping for use inside flex -// functions. -// -// See more: -// https://developer.hashicorp.com/terraform/plugin/framework/handling-data/accessing-values type resourceStreamProcessorData struct { ARN types.String `tfsdk:"arn"` - ComplexArgument types.List `tfsdk:"complex_argument"` - Description types.String `tfsdk:"description"` - ID types.String `tfsdk:"id"` Name types.String `tfsdk:"name"` - Timeouts timeouts.Value `tfsdk:"timeouts"` - Type types.String `tfsdk:"type"` -} - -type complexArgumentData struct { - NestedRequired types.String `tfsdk:"nested_required"` - NestedOptional types.String `tfsdk:"nested_optional"` -} - -var complexArgumentAttrTypes = map[string]attr.Type{ - "nested_required": types.StringType, - "nested_optional": types.StringType, -} + Tags types.Map `tfsdk:"tags"` + TagsAll types.Map `tfsdk:"tags_all"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} \ No newline at end of file From c313856c57785d4c25cd7660db4101d85d3d7439 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Fri, 10 May 2024 12:43:54 -0500 Subject: [PATCH 03/71] format --- .../service/rekognition/stream_processor.go | 104 +++++++++--------- 
.../rekognition/stream_processor_test.go | 1 + 2 files changed, 51 insertions(+), 54 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 25abcf5e78e7..5a2494df8426 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -2,6 +2,7 @@ // SPDX-License-Identifier: MPL-2.0 package rekognition + // **PLEASE DELETE THIS AND ALL TIP COMMENTS BEFORE SUBMITTING A PR FOR REVIEW!** // // TIP: ==== INTRODUCTION ==== @@ -51,11 +52,11 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/framework" "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/names" - tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" - ) + // TIP: ==== FILE STRUCTURE ==== // All resources should follow this basic outline. Improve this resource's // maintainability by sticking to it. @@ -70,7 +71,7 @@ import ( // @FrameworkResource("aws_rekognition_stream_processor", name="Stream Processor") func newResourceStreamProcessor(_ context.Context) (resource.ResourceWithConfigure, error) { r := &resourceStreamProcessor{} - + // TIP: ==== CONFIGURABLE TIMEOUTS ==== // Users can configure timeout lengths but you need to use the times they // provide. 
Access the timeout they configure (or the defaults) using, @@ -119,12 +120,12 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem // Blocks: map[string]schema.Block{ // "complex_argument": schema.ListNestedBlock{ // // TIP: ==== LIST VALIDATORS ==== - // // List and set validators take the place of MaxItems and MinItems in + // // List and set validators take the place of MaxItems and MinItems in // // Plugin-Framework based resources. Use listvalidator.SizeAtLeast(1) to - // // make a nested object required. Similar to Plugin-SDK, complex objects + // // make a nested object required. Similar to Plugin-SDK, complex objects // // can be represented as lists or sets with listvalidator.SizeAtMost(1). // // - // // For a complete mapping of Plugin-SDK to Plugin-Framework schema fields, + // // For a complete mapping of Plugin-SDK to Plugin-Framework schema fields, // // see: // // https://developer.hashicorp.com/terraform/plugin/framework/migrating/attributes-blocks/blocks // Validators: []validator.List{ @@ -164,20 +165,20 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat // 4. Call the AWS create/put function // 5. Using the output from the create function, set the minimum arguments // and attributes for the Read function to work, as well as any computed - // only attributes. + // only attributes. // 6. Use a waiter to wait for create to complete // 7. Save the request plan to response state // TIP: -- 1. Get a client connection to the relevant service conn := r.Meta().RekognitionClient(ctx) - + // TIP: -- 2. Fetch the plan var plan resourceStreamProcessorData resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) if resp.Diagnostics.HasError() { return } - + // TIP: -- 3. 
Populate a create input structure in := &rekognition.CreateStreamProcessorInput{ Name: aws.String(plan.Name.ValueString()), @@ -199,7 +200,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat // in.ComplexArgument = expandComplexArgument(tfList) // } - + // TIP: -- 4. Call the AWS create function out, err := conn.CreateStreamProcessor(ctx, in) if err != nil { @@ -218,10 +219,10 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat ) return } - + // TIP: -- 5. Using the output from the create function, set the minimum attributes plan.ARN = flex.StringToFramework(ctx, out.StreamProcessorArn) - + // TIP: -- 6. Use a waiter to wait for create to complete createTimeout := r.CreateTimeout(ctx, plan.Timeouts) _, err = waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) @@ -232,14 +233,14 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat ) return } - + // TIP: -- 7. Save the request plan to response state resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) } func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { conn := r.Meta().RekognitionClient(ctx) - + // TIP: -- 2. Fetch the state var state resourceStreamProcessorData resp.Diagnostics.Append(req.State.Get(ctx, &state)...) @@ -260,22 +261,21 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq ) return } - state.Name = flex.StringToFramework(ctx, out.Name) - + // TIP: Setting a complex type. // complexArgument, d := flattenComplexArgument(ctx, out.ComplexArgument) // resp.Diagnostics.Append(d...) // state.ComplexArgument = complexArgument - + // TIP: -- 6. Set the state resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) } func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { conn := r.Meta().RekognitionClient(ctx) - + // TIP: -- 2. 
Fetch the plan var plan, state resourceStreamProcessorData resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) @@ -283,16 +283,16 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat if resp.Diagnostics.HasError() { return } - + // TIP: -- 3. Populate a modify input structure and check for changes - if !plan.Name.Equal(state.Name) { + if !plan.Name.Equal(state.Name) { in := &rekognition.UpdateStreamProcessorInput{ // TIP: Mandatory or fields that will always be present can be set when // you create the Input structure. (Replace these with real fields.) - Name: aws.String(plan.Name.ValueString()), + Name: aws.String(plan.Name.ValueString()), } - + // TIP: -- 4. Call the AWS modify/update function _, err := conn.UpdateStreamProcessor(ctx, in) if err != nil { @@ -312,13 +312,12 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat // ) // return // } - + // TIP: Using the output from the update function, re-set any computed attributes // plan.ARN = flex.StringToFramework(ctx, out.Arn) // plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) } - // TIP: -- 5. Use a waiter to wait for update to complete updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) _, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) @@ -330,26 +329,24 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - // TIP: -- 6. Save the request plan to response state resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) } func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { conn := r.Meta().RekognitionClient(ctx) - + // TIP: -- 2. Fetch the state var state resourceStreamProcessorData resp.Diagnostics.Append(req.State.Get(ctx, &state)...) if resp.Diagnostics.HasError() { return } - + // TIP: -- 3. 
Populate a delete input structure in := &rekognition.DeleteStreamProcessorInput{ Name: aws.String(state.Name.ValueString()), } - _, err := conn.DeleteStreamProcessor(ctx, in) @@ -363,7 +360,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet ) return } - + // TIP: -- 5. Use a waiter to wait for delete to complete deleteTimeout := r.DeleteTimeout(ctx, state.Timeouts) _, err = waitStreamProcessorDeleted(ctx, conn, state.Name.ValueString(), deleteTimeout) @@ -382,11 +379,11 @@ func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource. func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{}, - Target: []string{ + Pending: []string{}, + Target: []string{ string(awstypes.StreamProcessorStatusStarting), string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed),}, + string(awstypes.StreamProcessorStatusFailed)}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -403,11 +400,11 @@ func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, i func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{string(awstypes.StreamProcessorStatusUpdating)}, - Target: []string{ + Pending: []string{string(awstypes.StreamProcessorStatusUpdating)}, + Target: []string{ string(awstypes.StreamProcessorStatusStarting), string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed),}, + string(awstypes.StreamProcessorStatusFailed)}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -424,17 +421,17 @@ func waitStreamProcessorUpdated(ctx 
context.Context, conn *rekognition.Client, i func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{ - string(awstypes.StreamProcessorStatusStopped), - string(awstypes.StreamProcessorStatusStarting), - string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed), - string(awstypes.StreamProcessorStatusStopping), - string(awstypes.StreamProcessorStatusUpdating), - }, - Target: []string{}, - Refresh: statusStreamProcessor(ctx, conn, id), - Timeout: timeout, + Pending: []string{ + string(awstypes.StreamProcessorStatusStopped), + string(awstypes.StreamProcessorStatusStarting), + string(awstypes.StreamProcessorStatusRunning), + string(awstypes.StreamProcessorStatusFailed), + string(awstypes.StreamProcessorStatusStopping), + string(awstypes.StreamProcessorStatusUpdating), + }, + Target: []string{}, + Refresh: statusStreamProcessor(ctx, conn, id), + Timeout: timeout, } outputRaw, err := stateConf.WaitForStateContext(ctx) @@ -464,7 +461,7 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name in := &rekognition.DescribeStreamProcessorInput{ Name: aws.String(name), } - + out, err := conn.DescribeStreamProcessor(ctx, in) if err != nil { if errs.IsA[*awstypes.ResourceNotFoundException](err) { @@ -484,11 +481,10 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name return out, nil } - type resourceStreamProcessorData struct { - ARN types.String `tfsdk:"arn"` - Name types.String `tfsdk:"name"` - Tags types.Map `tfsdk:"tags"` - TagsAll types.Map `tfsdk:"tags_all"` - Timeouts timeouts.Value `tfsdk:"timeouts"` -} \ No newline at end of file + ARN types.String `tfsdk:"arn"` + Name types.String `tfsdk:"name"` + Tags types.Map `tfsdk:"tags"` + TagsAll types.Map `tfsdk:"tags_all"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} 
diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 979b5a05d587..74028d09cc17 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -2,6 +2,7 @@ // SPDX-License-Identifier: MPL-2.0 package rekognition_test + // **PLEASE DELETE THIS AND ALL TIP COMMENTS BEFORE SUBMITTING A PR FOR REVIEW!** // // TIP: ==== INTRODUCTION ==== From 501a134f902226a55a9507c444534c220d3590be Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Fri, 10 May 2024 14:27:10 -0500 Subject: [PATCH 04/71] fmt --- .../service/rekognition/stream_processor.go | 88 +++++-------------- 1 file changed, 22 insertions(+), 66 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 5a2494df8426..3b4152d527ea 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -3,80 +3,38 @@ package rekognition -// **PLEASE DELETE THIS AND ALL TIP COMMENTS BEFORE SUBMITTING A PR FOR REVIEW!** -// -// TIP: ==== INTRODUCTION ==== -// Thank you for trying the skaff tool! -// -// You have opted to include these helpful comments. They all include "TIP:" -// to help you find and remove them when you're done with them. -// -// While some aspects of this file are customized to your input, the -// scaffold tool does *not* look at the AWS API and ensure it has correct -// function, structure, and variable names. It makes guesses based on -// commonalities. You will need to make significant adjustments. -// -// In other words, as generated, this is a rough outline of the work you will -// need to do. If something doesn't make sense for your situation, get rid of -// it. - import ( - // TIP: ==== IMPORTS ==== - // This is a common set of imports but not customized to your code since - // your code hasn't been written yet. 
Make sure you, your IDE, or - // goimports -w fixes these imports. - // - // The provider linter wants your imports to be in two groups: first, - // standard library (i.e., "fmt" or "strings"), second, everything else. - // - // Also, AWS Go SDK v2 may handle nested structures differently than v1, - // using the services/rekognition/types package. If so, you'll - // need to import types and reference the nested types, e.g., as - // awstypes.. "context" "errors" "time" + "github.com/YakDriver/regexache" "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/rekognition" awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/framework" "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + fwtypes "github.com/hashicorp/terraform-provider-aws/internal/framework/types" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/names" ) -// TIP: ==== FILE STRUCTURE ==== -// All resources should follow this basic outline. 
Improve this resource's -// maintainability by sticking to it. -// -// 1. Package declaration -// 2. Imports -// 3. Main resource struct with schema method -// 4. Create, read, update, delete methods (in that order) -// 5. Other functions (flatteners, expanders, waiters, finders, etc.) - -// Function annotations are used for resource registration to the Provider. DO NOT EDIT. // @FrameworkResource("aws_rekognition_stream_processor", name="Stream Processor") func newResourceStreamProcessor(_ context.Context) (resource.ResourceWithConfigure, error) { r := &resourceStreamProcessor{} - - // TIP: ==== CONFIGURABLE TIMEOUTS ==== - // Users can configure timeout lengths but you need to use the times they - // provide. Access the timeout they configure (or the defaults) using, - // e.g., r.CreateTimeout(ctx, plan.Timeouts) (see below). The times here are - // the defaults if they don't configure timeouts. r.SetDefaultCreateTimeout(30 * time.Minute) r.SetDefaultUpdateTimeout(30 * time.Minute) r.SetDefaultDeleteTimeout(30 * time.Minute) @@ -98,21 +56,32 @@ func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.Metad } func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$`) + nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) + resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "arn": framework.ARNAttributeComputedOnly(), - "description": schema.StringAttribute{ + "kms_key_id": schema.StringAttribute{ Optional: true, + Validators: []validator.String{ + stringvalidator.LengthAtMost(2048), + stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$"), + }, }, - "id": framework.IDAttribute(), "name": schema.StringAttribute{ Required: true, + Validators: []validator.String{ + stringvalidator.LengthAtMost(128), + 
stringvalidator.RegexMatches(nameRegex, "must conform to: [a-zA-Z0-9_.\\-]+"), + }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, }, - "type": schema.StringAttribute{ - Required: true, + "role_arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Required: true, }, names.AttrTags: tftags.TagsAttribute(), names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), @@ -155,21 +124,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem } func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - // TIP: ==== RESOURCE CREATE ==== - // Generally, the Create function should do the following things. Make - // sure there is a good reason if you don't do one of these. - // - // 1. Get a client connection to the relevant service - // 2. Fetch the plan - // 3. Populate a create input structure - // 4. Call the AWS create/put function - // 5. Using the output from the create function, set the minimum arguments - // and attributes for the Read function to work, as well as any computed - // only attributes. - // 6. Use a waiter to wait for create to complete - // 7. Save the request plan to response state - - // TIP: -- 1. Get a client connection to the relevant service conn := r.Meta().RekognitionClient(ctx) // TIP: -- 2. 
Fetch the plan @@ -404,7 +358,8 @@ func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, i Target: []string{ string(awstypes.StreamProcessorStatusStarting), string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed)}, + string(awstypes.StreamProcessorStatusFailed), + }, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -484,6 +439,7 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name type resourceStreamProcessorData struct { ARN types.String `tfsdk:"arn"` Name types.String `tfsdk:"name"` + RoleARN fwtypes.ARN `tfsdk:"role_arn"` Tags types.Map `tfsdk:"tags"` TagsAll types.Map `tfsdk:"tags_all"` Timeouts timeouts.Value `tfsdk:"timeouts"` From df1550f835964f9ea0e9497ef887679f2f4bf2ec Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Fri, 10 May 2024 16:03:27 -0500 Subject: [PATCH 05/71] wip --- .../service/rekognition/stream_processor.go | 128 +++++++++++++----- 1 file changed, 92 insertions(+), 36 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 3b4152d527ea..9b5705500109 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -13,10 +13,12 @@ import ( "github.com/aws/aws-sdk-go-v2/service/rekognition" awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault" 
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" @@ -61,16 +63,19 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ - "arn": framework.ARNAttributeComputedOnly(), + names.AttrARN: framework.ARNAttributeComputedOnly(), "kms_key_id": schema.StringAttribute{ - Optional: true, + Description: "The identifier for your AWS Key Management Service key (AWS KMS key). You can supply the Amazon Resource Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key, or an alias ARN.", + Optional: true, Validators: []validator.String{ stringvalidator.LengthAtMost(2048), stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$"), }, }, - "name": schema.StringAttribute{ - Required: true, + names.AttrID: framework.IDAttribute(), + names.AttrName: schema.StringAttribute{ + Description: "An identifier you assign to the stream processor.", + Required: true, Validators: []validator.String{ stringvalidator.LengthAtMost(128), stringvalidator.RegexMatches(nameRegex, "must conform to: [a-zA-Z0-9_.\\-]+"), @@ -80,12 +85,56 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "role_arn": schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Required: true, + Description: "The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor.", + CustomType: fwtypes.ARNType, + Required: true, + }, + "data_sharing_enabled": schema.BoolAttribute{ + Description: "Do you want to share data with Rekognition to improve model performance.", + Optional: true, + Default: booldefault.StaticBool(false), }, names.AttrTags: tftags.TagsAttribute(), names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), 
}, + Blocks: map[string]schema.Block{ + "input": schema.SingleNestedBlock{ + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, + Attributes: map[string]schema.Attribute{ + "kinesis_video_stream_arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Required: true, + }, + }, + }, + "notification_channel": schema.SingleNestedBlock{ + Attributes: map[string]schema.Attribute{ + "sns_topic_arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Optional: true, + }, + }, + }, + "output": schema.SingleNestedBlock{ + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, + Attributes: map[string]schema.Attribute{ + "kinesis_video_stream_arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Required: true, + }, + "s3_bucket": schema.StringAttribute{ + Required: true, + }, + "s3_key_prefix": schema.StringAttribute{ + Required: true, + }, + }, + }, + }, // Blocks: map[string]schema.Block{ // "complex_argument": schema.ListNestedBlock{ // // TIP: ==== LIST VALIDATORS ==== @@ -126,40 +175,34 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { conn := r.Meta().RekognitionClient(ctx) - // TIP: -- 2. Fetch the plan var plan resourceStreamProcessorData resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) if resp.Diagnostics.HasError() { return } - // TIP: -- 3. 
Populate a create input structure in := &rekognition.CreateStreamProcessorInput{ - Name: aws.String(plan.Name.ValueString()), + Name: aws.String(plan.Name.ValueString()), + KmsKeyId: aws.String(plan.KmsKeyId.ValueString()), + RoleArn: aws.String(plan.RoleARN.ValueString()), + DataSharingPreference: &awstypes.StreamProcessorDataSharingPreference{ + OptIn: plan.DataSharingEnabled.ValueBool(), + }, + Input: &awstypes.StreamProcessorInput{ + KinesisVideoStream: &awstypes.KinesisVideoStream{ + Arn: aws.String(plan.Input.KinesisVideoStreamArn.ValueString()), + }, + }, + } + + if !plan.NotificationChannel.SNSTopicArn.IsNull() { + in.NotificationChannel = &awstypes.StreamProcessorNotificationChannel{ + SNSTopicArn: aws.String(plan.NotificationChannel.SNSTopicArn.ValueString()), + } } - // if !plan.Description.IsNull() { - // // TIP: Optional fields should be set based on whether or not they are - // // used. - // in.Description = aws.String(plan.Description.ValueString()) - // } - // if !plan.ComplexArgument.IsNull() { - // // TIP: Use an expander to assign a complex argument. The elements must be - // // deserialized into the appropriate struct before being passed to the expander. - // var tfList []complexArgumentData - // resp.Diagnostics.Append(plan.ComplexArgument.ElementsAs(ctx, &tfList, false)...) - // if resp.Diagnostics.HasError() { - // return - // } - - // in.ComplexArgument = expandComplexArgument(tfList) - // } - - // TIP: -- 4. Call the AWS create function out, err := conn.CreateStreamProcessor(ctx, in) if err != nil { - // TIP: Since ID has not been set yet, you cannot use plan.ID.String() - // in error messages at this point. resp.Diagnostics.AddError( create.ProblemStandardMessage(names.Rekognition, create.ErrActionCreating, ResNameStreamProcessor, plan.Name.String(), err), err.Error(), @@ -174,8 +217,8 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - // TIP: -- 5. 
Using the output from the create function, set the minimum attributes plan.ARN = flex.StringToFramework(ctx, out.StreamProcessorArn) + plan.ID = plan.ARN // TIP: -- 6. Use a waiter to wait for create to complete createTimeout := r.CreateTimeout(ctx, plan.Timeouts) @@ -437,10 +480,23 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name } type resourceStreamProcessorData struct { - ARN types.String `tfsdk:"arn"` - Name types.String `tfsdk:"name"` - RoleARN fwtypes.ARN `tfsdk:"role_arn"` - Tags types.Map `tfsdk:"tags"` - TagsAll types.Map `tfsdk:"tags_all"` - Timeouts timeouts.Value `tfsdk:"timeouts"` + ARN types.String `tfsdk:"arn"` + DataSharingEnabled types.Bool `tfsdk:"data_sharing_enabled"` + ID types.String `tfsdk:"id"` + Input streamProcessorInput `tfsdk:"input"` + KmsKeyId types.String `tfsdk:"kms_key_id"` + NotificationChannel notificationChannel `tfsdk:"notification_channel"` + Name types.String `tfsdk:"name"` + RoleARN fwtypes.ARN `tfsdk:"role_arn"` + Tags types.Map `tfsdk:"tags"` + TagsAll types.Map `tfsdk:"tags_all"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +type streamProcessorInput struct { + KinesisVideoStreamArn fwtypes.ARN `tfsdk:"kinesis_video_stream_arn"` +} + +type notificationChannel struct { + SNSTopicArn fwtypes.ARN `tfsdk:"sns_topic_arn"` } From 62180395ef6bd1c00442a9edb4b2bf4bc0698056 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 09:59:42 -0500 Subject: [PATCH 06/71] switch to new flex mapping --- .../rekognition/service_package_gen.go | 4 + .../service/rekognition/stream_processor.go | 108 ++++++++++++------ 2 files changed, 80 insertions(+), 32 deletions(-) diff --git a/internal/service/rekognition/service_package_gen.go b/internal/service/rekognition/service_package_gen.go index d64afcf50498..b1ae950d1f66 100644 --- a/internal/service/rekognition/service_package_gen.go +++ b/internal/service/rekognition/service_package_gen.go @@ -31,6 +31,10 @@ func (p *servicePackage) 
FrameworkResources(ctx context.Context) []*types.Servic Factory: newResourceProject, Name: "Project", }, + { + Factory: newResourceStreamProcessor, + Name: "StreamProcessor", + }, } } diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 9b5705500109..ef298963fa4f 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -28,6 +28,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/framework" "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + fwflex "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" fwtypes "github.com/hashicorp/terraform-provider-aws/internal/framework/types" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" @@ -181,24 +182,11 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - in := &rekognition.CreateStreamProcessorInput{ - Name: aws.String(plan.Name.ValueString()), - KmsKeyId: aws.String(plan.KmsKeyId.ValueString()), - RoleArn: aws.String(plan.RoleARN.ValueString()), - DataSharingPreference: &awstypes.StreamProcessorDataSharingPreference{ - OptIn: plan.DataSharingEnabled.ValueBool(), - }, - Input: &awstypes.StreamProcessorInput{ - KinesisVideoStream: &awstypes.KinesisVideoStream{ - Arn: aws.String(plan.Input.KinesisVideoStreamArn.ValueString()), - }, - }, - } + in := &rekognition.CreateStreamProcessorInput{} - if !plan.NotificationChannel.SNSTopicArn.IsNull() { - in.NotificationChannel = &awstypes.StreamProcessorNotificationChannel{ - SNSTopicArn: aws.String(plan.NotificationChannel.SNSTopicArn.ValueString()), - } + resp.Diagnostics.Append(fwflex.Expand(ctx, plan, in)...) 
+ if resp.Diagnostics.HasError() { + return } out, err := conn.CreateStreamProcessor(ctx, in) @@ -217,7 +205,6 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - plan.ARN = flex.StringToFramework(ctx, out.StreamProcessorArn) plan.ID = plan.ARN // TIP: -- 6. Use a waiter to wait for create to complete @@ -480,23 +467,80 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name } type resourceStreamProcessorData struct { - ARN types.String `tfsdk:"arn"` - DataSharingEnabled types.Bool `tfsdk:"data_sharing_enabled"` - ID types.String `tfsdk:"id"` - Input streamProcessorInput `tfsdk:"input"` - KmsKeyId types.String `tfsdk:"kms_key_id"` - NotificationChannel notificationChannel `tfsdk:"notification_channel"` - Name types.String `tfsdk:"name"` - RoleARN fwtypes.ARN `tfsdk:"role_arn"` - Tags types.Map `tfsdk:"tags"` - TagsAll types.Map `tfsdk:"tags_all"` - Timeouts timeouts.Value `tfsdk:"timeouts"` + ARN types.String `tfsdk:"arn"` + DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` + ID types.String `tfsdk:"id"` + Input fwtypes.ObjectValueOf[inputModel] `tfsdk:"input"` + KmsKeyId types.String `tfsdk:"kms_key_id"` + NotificationChannel fwtypes.ObjectValueOf[notificationChannelModel] `tfsdk:"notification_channel"` + Name types.String `tfsdk:"name"` + Output fwtypes.ObjectValueOf[outputModel] `tfsdk:"output"` + RegionsOfInterest fwtypes.ObjectValueOf[[]regionOfInterestModel] `tfsdk:"regions_of_interest"` + RoleARN fwtypes.ARN `tfsdk:"role_arn"` + Settings fwtypes.ObjectValueOf[settingsModel] `tfsdk:"settings"` + Tags types.Map `tfsdk:"tags"` + TagsAll types.Map `tfsdk:"tags_all"` + Timeouts timeouts.Value `tfsdk:"timeouts"` } -type streamProcessorInput struct { - KinesisVideoStreamArn fwtypes.ARN `tfsdk:"kinesis_video_stream_arn"` +type dataSharingPreferenceModel struct { + OptIn types.Bool `tfsdk:"opt_in"` } -type notificationChannel struct { +type 
inputModel struct { + KinesisVideoStream fwtypes.ObjectValueOf[kinesisVideoStreamInputModel] `tfsdk:"kinesis_video_stream"` +} + +type kinesisVideoStreamInputModel struct { + ARN types.String `tfsdk:"arn"` +} + +type notificationChannelModel struct { SNSTopicArn fwtypes.ARN `tfsdk:"sns_topic_arn"` } + +type outputModel struct { + KinesisDataStream fwtypes.ObjectValueOf[kinesisDataStreamModel] `tfsdk:"kinesis_data_stream"` + S3Destination fwtypes.ObjectValueOf[s3DestinationModel] `tfsdk:"s3_destination"` +} + +type kinesisDataStreamModel struct { + ARN types.String `tfsdk:"arn"` +} + +type s3DestinationModel struct { + Bucket types.String `tfsdk:"bucket"` + KeyPrefix types.String `tfsdk:"key_prefix"` +} + +type regionOfInterestModel struct { + BoundingBox fwtypes.ObjectValueOf[boundingBoxModel] `tfsdk:"bounding_box"` + Polygon fwtypes.ObjectValueOf[polygonModel] `tfsdk:"polygon"` +} + +type boundingBoxModel struct { + Height types.Number `tfsdk:"height"` + Left types.Number `tfsdk:"left"` + Top types.Number `tfsdk:"top"` + Width types.Number `tfsdk:"width"` +} + +type polygonModel struct { + X types.Number `tfsdk:"x"` + Y types.Number `tfsdk:"y"` +} + +type settingsModel struct { + ConnectedHome fwtypes.ObjectValueOf[connectedHomeModel] `tfsdk:"connected_home"` + FaceSearch fwtypes.ObjectValueOf[faceSearchModel] `tfsdk:"face_search"` +} + +type connectedHomeModel struct { + Labels types.List `tfsdk:"labels"` + MinConfidence types.Number `tfsdk:"min_confidence"` +} + +type faceSearchModel struct { + CollectionId types.String `tfsdk:"collection_id"` + FaceMatchThreshold types.Number `tfsdk:"face_match_threshold"` +} From 92aa8c47dfb553be25da598780f8b4bc6ecbee0f Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 10:34:50 -0500 Subject: [PATCH 07/71] finish setting up props --- .../service/rekognition/stream_processor.go | 214 +++++++++--------- 1 file changed, 113 insertions(+), 101 deletions(-) diff --git 
a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index ef298963fa4f..8378c8f7fc06 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -26,8 +26,8 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/errs" + "github.com/hashicorp/terraform-provider-aws/internal/errs/fwdiag" "github.com/hashicorp/terraform-provider-aws/internal/framework" - "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" fwflex "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" fwtypes "github.com/hashicorp/terraform-provider-aws/internal/framework/types" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" @@ -90,27 +90,39 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.ARNType, Required: true, }, - "data_sharing_enabled": schema.BoolAttribute{ - Description: "Do you want to share data with Rekognition to improve model performance.", - Optional: true, - Default: booldefault.StaticBool(false), - }, names.AttrTags: tftags.TagsAttribute(), names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), }, Blocks: map[string]schema.Block{ + "data_sharing_preference": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[dataSharingPreferenceModel](ctx), + Attributes: map[string]schema.Attribute{ + "opt_in": schema.BoolAttribute{ + Description: "Do you want to share data with Rekognition to improve model performance.", + Optional: true, + Default: booldefault.StaticBool(false), + }, + }, + }, "input": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[inputModel](ctx), Validators: []validator.Object{ objectvalidator.IsRequired(), }, - Attributes: map[string]schema.Attribute{ - "kinesis_video_stream_arn": schema.StringAttribute{ - 
CustomType: fwtypes.ARNType, - Required: true, + Blocks: map[string]schema.Block{ + "kinesis_video_stream": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), + Attributes: map[string]schema.Attribute{ + "kinesis_video_stream_arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Required: true, + }, + }, }, }, }, "notification_channel": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), Attributes: map[string]schema.Attribute{ "sns_topic_arn": schema.StringAttribute{ CustomType: fwtypes.ARNType, @@ -118,65 +130,90 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, }, + "regions_of_interest": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), + NestedObject: schema.NestedBlockObject{ + CustomType: fwtypes.NewObjectTypeOf[regionOfInterestModel](ctx), + Blocks: map[string]schema.Block{ + "bounding_box": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), + Attributes: map[string]schema.Attribute{ + "height": schema.NumberAttribute{}, + "left": schema.NumberAttribute{}, + "top": schema.NumberAttribute{}, + "width": schema.NumberAttribute{}, + }, + }, + "polygon": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), + Attributes: map[string]schema.Attribute{ + "x": schema.NumberAttribute{}, + "y": schema.NumberAttribute{}, + }, + }, + }, + }, + }, "output": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[outputModel](ctx), Validators: []validator.Object{ objectvalidator.IsRequired(), }, - Attributes: map[string]schema.Attribute{ - "kinesis_video_stream_arn": schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Required: true, + Blocks: map[string]schema.Block{ + "kinesis_data_stream": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[kinesisDataStreamModel](ctx), + Attributes: 
map[string]schema.Attribute{ + "arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Required: true, + }, + }, }, - "s3_bucket": schema.StringAttribute{ - Required: true, + "s3_destination": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[s3DestinationModel](ctx), + Attributes: map[string]schema.Attribute{ + names.AttrBucket: schema.StringAttribute{ + Optional: true, + }, + "key_prefix": schema.StringAttribute{ + Optional: true, + }, + }, }, - "s3_key_prefix": schema.StringAttribute{ - Required: true, + }, + }, + "settings": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, + Blocks: map[string]schema.Block{ + "connected_home": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), + Attributes: map[string]schema.Attribute{ + "labels": schema.ListAttribute{ + ElementType: types.StringType, + }, + "min_confidence": schema.NumberAttribute{}, + }, + }, + "face_search": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[faceSearchModel](ctx), + Attributes: map[string]schema.Attribute{ + "collection_id": schema.StringAttribute{}, + "min_confidence": schema.NumberAttribute{}, + }, }, }, }, }, - // Blocks: map[string]schema.Block{ - // "complex_argument": schema.ListNestedBlock{ - // // TIP: ==== LIST VALIDATORS ==== - // // List and set validators take the place of MaxItems and MinItems in - // // Plugin-Framework based resources. Use listvalidator.SizeAtLeast(1) to - // // make a nested object required. Similar to Plugin-SDK, complex objects - // // can be represented as lists or sets with listvalidator.SizeAtMost(1). 
- // // - // // For a complete mapping of Plugin-SDK to Plugin-Framework schema fields, - // // see: - // // https://developer.hashicorp.com/terraform/plugin/framework/migrating/attributes-blocks/blocks - // Validators: []validator.List{ - // listvalidator.SizeAtMost(1), - // }, - // NestedObject: schema.NestedBlockObject{ - // Attributes: map[string]schema.Attribute{ - // "nested_required": schema.StringAttribute{ - // Required: true, - // }, - // "nested_computed": schema.StringAttribute{ - // Computed: true, - // PlanModifiers: []planmodifier.String{ - // stringplanmodifier.UseStateForUnknown(), - // }, - // }, - // }, - // }, - // }, - // "timeouts": timeouts.Block(ctx, timeouts.Opts{ - // Create: true, - // Update: true, - // Delete: true, - // }), - // }, } } func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { conn := r.Meta().RekognitionClient(ctx) - var plan resourceStreamProcessorData + var plan resourceStreamProcessorDataModel resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) if resp.Diagnostics.HasError() { return @@ -207,7 +244,6 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat plan.ID = plan.ARN - // TIP: -- 6. Use a waiter to wait for create to complete createTimeout := r.CreateTimeout(ctx, plan.Timeouts) _, err = waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) if err != nil { @@ -218,23 +254,21 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - // TIP: -- 7. Save the request plan to response state resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) } func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { conn := r.Meta().RekognitionClient(ctx) - // TIP: -- 2. 
Fetch the state - var state resourceStreamProcessorData + var state resourceStreamProcessorDataModel resp.Diagnostics.Append(req.State.Get(ctx, &state)...) if resp.Diagnostics.HasError() { return } out, err := findStreamProcessorByID(ctx, conn, state.Name.ValueString()) - // TIP: -- 4. Remove resource from state if it is not found if tfresource.NotFound(err) { + resp.Diagnostics.Append(fwdiag.NewResourceNotFoundWarningDiagnostic(err)) resp.State.RemoveResource(ctx) return } @@ -246,34 +280,27 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq return } - state.Name = flex.StringToFramework(ctx, out.Name) - - // TIP: Setting a complex type. - // complexArgument, d := flattenComplexArgument(ctx, out.ComplexArgument) - // resp.Diagnostics.Append(d...) - // state.ComplexArgument = complexArgument + resp.Diagnostics.Append(fwflex.Flatten(ctx, out, &state)...) + if resp.Diagnostics.HasError() { + return + } - // TIP: -- 6. Set the state resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) } func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { conn := r.Meta().RekognitionClient(ctx) - // TIP: -- 2. Fetch the plan - var plan, state resourceStreamProcessorData + var plan, state resourceStreamProcessorDataModel resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) resp.Diagnostics.Append(req.State.Get(ctx, &state)...) if resp.Diagnostics.HasError() { return } - // TIP: -- 3. Populate a modify input structure and check for changes if !plan.Name.Equal(state.Name) { in := &rekognition.UpdateStreamProcessorInput{ - // TIP: Mandatory or fields that will always be present can be set when - // you create the Input structure. (Replace these with real fields.) 
Name: aws.String(plan.Name.ValueString()), } @@ -287,22 +314,8 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - // we have to call describe to get the new values - - // if out == nil || out.ResultMetadata == nil { - // resp.Diagnostics.AddError( - // create.ProblemStandardMessage(names.Rekognition, create.ErrActionUpdating, ResNameStreamProcessor, plan.Name.String(), nil), - // errors.New("empty output").Error(), - // ) - // return - // } - - // TIP: Using the output from the update function, re-set any computed attributes - // plan.ARN = flex.StringToFramework(ctx, out.Arn) - // plan.ID = flex.StringToFramework(ctx, out.StreamProcessor.StreamProcessorId) } - // TIP: -- 5. Use a waiter to wait for update to complete updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) _, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) if err != nil { @@ -313,7 +326,6 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - // TIP: -- 6. Save the request plan to response state resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) } @@ -321,7 +333,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet conn := r.Meta().RekognitionClient(ctx) // TIP: -- 2. Fetch the state - var state resourceStreamProcessorData + var state resourceStreamProcessorDataModel resp.Diagnostics.Append(req.State.Get(ctx, &state)...) 
if resp.Diagnostics.HasError() { return @@ -466,21 +478,21 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name return out, nil } -type resourceStreamProcessorData struct { - ARN types.String `tfsdk:"arn"` - DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` - ID types.String `tfsdk:"id"` - Input fwtypes.ObjectValueOf[inputModel] `tfsdk:"input"` - KmsKeyId types.String `tfsdk:"kms_key_id"` - NotificationChannel fwtypes.ObjectValueOf[notificationChannelModel] `tfsdk:"notification_channel"` - Name types.String `tfsdk:"name"` - Output fwtypes.ObjectValueOf[outputModel] `tfsdk:"output"` - RegionsOfInterest fwtypes.ObjectValueOf[[]regionOfInterestModel] `tfsdk:"regions_of_interest"` - RoleARN fwtypes.ARN `tfsdk:"role_arn"` - Settings fwtypes.ObjectValueOf[settingsModel] `tfsdk:"settings"` - Tags types.Map `tfsdk:"tags"` - TagsAll types.Map `tfsdk:"tags_all"` - Timeouts timeouts.Value `tfsdk:"timeouts"` +type resourceStreamProcessorDataModel struct { + ARN types.String `tfsdk:"arn"` + DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` + ID types.String `tfsdk:"id"` + Input fwtypes.ObjectValueOf[inputModel] `tfsdk:"input"` + KmsKeyId types.String `tfsdk:"kms_key_id"` + NotificationChannel fwtypes.ObjectValueOf[notificationChannelModel] `tfsdk:"notification_channel"` + Name types.String `tfsdk:"name"` + Output fwtypes.ObjectValueOf[outputModel] `tfsdk:"output"` + RegionsOfInterest fwtypes.ListNestedObjectValueOf[regionOfInterestModel] `tfsdk:"regions_of_interest"` + RoleARN fwtypes.ARN `tfsdk:"role_arn"` + Settings fwtypes.ObjectValueOf[settingsModel] `tfsdk:"settings"` + Tags types.Map `tfsdk:"tags"` + TagsAll types.Map `tfsdk:"tags_all"` + Timeouts timeouts.Value `tfsdk:"timeouts"` } type dataSharingPreferenceModel struct { From 5a5802178e510dda5e7a0460b9ecc289ec3ed52e Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 
2024 11:11:42 -0500 Subject: [PATCH 08/71] add regions of interest --- .../service/rekognition/stream_processor.go | 34 +++++++++++-------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 8378c8f7fc06..b65003da8394 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -133,22 +133,26 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "regions_of_interest": schema.ListNestedBlock{ CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), NestedObject: schema.NestedBlockObject{ - CustomType: fwtypes.NewObjectTypeOf[regionOfInterestModel](ctx), Blocks: map[string]schema.Block{ - "bounding_box": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), - Attributes: map[string]schema.Attribute{ - "height": schema.NumberAttribute{}, - "left": schema.NumberAttribute{}, - "top": schema.NumberAttribute{}, - "width": schema.NumberAttribute{}, - }, - }, - "polygon": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), - Attributes: map[string]schema.Attribute{ - "x": schema.NumberAttribute{}, - "y": schema.NumberAttribute{}, + "region": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[regionOfInterestModel](ctx), + Blocks: map[string]schema.Block{ + "bounding_box": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), + Attributes: map[string]schema.Attribute{ + "height": schema.NumberAttribute{}, + "left": schema.NumberAttribute{}, + "top": schema.NumberAttribute{}, + "width": schema.NumberAttribute{}, + }, + }, + "polygon": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), + Attributes: map[string]schema.Attribute{ + "x": schema.NumberAttribute{}, + "y": schema.NumberAttribute{}, + }, + }, }, }, }, 
From 3c50701f1613f23d690db6a98109e32b1ba80001 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 11:35:39 -0500 Subject: [PATCH 09/71] add descriptions to all props --- .../service/rekognition/stream_processor.go | 74 +++++++++++++------ 1 file changed, 50 insertions(+), 24 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index b65003da8394..d7407767a578 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -95,7 +95,8 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, Blocks: map[string]schema.Block{ "data_sharing_preference": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[dataSharingPreferenceModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[dataSharingPreferenceModel](ctx), + Description: "Shows whether you are sharing data with Rekognition to improve model performance.", Attributes: map[string]schema.Attribute{ "opt_in": schema.BoolAttribute{ Description: "Do you want to share data with Rekognition to improve model performance.", @@ -105,40 +106,47 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "input": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[inputModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[inputModel](ctx), + Description: "Information about the source streaming video.", Validators: []validator.Object{ objectvalidator.IsRequired(), }, Blocks: map[string]schema.Block{ "kinesis_video_stream": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), + Description: "Kinesis video stream stream that provides the source streaming video for a Amazon Rekognition Video stream processor.", Attributes: map[string]schema.Attribute{ "kinesis_video_stream_arn": 
schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Required: true, + CustomType: fwtypes.ARNType, + Description: "ARN of the Kinesis video stream stream that streams the source video.", + Required: true, }, }, }, }, }, "notification_channel": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), + Description: "The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the object detection results and completion status of a video analysis operation.", Attributes: map[string]schema.Attribute{ "sns_topic_arn": schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Optional: true, + Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", + CustomType: fwtypes.ARNType, + Optional: true, }, }, }, "regions_of_interest": schema.ListNestedBlock{ - CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), + CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), + Description: "Specifies locations in the frames where Amazon Rekognition checks for objects or people. 
You can specify up to 10 regions of interest, and each region has either a polygon or a bounding box.", NestedObject: schema.NestedBlockObject{ Blocks: map[string]schema.Block{ "region": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[regionOfInterestModel](ctx), Blocks: map[string]schema.Block{ "bounding_box": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), + Description: "The box representing a region of interest on screen.", Attributes: map[string]schema.Attribute{ "height": schema.NumberAttribute{}, "left": schema.NumberAttribute{}, @@ -147,7 +155,8 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "polygon": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), + Description: "Specifies a shape made up of up to 10 Point objects to define a region of interest.", Attributes: map[string]schema.Attribute{ "x": schema.NumberAttribute{}, "y": schema.NumberAttribute{}, @@ -159,53 +168,70 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "output": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[outputModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[outputModel](ctx), + Description: "Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results.", Validators: []validator.Object{ objectvalidator.IsRequired(), }, Blocks: map[string]schema.Block{ "kinesis_data_stream": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[kinesisDataStreamModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[kinesisDataStreamModel](ctx), + Description: "The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results.", Attributes: map[string]schema.Attribute{ "arn": 
schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Required: true, + CustomType: fwtypes.ARNType, + Description: "ARN of the output Amazon Kinesis Data Streams stream.", + Required: true, }, }, }, "s3_destination": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[s3DestinationModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[s3DestinationModel](ctx), + Description: "The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation.", Attributes: map[string]schema.Attribute{ names.AttrBucket: schema.StringAttribute{ - Optional: true, + Description: "The name of the Amazon S3 bucket you want to associate with the streaming video project.", + Optional: true, }, "key_prefix": schema.StringAttribute{ - Optional: true, + Description: "The prefix value of the location within the bucket that you want the information to be published to.", + Optional: true, }, }, }, }, }, "settings": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), + Description: "Input parameters used in a streaming video analyzed by a stream processor.", Validators: []validator.Object{ objectvalidator.IsRequired(), }, Blocks: map[string]schema.Block{ "connected_home": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), + Description: "Label detection settings to use on a streaming video.", Attributes: map[string]schema.Attribute{ "labels": schema.ListAttribute{ + Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", ElementType: types.StringType, }, - "min_confidence": schema.NumberAttribute{}, + "min_confidence": schema.NumberAttribute{ + Description: "The minimum confidence required to label an object in the video.", + }, }, }, "face_search": schema.SingleNestedBlock{ - CustomType: 
fwtypes.NewObjectTypeOf[faceSearchModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[faceSearchModel](ctx), + Description: "Face search settings to use on a streaming video.", Attributes: map[string]schema.Attribute{ - "collection_id": schema.StringAttribute{}, - "min_confidence": schema.NumberAttribute{}, + "collection_id": schema.StringAttribute{ + Description: "The ID of a collection that contains faces that you want to search for.", + }, + "face_match_threshold": schema.NumberAttribute{ + Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", + Validators: []validator.Number{}, + }, }, }, }, From 831e54edc0e3cfad242e3599489f2ae576eb87b5 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 11:39:34 -0500 Subject: [PATCH 10/71] wip --- internal/service/rekognition/stream_processor.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index d7407767a578..dd1bbf1bbb80 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -230,7 +230,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, "face_match_threshold": schema.NumberAttribute{ Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", - Validators: []validator.Number{}, }, }, }, From dad2f4e1e7d3258f24e4bd71ffd684473496f4ef Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 12:19:37 -0500 Subject: [PATCH 11/71] add more metadata --- .../service/rekognition/stream_processor.go | 98 +++++++++++++++---- 1 file changed, 79 insertions(+), 19 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index dd1bbf1bbb80..35b79168d7c4 100644 --- a/internal/service/rekognition/stream_processor.go +++ 
b/internal/service/rekognition/stream_processor.go @@ -13,6 +13,8 @@ import ( "github.com/aws/aws-sdk-go-v2/service/rekognition" awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework-validators/float64validator" + "github.com/hashicorp/terraform-plugin-framework-validators/int64validator" "github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/path" @@ -61,6 +63,11 @@ func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.Metad func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$`) nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) + collectionIdRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) + kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesis:([a-z\d-]+):\d{12}:.+$)`) + s3bucketRegex := regexache.MustCompile(`[0-9A-Za-z\.\-_]*`) + snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) + roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ @@ -69,7 +76,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "The identifier for your AWS Key Management Service key (AWS KMS key). 
You can supply the Amazon Resource Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key, or an alias ARN.", Optional: true, Validators: []validator.String{ - stringvalidator.LengthAtMost(2048), + stringvalidator.LengthBetween(1, 2048), stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$"), }, }, @@ -89,6 +96,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor.", CustomType: fwtypes.ARNType, Required: true, + Validators: []validator.String{ + stringvalidator.RegexMatches(roleArnRegex, "must conform to: arn:aws:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"), + }, }, names.AttrTags: tftags.TagsAttribute(), names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), @@ -108,9 +118,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "input": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[inputModel](ctx), Description: "Information about the source streaming video.", - Validators: []validator.Object{ - objectvalidator.IsRequired(), - }, Blocks: map[string]schema.Block{ "kinesis_video_stream": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), @@ -119,7 +126,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "kinesis_video_stream_arn": schema.StringAttribute{ CustomType: fwtypes.ARNType, Description: "ARN of the Kinesis video stream stream that streams the source video.", - Required: true, + Optional: true, + Validators: []validator.String{ + stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), + }, }, }, }, @@ -132,7 +142,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "sns_topic_arn": schema.StringAttribute{ 
Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", CustomType: fwtypes.ARNType, - Optional: true, + Required: true, + Validators: []validator.String{ + stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), + }, }, }, }, @@ -148,18 +161,48 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), Description: "The box representing a region of interest on screen.", Attributes: map[string]schema.Attribute{ - "height": schema.NumberAttribute{}, - "left": schema.NumberAttribute{}, - "top": schema.NumberAttribute{}, - "width": schema.NumberAttribute{}, + "height": schema.Float64Attribute{ + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "left": schema.Float64Attribute{ + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "top": schema.Float64Attribute{ + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "width": schema.Float64Attribute{ + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, }, }, "polygon": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), Description: "Specifies a shape made up of up to 10 Point objects to define a region of interest.", Attributes: map[string]schema.Attribute{ - "x": schema.NumberAttribute{}, - "y": schema.NumberAttribute{}, + "x": schema.Float64Attribute{ + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "y": schema.Float64Attribute{ + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, }, }, }, @@ -181,7 +224,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, 
req resource.Schem "arn": schema.StringAttribute{ CustomType: fwtypes.ARNType, Description: "ARN of the output Amazon Kinesis Data Streams stream.", - Required: true, + Optional: true, + Validators: []validator.String{ + stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesis:([a-z\\d-]+):\\d{12}:.+$)"), + }, }, }, }, @@ -192,10 +238,17 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem names.AttrBucket: schema.StringAttribute{ Description: "The name of the Amazon S3 bucket you want to associate with the streaming video project.", Optional: true, + Validators: []validator.String{ + stringvalidator.LengthBetween(3, 255), + stringvalidator.RegexMatches(s3bucketRegex, "must conform to: [0-9A-Za-z\\.\\-_]*"), + }, }, "key_prefix": schema.StringAttribute{ Description: "The prefix value of the location within the bucket that you want the information to be published to.", Optional: true, + Validators: []validator.String{ + stringvalidator.LengthAtMost(1024), + }, }, }, }, @@ -204,9 +257,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "settings": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), Description: "Input parameters used in a streaming video analyzed by a stream processor.", - Validators: []validator.Object{ - objectvalidator.IsRequired(), - }, Blocks: map[string]schema.Block{ "connected_home": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), @@ -214,10 +264,13 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Attributes: map[string]schema.Attribute{ "labels": schema.ListAttribute{ Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", - ElementType: types.StringType, + ElementType: types.StringType, //TODO: THIS SHOULD BE A CUSTOM ENUM TYPE "PERSON", "PET", "PACKAGE", and "ALL". 
+ Required: true, }, - "min_confidence": schema.NumberAttribute{ + "min_confidence": schema.Int64Attribute{ Description: "The minimum confidence required to label an object in the video.", + Validators: []validator.Int64{int64validator.Between(0, 100)}, + Optional: true, }, }, }, @@ -227,9 +280,16 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Attributes: map[string]schema.Attribute{ "collection_id": schema.StringAttribute{ Description: "The ID of a collection that contains faces that you want to search for.", + Validators: []validator.String{ + stringvalidator.LengthAtMost(2048), + stringvalidator.RegexMatches(collectionIdRegex, "must conform to: [a-zA-Z0-9_.\\-]+"), + }, + Optional: true, }, - "face_match_threshold": schema.NumberAttribute{ + "face_match_threshold": schema.Int64Attribute{ Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", + Validators: []validator.Int64{int64validator.Between(0, 100)}, + Optional: true, }, }, }, From b4e90fd640612c5afdc08b689f511f34eb1a31e2 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 12:32:49 -0500 Subject: [PATCH 12/71] add label enum --- internal/service/rekognition/stream_processor.go | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 35b79168d7c4..91f52cd8fe00 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -264,7 +264,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Attributes: map[string]schema.Attribute{ "labels": schema.ListAttribute{ Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", - ElementType: types.StringType, //TODO: THIS SHOULD BE A CUSTOM ENUM TYPE "PERSON", "PET", "PACKAGE", and "ALL". 
+ ElementType: fwtypes.StringEnumType[labelSettings](), Required: true, }, "min_confidence": schema.Int64Attribute{ @@ -645,3 +645,15 @@ type faceSearchModel struct { CollectionId types.String `tfsdk:"collection_id"` FaceMatchThreshold types.Number `tfsdk:"face_match_threshold"` } + +/** AWS SDK doesn't have a settings.connectedhome.labels enum available */ +type labelSettings string + +func (labelSettings) Values() []labelSettings { + return []labelSettings{ + "PERSON", + "PET", + "PACKAGE", + "ALL", + } +} From 5dd5d702c34d2b1e95c4c80a1606ac32ac5e96ec Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 12:36:10 -0500 Subject: [PATCH 13/71] add note --- internal/service/rekognition/stream_processor.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 91f52cd8fe00..341d2d45b76d 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -646,7 +646,11 @@ type faceSearchModel struct { FaceMatchThreshold types.Number `tfsdk:"face_match_threshold"` } -/** AWS SDK doesn't have a settings.connectedhome.labels enum available */ +/* +- AWS SDK doesn't have a CreateStreamProcessorInput.StreamProcessorSettings.ConnectedHomeSettings.Labels enum available as of 5/13/24 + +- see docs https://docs.aws.amazon.com/rekognition/latest/APIReference/API_ConnectedHomeSettings.html#API_ConnectedHomeSettings_Contents +*/ type labelSettings string func (labelSettings) Values() []labelSettings { From d88c43096142d699d4550cac063dc40e3cd6948c Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 13 May 2024 16:35:28 -0500 Subject: [PATCH 14/71] wip --- .../service/rekognition/stream_processor.go | 62 ++++++++++++------- 1 file changed, 39 insertions(+), 23 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 
341d2d45b76d..a66084efccc5 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -20,7 +20,6 @@ import ( "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" @@ -61,7 +60,7 @@ func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.Metad } func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$`) + kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$`) nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) collectionIdRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesis:([a-z\d-]+):\d{12}:.+$)`) @@ -77,7 +76,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Optional: true, Validators: []validator.String{ stringvalidator.LengthBetween(1, 2048), - stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$"), + stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$"), }, }, names.AttrID: framework.IDAttribute(), @@ -92,10 +91,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringplanmodifier.RequiresReplace(), }, }, - "role_arn": schema.StringAttribute{ + names.AttrRoleARN: schema.StringAttribute{ Description: "The Amazon Resource Number (ARN) of the IAM 
role that allows access to the stream processor.", - CustomType: fwtypes.ARNType, - Required: true, + // CustomType: fwtypes.ARNType, + Required: true, Validators: []validator.String{ stringvalidator.RegexMatches(roleArnRegex, "must conform to: arn:aws:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"), }, @@ -111,24 +110,31 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "opt_in": schema.BoolAttribute{ Description: "Do you want to share data with Rekognition to improve model performance.", Optional: true, - Default: booldefault.StaticBool(false), }, }, }, "input": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[inputModel](ctx), Description: "Information about the source streaming video.", + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, Blocks: map[string]schema.Block{ "kinesis_video_stream": schema.SingleNestedBlock{ + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), Description: "Kinesis video stream stream that provides the source streaming video for a Amazon Rekognition Video stream processor.", Attributes: map[string]schema.Attribute{ - "kinesis_video_stream_arn": schema.StringAttribute{ + "arn": schema.StringAttribute{ CustomType: fwtypes.ARNType, Description: "ARN of the Kinesis video stream stream that streams the source video.", - Optional: true, + Required: true, Validators: []validator.String{ - stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), + stringvalidator.All( + stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), + ), }, }, }, @@ -142,7 +148,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "sns_topic_arn": schema.StringAttribute{ Description: "The Amazon Resource Number (ARN) of the Amazon Amazon 
Simple Notification Service topic to which Amazon Rekognition posts the completion status.", CustomType: fwtypes.ARNType, - Required: true, + Optional: true, Validators: []validator.String{ stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), }, @@ -257,6 +263,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "settings": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), Description: "Input parameters used in a streaming video analyzed by a stream processor.", + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, Blocks: map[string]schema.Block{ "connected_home": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), @@ -264,8 +273,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Attributes: map[string]schema.Attribute{ "labels": schema.ListAttribute{ Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", + CustomType: fwtypes.ListOfStringType, ElementType: fwtypes.StringEnumType[labelSettings](), - Required: true, + Optional: true, + //TODO: validation for label values }, "min_confidence": schema.Int64Attribute{ Description: "The minimum confidence required to label an object in the video.", @@ -295,6 +306,11 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, }, + names.AttrTimeouts: timeouts.Block(ctx, timeouts.Opts{ + Create: true, + Update: true, + Delete: true, + }), }, } } @@ -374,7 +390,7 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq return } - resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) + // resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) 
} func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { @@ -577,7 +593,7 @@ type resourceStreamProcessorDataModel struct { Name types.String `tfsdk:"name"` Output fwtypes.ObjectValueOf[outputModel] `tfsdk:"output"` RegionsOfInterest fwtypes.ListNestedObjectValueOf[regionOfInterestModel] `tfsdk:"regions_of_interest"` - RoleARN fwtypes.ARN `tfsdk:"role_arn"` + RoleARN types.String `tfsdk:"role_arn"` //TODO ARN types? Settings fwtypes.ObjectValueOf[settingsModel] `tfsdk:"settings"` Tags types.Map `tfsdk:"tags"` TagsAll types.Map `tfsdk:"tags_all"` @@ -620,15 +636,15 @@ type regionOfInterestModel struct { } type boundingBoxModel struct { - Height types.Number `tfsdk:"height"` - Left types.Number `tfsdk:"left"` - Top types.Number `tfsdk:"top"` - Width types.Number `tfsdk:"width"` + Height types.Float64 `tfsdk:"height"` + Left types.Float64 `tfsdk:"left"` + Top types.Float64 `tfsdk:"top"` + Width types.Float64 `tfsdk:"width"` } type polygonModel struct { - X types.Number `tfsdk:"x"` - Y types.Number `tfsdk:"y"` + X types.Float64 `tfsdk:"x"` + Y types.Float64 `tfsdk:"y"` } type settingsModel struct { @@ -637,13 +653,13 @@ type settingsModel struct { } type connectedHomeModel struct { - Labels types.List `tfsdk:"labels"` - MinConfidence types.Number `tfsdk:"min_confidence"` + Labels fwtypes.ListValueOf[types.String] `tfsdk:"labels"` + MinConfidence types.Int64 `tfsdk:"min_confidence"` } type faceSearchModel struct { CollectionId types.String `tfsdk:"collection_id"` - FaceMatchThreshold types.Number `tfsdk:"face_match_threshold"` + FaceMatchThreshold types.Int64 `tfsdk:"face_match_threshold"` } /* From 1584e83970d5ac0c93a5038eda795d3304b085d5 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 14 May 2024 14:12:12 -0500 Subject: [PATCH 15/71] create works --- .../service/rekognition/stream_processor.go | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git 
a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index a66084efccc5..b7f00a161a01 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -63,7 +63,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$`) nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) collectionIdRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) - kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesis:([a-z\d-]+):\d{12}:.+$)`) + kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesisvideo:([a-z\d-]+):\d{12}:.+$)`) s3bucketRegex := regexache.MustCompile(`[0-9A-Za-z\.\-_]*`) snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) @@ -142,13 +142,16 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "notification_channel": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), + CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), + Validators: []validator.Object{ + objectvalidator.IsRequired(), + }, Description: "The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the object detection results and completion status of a video analysis operation.", Attributes: map[string]schema.Attribute{ "sns_topic_arn": schema.StringAttribute{ Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", CustomType: fwtypes.ARNType, - Optional: true, + Required: true, Validators: []validator.String{ stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), }, @@ -325,6 +328,7 @@ func (r 
*resourceStreamProcessor) Create(ctx context.Context, req resource.Creat } in := &rekognition.CreateStreamProcessorInput{} + in.Tags = getTagsIn(ctx) resp.Diagnostics.Append(fwflex.Expand(ctx, plan, in)...) if resp.Diagnostics.HasError() { @@ -347,6 +351,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } + plan.ARN = fwflex.StringToFramework(ctx, out.StreamProcessorArn) plan.ID = plan.ARN createTimeout := r.CreateTimeout(ctx, plan.Timeouts) @@ -478,13 +483,14 @@ func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource. resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) } +func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resource.ModifyPlanRequest, response *resource.ModifyPlanResponse) { + r.SetTagsAll(ctx, request, response) +} + func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{}, - Target: []string{ - string(awstypes.StreamProcessorStatusStarting), - string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed)}, + Pending: []string{}, + Target: []string{string(awstypes.StreamProcessorStatusStopped)}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -501,12 +507,8 @@ func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, i func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{string(awstypes.StreamProcessorStatusUpdating)}, - Target: []string{ - string(awstypes.StreamProcessorStatusStarting), - string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed), - }, + Pending: 
[]string{string(awstypes.StreamProcessorStatusUpdating)}, + Target: []string{string(awstypes.StreamProcessorStatusStopped)}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, From c44f586176c74c488d52d9f651c0c9351faf2aba Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 14 May 2024 14:15:56 -0500 Subject: [PATCH 16/71] add descriptions --- .../service/rekognition/stream_processor.go | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index b7f00a161a01..7ef94db10ac2 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -171,25 +171,29 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "The box representing a region of interest on screen.", Attributes: map[string]schema.Attribute{ "height": schema.Float64Attribute{ - Optional: true, + Optional: true, + Description: "Height of the bounding box as a ratio of the overall image height.", Validators: []validator.Float64{ float64validator.Between(0.0, 1.0), }, }, "left": schema.Float64Attribute{ - Optional: true, + Description: "Left coordinate of the bounding box as a ratio of overall image width.", + Optional: true, Validators: []validator.Float64{ float64validator.Between(0.0, 1.0), }, }, "top": schema.Float64Attribute{ - Optional: true, + Description: "Top coordinate of the bounding box as a ratio of overall image height.", + Optional: true, Validators: []validator.Float64{ float64validator.Between(0.0, 1.0), }, }, "width": schema.Float64Attribute{ - Optional: true, + Description: "Width of the bounding box as a ratio of the overall image width.", + Optional: true, Validators: []validator.Float64{ float64validator.Between(0.0, 1.0), }, @@ -201,13 +205,15 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req 
resource.Schem Description: "Specifies a shape made up of up to 10 Point objects to define a region of interest.", Attributes: map[string]schema.Attribute{ "x": schema.Float64Attribute{ - Optional: true, + Description: "The value of the X coordinate for a point on a Polygon.", + Optional: true, Validators: []validator.Float64{ float64validator.Between(0.0, 1.0), }, }, "y": schema.Float64Attribute{ - Optional: true, + Description: "The value of the Y coordinate for a point on a Polygon.", + Optional: true, Validators: []validator.Float64{ float64validator.Between(0.0, 1.0), }, From d0db26e984b475d028e9bf849e1207f8edff1d32 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 14 May 2024 14:37:23 -0500 Subject: [PATCH 17/71] working on update --- .../service/rekognition/stream_processor.go | 34 +++++++++++++++++-- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 7ef94db10ac2..5c021c0d49b3 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -78,6 +78,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringvalidator.LengthBetween(1, 2048), stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$"), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, names.AttrID: framework.IDAttribute(), names.AttrName: schema.StringAttribute{ @@ -98,6 +101,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.String{ stringvalidator.RegexMatches(roleArnRegex, "must conform to: arn:aws:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, names.AttrTags: tftags.TagsAttribute(), names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), @@ -136,6 +142,9 @@ 
func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), ), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, }, }, @@ -155,6 +164,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.String{ stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, }, }, @@ -243,6 +255,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.String{ stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesis:([a-z\\d-]+):\\d{12}:.+$)"), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, }, }, @@ -257,6 +272,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringvalidator.LengthBetween(3, 255), stringvalidator.RegexMatches(s3bucketRegex, "must conform to: [0-9A-Za-z\\.\\-_]*"), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, "key_prefix": schema.StringAttribute{ Description: "The prefix value of the location within the bucket that you want the information to be published to.", @@ -264,6 +282,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.String{ stringvalidator.LengthAtMost(1024), }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, }, }, @@ -400,8 +421,6 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq if resp.Diagnostics.HasError() { return } - - // resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) 
} func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { @@ -414,12 +433,21 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - if !plan.Name.Equal(state.Name) { + if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || + !plan.Settings.Equal(state.Settings) || + !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { in := &rekognition.UpdateStreamProcessorInput{ Name: aws.String(plan.Name.ValueString()), } + if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { + resp.Diagnostics.Append(fwflex.Expand(ctx, plan.DataSharingPreference, in.DataSharingPreferenceForUpdate)...) + if resp.Diagnostics.HasError() { + return + } + } + // TIP: -- 4. Call the AWS modify/update function _, err := conn.UpdateStreamProcessor(ctx, in) if err != nil { From fc05137d2250c0dd3a47b0740fc6b82582794c2d Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 15 May 2024 09:49:09 -0500 Subject: [PATCH 18/71] wip --- .../service/rekognition/stream_processor.go | 20 ++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 5c021c0d49b3..5ffca3b86512 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -433,6 +433,7 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } + // the update api uses slightly different property names, so we can't just flex the state into the request :( if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || !plan.Settings.Equal(state.Settings) || !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { @@ -442,12 +443,25 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { - 
resp.Diagnostics.Append(fwflex.Expand(ctx, plan.DataSharingPreference, in.DataSharingPreferenceForUpdate)...) - if resp.Diagnostics.HasError() { - return + dsp, diags := plan.DataSharingPreference.ToPtr(ctx) + resp.Diagnostics.Append(diags...) + in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ + OptIn: dsp.OptIn.ValueBool(), } } + if !plan.Settings.Equal(state.Settings) { + p, diags := plan.Settings.ToPtr(ctx) + resp.Diagnostics.Append(diags...) + + ch, diags := p.ConnectedHome.ToPtr(ctx) + resp.Diagnostics.Append(diags...) + + // s := &awstypes.StreamProcessorSettingsForUpdate{} + + in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{} + } + // TIP: -- 4. Call the AWS modify/update function _, err := conn.UpdateStreamProcessor(ctx, in) if err != nil { From 5035323aa6deb34f8755e2eaf3dc91638d2bfdcf Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 15 May 2024 12:45:48 -0500 Subject: [PATCH 19/71] wip --- .../service/rekognition/stream_processor.go | 62 ++++++++++++------- 1 file changed, 41 insertions(+), 21 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 5ffca3b86512..da8b700c414b 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -14,7 +14,6 @@ import ( awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" "github.com/hashicorp/terraform-plugin-framework-validators/float64validator" - "github.com/hashicorp/terraform-plugin-framework-validators/int64validator" "github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/path" @@ -308,10 +307,12 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem 
Optional: true, //TODO: validation for label values }, - "min_confidence": schema.Int64Attribute{ + "min_confidence": schema.Float64Attribute{ Description: "The minimum confidence required to label an object in the video.", - Validators: []validator.Int64{int64validator.Between(0, 100)}, - Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 100.0), + }, + Optional: true, }, }, }, @@ -327,10 +328,12 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, Optional: true, }, - "face_match_threshold": schema.Int64Attribute{ + "face_match_threshold": schema.Float64Attribute{ Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", - Validators: []validator.Int64{int64validator.Between(0, 100)}, - Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 100.0), + }, + Optional: true, }, }, }, @@ -433,33 +436,50 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - // the update api uses slightly different property names, so we can't just flex the state into the request :( + // the update api uses different property names(ForUpdate) and request shape, so we can't just flex into the request :( if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || !plan.Settings.Equal(state.Settings) || !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { in := &rekognition.UpdateStreamProcessorInput{ - Name: aws.String(plan.Name.ValueString()), + Name: aws.String(plan.Name.ValueString()), + ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, } if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { - dsp, diags := plan.DataSharingPreference.ToPtr(ctx) - resp.Diagnostics.Append(diags...) 
+ optIn := plan.DataSharingPreference.ObjectValue.Attributes()["opt_in"].(types.Bool) in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ - OptIn: dsp.OptIn.ValueBool(), + OptIn: optIn.ValueBool(), } } if !plan.Settings.Equal(state.Settings) { - p, diags := plan.Settings.ToPtr(ctx) - resp.Diagnostics.Append(diags...) + in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{ + ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, + } + + planConnectedHome := plan.Settings.Attributes()["connected_home"].(fwtypes.ObjectValueOf[connectedHomeModel]) + stateConnectedHome := state.Settings.Attributes()["connected_home"].(fwtypes.ObjectValueOf[connectedHomeModel]) + + planMinConfidence := planConnectedHome.Attributes()["min_confidence"].(types.Float64) + stateMinConfidence := stateConnectedHome.Attributes()["min_confidence"].(types.Float64) - ch, diags := p.ConnectedHome.ToPtr(ctx) - resp.Diagnostics.Append(diags...) + if !planMinConfidence.Equal(stateMinConfidence) { + if !stateMinConfidence.IsNull() && planMinConfidence.IsNull() { + in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteConnectedHomeMinConfidence) + } - // s := &awstypes.StreamProcessorSettingsForUpdate{} + if !planMinConfidence.IsNull() { + in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(planMinConfidence.ValueFloat64())) + } + } + + planLabels := planConnectedHome.Attributes()["labels"].(fwtypes.ListValueOf[types.String]) + stateLabels := stateConnectedHome.Attributes()["labels"].(fwtypes.ListValueOf[types.String]) - in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{} + if !planLabels.Equal(stateLabels) { + in.SettingsForUpdate.ConnectedHomeForUpdate.Labels = fwflex.ExpandFrameworkStringValueList(ctx, planLabels) + } } // TIP: -- 4. 
Call the AWS modify/update function @@ -704,12 +724,12 @@ type settingsModel struct { type connectedHomeModel struct { Labels fwtypes.ListValueOf[types.String] `tfsdk:"labels"` - MinConfidence types.Int64 `tfsdk:"min_confidence"` + MinConfidence types.Float64 `tfsdk:"min_confidence"` } type faceSearchModel struct { - CollectionId types.String `tfsdk:"collection_id"` - FaceMatchThreshold types.Int64 `tfsdk:"face_match_threshold"` + CollectionId types.String `tfsdk:"collection_id"` + FaceMatchThreshold types.Float64 `tfsdk:"face_match_threshold"` } /* From 939c4ddae4c6ce55c66ff4d2e6a4aecac241b11c Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 15 May 2024 15:05:48 -0500 Subject: [PATCH 20/71] wip --- .../service/rekognition/stream_processor.go | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index da8b700c414b..f03d8bcabde1 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -436,6 +436,25 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } + planMap := &rekognition.CreateStreamProcessorInput{} + stateMap := &rekognition.CreateStreamProcessorInput{} + resp.Diagnostics.Append(fwflex.Expand(ctx, plan, planMap)...) + resp.Diagnostics.Append(fwflex.Expand(ctx, plan, stateMap)...) 
+ if resp.Diagnostics.HasError() { + return + } + + in := &rekognition.UpdateStreamProcessorInput{ + Name: planMap.Name, + ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, + } + + if planMap.DataSharingPreference.OptIn != stateMap.DataSharingPreference.OptIn { + in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ + OptIn: planMap.DataSharingPreference.OptIn, + } + } + // the update api uses different property names(ForUpdate) and request shape, so we can't just flex into the request :( if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || !plan.Settings.Equal(state.Settings) || From 6b0b5832d56cb093089abe5fc886d19528ab639b Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 09:26:07 -0500 Subject: [PATCH 21/71] use ptrs --- .../service/rekognition/stream_processor.go | 99 ++++++++++--------- 1 file changed, 52 insertions(+), 47 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index f03d8bcabde1..53843e6914a7 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -16,6 +16,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework-validators/float64validator" "github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" @@ -333,7 +334,11 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.Float64{ float64validator.Between(0.0, 100.0), }, - Optional: true, + Optional: true, + PlanModifiers: []planmodifier.Float64{ + + //TODO: Requires replacement + }, }, }, }, @@ 
-426,6 +431,16 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq } } +func unwrapObjectValueOf[T any](plan fwtypes.ObjectValueOf[T], state fwtypes.ObjectValueOf[T], diagnostics diag.Diagnostics, ctx context.Context) (*T, *T) { + ptrPlan, diags := plan.ToPtr(ctx) + diagnostics.Append(diags...) + + ptrState, diags := state.ToPtr(ctx) + diagnostics.Append(diags...) + + return ptrPlan, ptrState +} + func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { conn := r.Meta().RekognitionClient(ctx) @@ -436,69 +451,59 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - planMap := &rekognition.CreateStreamProcessorInput{} - stateMap := &rekognition.CreateStreamProcessorInput{} - resp.Diagnostics.Append(fwflex.Expand(ctx, plan, planMap)...) - resp.Diagnostics.Append(fwflex.Expand(ctx, plan, stateMap)...) - if resp.Diagnostics.HasError() { - return - } - in := &rekognition.UpdateStreamProcessorInput{ - Name: planMap.Name, + Name: plan.Name.ValueStringPointer(), ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, } - if planMap.DataSharingPreference.OptIn != stateMap.DataSharingPreference.OptIn { - in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ - OptIn: planMap.DataSharingPreference.OptIn, + if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { + dspPlan, dspState := unwrapObjectValueOf(plan.DataSharingPreference, state.DataSharingPreference, resp.Diagnostics, ctx) + if resp.Diagnostics.HasError() { + return } - } - // the update api uses different property names(ForUpdate) and request shape, so we can't just flex into the request :( - if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || - !plan.Settings.Equal(state.Settings) || - !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { + if !dspPlan.OptIn.Equal(dspState.OptIn) { + 
in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ + OptIn: dspPlan.OptIn.ValueBool(), + } + } + } - in := &rekognition.UpdateStreamProcessorInput{ - Name: aws.String(plan.Name.ValueString()), - ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, + if !plan.Settings.Equal(state.Settings) { + in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{ + ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, } - if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { - optIn := plan.DataSharingPreference.ObjectValue.Attributes()["opt_in"].(types.Bool) - in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ - OptIn: optIn.ValueBool(), - } + settingsPlan, settingsState := unwrapObjectValueOf(plan.Settings, state.Settings, resp.Diagnostics, ctx) + if resp.Diagnostics.HasError() { + return } - if !plan.Settings.Equal(state.Settings) { - in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{ - ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, + connectedHomePlan, connectedHomeState := unwrapObjectValueOf(settingsPlan.ConnectedHome, settingsState.ConnectedHome, resp.Diagnostics, ctx) + if !connectedHomePlan.MinConfidence.Equal(connectedHomeState.MinConfidence) { + if !connectedHomePlan.MinConfidence.IsNull() && connectedHomeState.MinConfidence.IsNull() { + in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteConnectedHomeMinConfidence) } - planConnectedHome := plan.Settings.Attributes()["connected_home"].(fwtypes.ObjectValueOf[connectedHomeModel]) - stateConnectedHome := state.Settings.Attributes()["connected_home"].(fwtypes.ObjectValueOf[connectedHomeModel]) - - planMinConfidence := planConnectedHome.Attributes()["min_confidence"].(types.Float64) - stateMinConfidence := stateConnectedHome.Attributes()["min_confidence"].(types.Float64) + if !connectedHomePlan.MinConfidence.IsNull() { + 
in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(connectedHomePlan.MinConfidence.ValueFloat64())) + } + } - if !planMinConfidence.Equal(stateMinConfidence) { - if !stateMinConfidence.IsNull() && planMinConfidence.IsNull() { - in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteConnectedHomeMinConfidence) - } + if !connectedHomePlan.Labels.Equal(connectedHomeState.Labels) { + in.SettingsForUpdate.ConnectedHomeForUpdate.Labels = fwflex.ExpandFrameworkStringValueList(ctx, connectedHomePlan.Labels) + } - if !planMinConfidence.IsNull() { - in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(planMinConfidence.ValueFloat64())) - } - } + } - planLabels := planConnectedHome.Attributes()["labels"].(fwtypes.ListValueOf[types.String]) - stateLabels := stateConnectedHome.Attributes()["labels"].(fwtypes.ListValueOf[types.String]) + // the update api uses different property names(ForUpdate) and request shape, so we can't just flex into the request :( + if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || + !plan.Settings.Equal(state.Settings) || + !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { - if !planLabels.Equal(stateLabels) { - in.SettingsForUpdate.ConnectedHomeForUpdate.Labels = fwflex.ExpandFrameworkStringValueList(ctx, planLabels) - } + in := &rekognition.UpdateStreamProcessorInput{ + Name: aws.String(plan.Name.ValueString()), + ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, } // TIP: -- 4. 
Call the AWS modify/update function From 978371925625cc7ff5608a5de2afea147f7efe99 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 10:20:13 -0500 Subject: [PATCH 22/71] wip --- .../service/rekognition/stream_processor.go | 136 ++++++++++-------- 1 file changed, 77 insertions(+), 59 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 53843e6914a7..0e4c9194c168 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -14,12 +14,14 @@ import ( awstypes "github.com/aws/aws-sdk-go-v2/service/rekognition/types" "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" "github.com/hashicorp/terraform-plugin-framework-validators/float64validator" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" @@ -304,9 +306,11 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "labels": schema.ListAttribute{ Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", CustomType: fwtypes.ListOfStringType, - ElementType: fwtypes.StringEnumType[labelSettings](), - Optional: true, - //TODO: validation for label values + 
Required: true, + Validators: []validator.List{ + listvalidator.SizeAtLeast(1), + listvalidator.ValueStringsAre(stringvalidator.OneOf(connectedHomeLabels()...)), + }, }, "min_confidence": schema.Float64Attribute{ Description: "The minimum confidence required to label an object in the video.", @@ -328,16 +332,18 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringvalidator.RegexMatches(collectionIdRegex, "must conform to: [a-zA-Z0-9_.\\-]+"), }, Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, "face_match_threshold": schema.Float64Attribute{ Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", Validators: []validator.Float64{ float64validator.Between(0.0, 100.0), }, - Optional: true, + Optional: true, PlanModifiers: []planmodifier.Float64{ - - //TODO: Requires replacement + float64planmodifier.RequiresReplace(), }, }, }, @@ -431,16 +437,6 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq } } -func unwrapObjectValueOf[T any](plan fwtypes.ObjectValueOf[T], state fwtypes.ObjectValueOf[T], diagnostics diag.Diagnostics, ctx context.Context) (*T, *T) { - ptrPlan, diags := plan.ToPtr(ctx) - diagnostics.Append(diags...) - - ptrState, diags := state.ToPtr(ctx) - diagnostics.Append(diags...) 
- - return ptrPlan, ptrState -} - func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { conn := r.Meta().RekognitionClient(ctx) @@ -451,62 +447,58 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - in := &rekognition.UpdateStreamProcessorInput{ - Name: plan.Name.ValueStringPointer(), - ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, - } + if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || + !plan.Settings.Equal(state.Settings) || + !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { - if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { - dspPlan, dspState := unwrapObjectValueOf(plan.DataSharingPreference, state.DataSharingPreference, resp.Diagnostics, ctx) - if resp.Diagnostics.HasError() { - return + in := &rekognition.UpdateStreamProcessorInput{ + Name: plan.Name.ValueStringPointer(), + ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, } - if !dspPlan.OptIn.Equal(dspState.OptIn) { - in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ - OptIn: dspPlan.OptIn.ValueBool(), + if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { + dspPlan, dspState := unwrapObjectValueOf(plan.DataSharingPreference, state.DataSharingPreference, resp.Diagnostics, ctx) + if resp.Diagnostics.HasError() { + return } - } - } - if !plan.Settings.Equal(state.Settings) { - in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{ - ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, - } - - settingsPlan, settingsState := unwrapObjectValueOf(plan.Settings, state.Settings, resp.Diagnostics, ctx) - if resp.Diagnostics.HasError() { - return + if !dspPlan.OptIn.Equal(dspState.OptIn) { + in.DataSharingPreferenceForUpdate = &awstypes.StreamProcessorDataSharingPreference{ + OptIn: dspPlan.OptIn.ValueBool(), + } + } } - connectedHomePlan, 
connectedHomeState := unwrapObjectValueOf(settingsPlan.ConnectedHome, settingsState.ConnectedHome, resp.Diagnostics, ctx) - if !connectedHomePlan.MinConfidence.Equal(connectedHomeState.MinConfidence) { - if !connectedHomePlan.MinConfidence.IsNull() && connectedHomeState.MinConfidence.IsNull() { - in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteConnectedHomeMinConfidence) + if !plan.Settings.Equal(state.Settings) { + in.SettingsForUpdate = &awstypes.StreamProcessorSettingsForUpdate{ + ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, } - if !connectedHomePlan.MinConfidence.IsNull() { - in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(connectedHomePlan.MinConfidence.ValueFloat64())) + settingsPlan, settingsState := unwrapObjectValueOf(plan.Settings, state.Settings, resp.Diagnostics, ctx) + if resp.Diagnostics.HasError() { + return } - } - if !connectedHomePlan.Labels.Equal(connectedHomeState.Labels) { - in.SettingsForUpdate.ConnectedHomeForUpdate.Labels = fwflex.ExpandFrameworkStringValueList(ctx, connectedHomePlan.Labels) - } + connectedHomePlan, connectedHomeState := unwrapObjectValueOf(settingsPlan.ConnectedHome, settingsState.ConnectedHome, resp.Diagnostics, ctx) + if resp.Diagnostics.HasError() { + return + } - } + if !connectedHomePlan.MinConfidence.Equal(connectedHomeState.MinConfidence) { + if !connectedHomePlan.MinConfidence.IsNull() && connectedHomeState.MinConfidence.IsNull() { + in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteConnectedHomeMinConfidence) + } - // the update api uses different property names(ForUpdate) and request shape, so we can't just flex into the request :( - if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || - !plan.Settings.Equal(state.Settings) || - !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { + if !connectedHomePlan.MinConfidence.IsNull() { + 
in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(connectedHomePlan.MinConfidence.ValueFloat64())) + } + } - in := &rekognition.UpdateStreamProcessorInput{ - Name: aws.String(plan.Name.ValueString()), - ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, + if !connectedHomePlan.Labels.Equal(connectedHomeState.Labels) { + in.SettingsForUpdate.ConnectedHomeForUpdate.Labels = fwflex.ExpandFrameworkStringValueList(ctx, connectedHomePlan.Labels) + } } - // TIP: -- 4. Call the AWS modify/update function _, err := conn.UpdateStreamProcessor(ctx, in) if err != nil { resp.Diagnostics.AddError( @@ -515,7 +507,6 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat ) return } - } updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) @@ -677,6 +668,16 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name return out, nil } +func unwrapObjectValueOf[T any](plan fwtypes.ObjectValueOf[T], state fwtypes.ObjectValueOf[T], diagnostics diag.Diagnostics, ctx context.Context) (*T, *T) { + ptrPlan, diags := plan.ToPtr(ctx) + diagnostics.Append(diags...) + + ptrState, diags := state.ToPtr(ctx) + diagnostics.Append(diags...) 
+ + return ptrPlan, ptrState +} + type resourceStreamProcessorDataModel struct { ARN types.String `tfsdk:"arn"` DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` @@ -763,11 +764,28 @@ type faceSearchModel struct { */ type labelSettings string -func (labelSettings) Values() []labelSettings { - return []labelSettings{ +func (labelSettings) Values() []string { + return []string{ "PERSON", "PET", "PACKAGE", "ALL", } } + +const ( + person_label = "PERSON" + pet_label = "PET" + package_label = "PACKAGE" + all_label = "ALL" +) + +// OAuthFlowType_Values returns all elements of the OAuthFlowType enum +func connectedHomeLabels() []string { + return []string{ + person_label, + pet_label, + package_label, + all_label, + } +} From 79d39e24cf9a28a18c0e4c40afc970a287bc7461 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 13:07:28 -0500 Subject: [PATCH 23/71] wip --- .../service/rekognition/stream_processor.go | 131 ++++++++---------- 1 file changed, 60 insertions(+), 71 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 0e4c9194c168..e91713c2e766 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -173,63 +173,64 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "regions_of_interest": schema.ListNestedBlock{ - CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), - Description: "Specifies locations in the frames where Amazon Rekognition checks for objects or people. 
You can specify up to 10 regions of interest, and each region has either a polygon or a bounding box.", + CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), NestedObject: schema.NestedBlockObject{ Blocks: map[string]schema.Block{ - "region": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[regionOfInterestModel](ctx), - Blocks: map[string]schema.Block{ - "bounding_box": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), - Description: "The box representing a region of interest on screen.", - Attributes: map[string]schema.Attribute{ - "height": schema.Float64Attribute{ - Optional: true, - Description: "Height of the bounding box as a ratio of the overall image height.", - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), - }, - }, - "left": schema.Float64Attribute{ - Description: "Left coordinate of the bounding box as a ratio of overall image width.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), - }, - }, - "top": schema.Float64Attribute{ - Description: "Top coordinate of the bounding box as a ratio of overall image height.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), - }, - }, - "width": schema.Float64Attribute{ - Description: "Width of the bounding box as a ratio of the overall image width.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), - }, - }, + "bounding_box": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), + Description: "The box representing a region of interest on screen.", + Attributes: map[string]schema.Attribute{ + "height": schema.Float64Attribute{ + Optional: true, + Description: "Height of the bounding box as a ratio of the overall image height.", + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), }, }, - "polygon": schema.SingleNestedBlock{ - CustomType: 
fwtypes.NewObjectTypeOf[polygonModel](ctx), - Description: "Specifies a shape made up of up to 10 Point objects to define a region of interest.", - Attributes: map[string]schema.Attribute{ - "x": schema.Float64Attribute{ - Description: "The value of the X coordinate for a point on a Polygon.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), + "left": schema.Float64Attribute{ + Description: "Left coordinate of the bounding box as a ratio of overall image width.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "top": schema.Float64Attribute{ + Description: "Top coordinate of the bounding box as a ratio of overall image height.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "width": schema.Float64Attribute{ + Description: "Width of the bounding box as a ratio of the overall image width.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + }, + }, + "polygon": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[polygonModel](ctx), + Description: "Specifies a shape made up of up to 10 Point objects to define a region of interest.", + NestedObject: schema.NestedBlockObject{ + Blocks: map[string]schema.Block{ + "polygon_region": schema.SingleNestedBlock{ + CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), + Attributes: map[string]schema.Attribute{ + "x": schema.Float64Attribute{ + Description: "The value of the X coordinate for a point on a Polygon.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, }, - }, - "y": schema.Float64Attribute{ - Description: "The value of the Y coordinate for a point on a Polygon.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), + "y": schema.Float64Attribute{ + Description: "The value of the Y coordinate for a point on a 
Polygon.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, }, }, }, @@ -726,8 +727,8 @@ type s3DestinationModel struct { } type regionOfInterestModel struct { - BoundingBox fwtypes.ObjectValueOf[boundingBoxModel] `tfsdk:"bounding_box"` - Polygon fwtypes.ObjectValueOf[polygonModel] `tfsdk:"polygon"` + BoundingBox fwtypes.ObjectValueOf[boundingBoxModel] `tfsdk:"bounding_box"` + Polygon fwtypes.ListNestedObjectValueOf[polygonModel] `tfsdk:"polygon"` } type boundingBoxModel struct { @@ -757,22 +758,6 @@ type faceSearchModel struct { FaceMatchThreshold types.Float64 `tfsdk:"face_match_threshold"` } -/* -- AWS SDK doesn't have a CreateStreamProcessorInput.StreamProcessorSettings.ConnectedHomeSettings.Labels enum available as of 5/13/24 - -- see docs https://docs.aws.amazon.com/rekognition/latest/APIReference/API_ConnectedHomeSettings.html#API_ConnectedHomeSettings_Contents -*/ -type labelSettings string - -func (labelSettings) Values() []string { - return []string{ - "PERSON", - "PET", - "PACKAGE", - "ALL", - } -} - const ( person_label = "PERSON" pet_label = "PET" @@ -780,7 +765,11 @@ const ( all_label = "ALL" ) -// OAuthFlowType_Values returns all elements of the OAuthFlowType enum +/* +- AWS SDK doesn't have a CreateStreamProcessorInput.StreamProcessorSettings.ConnectedHomeSettings.Labels enum available as of 5/13/24 + +- see docs https://docs.aws.amazon.com/rekognition/latest/APIReference/API_ConnectedHomeSettings.html#API_ConnectedHomeSettings_Contents +*/ func connectedHomeLabels() []string { return []string{ person_label, From 5b66b70572d0c6e13d3b43cc3cfe5b2f7ff78f02 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 15:00:39 -0500 Subject: [PATCH 24/71] wip --- .../service/rekognition/stream_processor.go | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 
e91713c2e766..2f58f83929ea 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -174,6 +174,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, "regions_of_interest": schema.ListNestedBlock{ CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), + Validators: []validator.List{ + listvalidator.SizeAtMost(10), + }, NestedObject: schema.NestedBlockObject{ Blocks: map[string]schema.Block{ "bounding_box": schema.SingleNestedBlock{ @@ -500,6 +503,68 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } } + if plan.RegionsOfInterest.IsNull() && !state.RegionsOfInterest.IsNull() { + in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteRegionsOfInterest) + } + + if !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { + var regions []awstypes.RegionOfInterest + + planRegions, _ := unwrapListObjectValueOf(plan.RegionsOfInterest, state.RegionsOfInterest, resp.Diagnostics, ctx) + + for i := 0; i < len(planRegions); i++ { + planRegion := planRegions[i] + region := &awstypes.RegionOfInterest{} + + if !planRegion.BoundingBox.IsNull() { + boundingBox, diags := planRegion.BoundingBox.ToPtr(ctx) + resp.Diagnostics.Append(diags...) 
+ + region.BoundingBox = &awstypes.BoundingBox{} + + if !boundingBox.Top.IsNull() { + region.BoundingBox.Top = aws.Float32(float32(boundingBox.Top.ValueFloat64())) + } + + if !boundingBox.Left.IsNull() { + region.BoundingBox.Left = aws.Float32(float32(boundingBox.Left.ValueFloat64())) + } + + if !boundingBox.Height.IsNull() { + region.BoundingBox.Height = aws.Float32(float32(boundingBox.Height.ValueFloat64())) + } + + if !boundingBox.Width.IsNull() { + region.BoundingBox.Width = aws.Float32(float32(boundingBox.Width.ValueFloat64())) + } + } + + if !planRegion.Polygon.IsNull() { + polygons, diags := planRegion.Polygon.ToSlice(ctx) + resp.Diagnostics.Append(diags...) + + plannedPolygons := make([]awstypes.Point, len(polygons)) + + for i := 0; i < len(polygons); i++ { + polygon := polygons[i] + plannedPolygons[i] = awstypes.Point{} + + if !polygon.X.IsNull() { + plannedPolygons[i].X = aws.Float32(float32(polygon.X.ValueFloat64())) + } + + if !polygon.Y.IsNull() { + plannedPolygons[i].Y = aws.Float32(float32(polygon.Y.ValueFloat64())) + } + } + region.Polygon = plannedPolygons + } + regions = append(regions, *region) + } + + in.RegionsOfInterestForUpdate = regions + } + _, err := conn.UpdateStreamProcessor(ctx, in) if err != nil { resp.Diagnostics.AddError( @@ -679,6 +744,16 @@ func unwrapObjectValueOf[T any](plan fwtypes.ObjectValueOf[T], state fwtypes.Obj return ptrPlan, ptrState } +func unwrapListObjectValueOf[T any](plan fwtypes.ListNestedObjectValueOf[T], state fwtypes.ListNestedObjectValueOf[T], diagnostics diag.Diagnostics, ctx context.Context) ([]*T, []*T) { + ptrPlan, diags := plan.ToSlice(ctx) + diagnostics.Append(diags...) + + ptrState, diags := state.ToSlice(ctx) + diagnostics.Append(diags...) 
+ + return ptrPlan, ptrState +} + type resourceStreamProcessorDataModel struct { ARN types.String `tfsdk:"arn"` DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` From 864be7786c459a739b57f1bc8404f4add705a6a0 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 16:20:43 -0500 Subject: [PATCH 25/71] wip --- .../service/rekognition/stream_processor.go | 67 +++++++++++++------ 1 file changed, 45 insertions(+), 22 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 2f58f83929ea..dbb10f0e97c3 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -176,12 +176,22 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), Validators: []validator.List{ listvalidator.SizeAtMost(10), + listvalidator.AtLeastOneOf(path.MatchRelative().AtName("bounding_box"), path.MatchRelative().AtName("polygon")), }, NestedObject: schema.NestedBlockObject{ Blocks: map[string]schema.Block{ "bounding_box": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), Description: "The box representing a region of interest on screen.", + Validators: []validator.Object{ + objectvalidator.AlsoRequires( + path.MatchRelative().AtName("height"), + path.MatchRelative().AtName("left"), + path.MatchRelative().AtName("top"), + path.MatchRelative().AtName("width"), + ), + objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("polygon")), + }, Attributes: map[string]schema.Attribute{ "height": schema.Float64Attribute{ Optional: true, @@ -215,26 +225,25 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, "polygon": schema.ListNestedBlock{ CustomType: fwtypes.NewListNestedObjectTypeOf[polygonModel](ctx), - Description: 
"Specifies a shape made up of up to 10 Point objects to define a region of interest.", + Description: "Specifies a shape made of 3 to 10 Point objects that define a region of interest.", + Validators: []validator.List{ + listvalidator.SizeBetween(3, 10), + listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("bounding_box")), + }, NestedObject: schema.NestedBlockObject{ - Blocks: map[string]schema.Block{ - "polygon_region": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), - Attributes: map[string]schema.Attribute{ - "x": schema.Float64Attribute{ - Description: "The value of the X coordinate for a point on a Polygon.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), - }, - }, - "y": schema.Float64Attribute{ - Description: "The value of the Y coordinate for a point on a Polygon.", - Optional: true, - Validators: []validator.Float64{ - float64validator.Between(0.0, 1.0), - }, - }, + Attributes: map[string]schema.Attribute{ + "x": schema.Float64Attribute{ + Description: "The value of the X coordinate for a point on a Polygon.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), + }, + }, + "y": schema.Float64Attribute{ + Description: "The value of the Y coordinate for a point on a Polygon.", + Optional: true, + Validators: []validator.Float64{ + float64validator.Between(0.0, 1.0), }, }, }, @@ -321,6 +330,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.Float64{ float64validator.Between(0.0, 100.0), }, + Computed: true, Optional: true, }, }, @@ -400,7 +410,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat plan.ID = plan.ARN createTimeout := r.CreateTimeout(ctx, plan.Timeouts) - _, err = waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) + created, err := waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), 
createTimeout) if err != nil { resp.Diagnostics.AddError( create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.Name.String(), err), @@ -409,6 +419,11 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } + resp.Diagnostics.Append(fwflex.Flatten(ctx, created, &plan)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) } @@ -559,7 +574,10 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } region.Polygon = plannedPolygons } - regions = append(regions, *region) + + if region.BoundingBox != nil && len(region.Polygon) > 0 { + regions = append(regions, *region) + } } in.RegionsOfInterestForUpdate = regions @@ -576,7 +594,7 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) - _, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) + updated, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) if err != nil { resp.Diagnostics.AddError( create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForUpdate, ResNameStreamProcessor, plan.Name.String(), err), @@ -585,6 +603,11 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } + resp.Diagnostics.Append(fwflex.Flatten(ctx, updated, &plan)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) 
} From 80fde29d824dbec4325624ce7a75cf80bfe4483f Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 16:48:50 -0500 Subject: [PATCH 26/71] wip --- internal/service/rekognition/stream_processor.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index dbb10f0e97c3..88f1e4be2d0c 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -176,9 +176,11 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.NewListNestedObjectTypeOf[regionOfInterestModel](ctx), Validators: []validator.List{ listvalidator.SizeAtMost(10), - listvalidator.AtLeastOneOf(path.MatchRelative().AtName("bounding_box"), path.MatchRelative().AtName("polygon")), }, NestedObject: schema.NestedBlockObject{ + Validators: []validator.Object{ + objectvalidator.AtLeastOneOf(path.MatchRelative().AtName("bounding_box"), path.MatchRelative().AtName("polygon")), + }, Blocks: map[string]schema.Block{ "bounding_box": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[boundingBoxModel](ctx), From d7587626be37f0827d82622df78d45124696dc13 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Mon, 20 May 2024 20:54:33 -0500 Subject: [PATCH 27/71] wip --- internal/service/rekognition/stream_processor.go | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 88f1e4be2d0c..b82a49216c09 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -231,6 +231,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.List{ listvalidator.SizeBetween(3, 10), 
listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("bounding_box")), + listvalidator.AlsoRequires( + path.MatchRelative().AtName("x"), + path.MatchRelative().AtName("y"), + ), }, NestedObject: schema.NestedBlockObject{ Attributes: map[string]schema.Attribute{ @@ -258,7 +262,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.NewObjectTypeOf[outputModel](ctx), Description: "Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results.", Validators: []validator.Object{ - objectvalidator.IsRequired(), + objectvalidator.AtLeastOneOf( + path.MatchRelative().AtName("kinesis_data_stream"), + path.MatchRelative().AtName("s3_destination"), + ), }, Blocks: map[string]schema.Block{ "kinesis_data_stream": schema.SingleNestedBlock{ From 2ce3fa116ebff629142afc30f6d05f96eaa8eea0 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 09:24:42 -0500 Subject: [PATCH 28/71] start on tests --- .../service/rekognition/stream_processor.go | 31 ++- .../rekognition/stream_processor_test.go | 255 ++++++------------ 2 files changed, 109 insertions(+), 177 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index b82a49216c09..815becbc70bb 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -271,6 +271,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "kinesis_data_stream": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[kinesisDataStreamModel](ctx), Description: "The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results.", + Validators: []validator.Object{ + objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("s3_destination")), + }, Attributes: map[string]schema.Attribute{ "arn": 
schema.StringAttribute{ CustomType: fwtypes.ARNType, @@ -288,6 +291,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "s3_destination": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[s3DestinationModel](ctx), Description: "The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation.", + Validators: []validator.Object{ + objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("kinesis_data_stream")), + }, Attributes: map[string]schema.Attribute{ names.AttrBucket: schema.StringAttribute{ Description: "The name of the Amazon S3 bucket you want to associate with the streaming video project.", @@ -318,12 +324,18 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), Description: "Input parameters used in a streaming video analyzed by a stream processor.", Validators: []validator.Object{ - objectvalidator.IsRequired(), + objectvalidator.AtLeastOneOf( + path.MatchRelative().AtName("connected_home"), + path.MatchRelative().AtName("face_search"), + ), }, Blocks: map[string]schema.Block{ "connected_home": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), Description: "Label detection settings to use on a streaming video.", + Validators: []validator.Object{ + objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("face_search")), + }, Attributes: map[string]schema.Attribute{ "labels": schema.ListAttribute{ Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", @@ -347,6 +359,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "face_search": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[faceSearchModel](ctx), Description: "Face search settings to use on a streaming video.", + Validators: []validator.Object{ + 
objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("connected_home")), + }, Attributes: map[string]schema.Attribute{ "collection_id": schema.StringAttribute{ Description: "The ID of a collection that contains faces that you want to search for.", @@ -416,7 +431,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat } plan.ARN = fwflex.StringToFramework(ctx, out.StreamProcessorArn) - plan.ID = plan.ARN + plan.ID = plan.Name createTimeout := r.CreateTimeout(ctx, plan.Timeouts) created, err := waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) @@ -445,7 +460,7 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq return } - out, err := findStreamProcessorByID(ctx, conn, state.Name.ValueString()) + out, err := findStreamProcessorByID(ctx, conn, state.ID.ValueString()) if tfresource.NotFound(err) { resp.Diagnostics.Append(fwdiag.NewResourceNotFoundWarningDiagnostic(err)) resp.State.RemoveResource(ctx) @@ -453,7 +468,7 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq } if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.Name.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.ID.String(), err), err.Error(), ) return @@ -632,7 +647,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet // TIP: -- 3. 
Populate a delete input structure in := &rekognition.DeleteStreamProcessorInput{ - Name: aws.String(state.Name.ValueString()), + Name: aws.String(state.ID.ValueString()), } _, err := conn.DeleteStreamProcessor(ctx, in) @@ -642,7 +657,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet return } resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.Name.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.ID.String(), err), err.Error(), ) return @@ -650,10 +665,10 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet // TIP: -- 5. Use a waiter to wait for delete to complete deleteTimeout := r.DeleteTimeout(ctx, state.Timeouts) - _, err = waitStreamProcessorDeleted(ctx, conn, state.Name.ValueString(), deleteTimeout) + _, err = waitStreamProcessorDeleted(ctx, conn, state.ID.ValueString(), deleteTimeout) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.Name.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.ID.String(), err), err.Error(), ) return diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 74028d09cc17..9b1829c5d196 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -3,36 +3,7 @@ package rekognition_test -// **PLEASE DELETE THIS AND ALL TIP COMMENTS BEFORE SUBMITTING A PR FOR REVIEW!** -// -// TIP: ==== INTRODUCTION ==== -// Thank you for trying the skaff tool! -// -// You have opted to include these helpful comments. They all include "TIP:" -// to help you find and remove them when you're done with them. 
-// -// While some aspects of this file are customized to your input, the -// scaffold tool does *not* look at the AWS API and ensure it has correct -// function, structure, and variable names. It makes guesses based on -// commonalities. You will need to make significant adjustments. -// -// In other words, as generated, this is a rough outline of the work you will -// need to do. If something doesn't make sense for your situation, get rid of -// it. - import ( - // TIP: ==== IMPORTS ==== - // This is a common set of imports but not customized to your code since - // your code hasn't been written yet. Make sure you, your IDE, or - // goimports -w fixes these imports. - // - // The provider linter wants your imports to be in two groups: first, - // standard library (i.e., "fmt" or "strings"), second, everything else. - // - // Also, AWS Go SDK v2 may handle nested structures differently than v1, - // using the services/rekognition/types package. If so, you'll - // need to import types and reference the nested types, e.g., as - // types.. "context" "errors" "fmt" @@ -49,98 +20,10 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/errs" - "github.com/hashicorp/terraform-provider-aws/names" - - // TIP: You will often need to import the package that this test file lives - // in. Since it is in the "test" context, it must import the package to use - // any normal context constants, variables, or functions. tfrekognition "github.com/hashicorp/terraform-provider-aws/internal/service/rekognition" + "github.com/hashicorp/terraform-provider-aws/names" ) -// TIP: File Structure. The basic outline for all test files should be as -// follows. Improve this resource's maintainability by following this -// outline. -// -// 1. Package declaration (add "_test" since this is a test file) -// 2. Imports -// 3. Unit tests -// 4. Basic test -// 5. 
Disappears test -// 6. All the other tests -// 7. Helper functions (exists, destroy, check, etc.) -// 8. Functions that return Terraform configurations - -// TIP: ==== UNIT TESTS ==== -// This is an example of a unit test. Its name is not prefixed with -// "TestAcc" like an acceptance test. -// -// Unlike acceptance tests, unit tests do not access AWS and are focused on a -// function (or method). Because of this, they are quick and cheap to run. -// -// In designing a resource's implementation, isolate complex bits from AWS bits -// so that they can be tested through a unit test. We encourage more unit tests -// in the provider. -// -// Cut and dry functions using well-used patterns, like typical flatteners and -// expanders, don't need unit testing. However, if they are complex or -// intricate, they should be unit tested. -func TestStreamProcessorExampleUnitTest(t *testing.T) { - t.Parallel() - - testCases := []struct { - TestName string - Input string - Expected string - Error bool - }{ - { - TestName: "empty", - Input: "", - Expected: "", - Error: true, - }, - { - TestName: "descriptive name", - Input: "some input", - Expected: "some output", - Error: false, - }, - { - TestName: "another descriptive name", - Input: "more input", - Expected: "more output", - Error: false, - }, - } - - for _, testCase := range testCases { - testCase := testCase - t.Run(testCase.TestName, func(t *testing.T) { - t.Parallel() - got, err := tfrekognition.FunctionFromResource(testCase.Input) - - if err != nil && !testCase.Error { - t.Errorf("got error (%s), expected no error", err) - } - - if err == nil && testCase.Error { - t.Errorf("got (%s) and no error, expected error", got) - } - - if got != testCase.Expected { - t.Errorf("got %s, expected %s", got, testCase.Expected) - } - }) - } -} - -// TIP: ==== ACCEPTANCE TESTS ==== -// This is an example of a basic acceptance test. 
This should test as much of -// standard functionality of the resource as possible, and test importing, if -// applicable. We prefix its name with "TestAcc", the service, and the -// resource name. -// -// Acceptance test access AWS and cost money to run. func TestAccRekognitionStreamProcessor_basic(t *testing.T) { ctx := acctest.Context(t) // TIP: This is a long-running test guard for tests that run longer than @@ -149,7 +32,7 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { t.Skip("skipping long-running test in short mode") } - var streamprocessor rekognition.DescribeStreamProcessorResponse + var streamprocessor rekognition.DescribeStreamProcessorOutput rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_rekognition_stream_processor.test" @@ -164,7 +47,7 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_basic(rName), + Config: testAccStreamProcessorConfig_basic(rName, "1.0"), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, "auto_minor_version_upgrade", "false"), @@ -188,43 +71,43 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { }) } -func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { - ctx := acctest.Context(t) - if testing.Short() { - t.Skip("skipping long-running test in short mode") - } - - var streamprocessor rekognition.DescribeStreamProcessorResponse - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_rekognition_stream_processor.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - acctest.PreCheck(ctx, t) - acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) - testAccPreCheck(t) - }, - ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), - 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccStreamProcessorConfig_basic(rName, testAccStreamProcessorVersionNewer), - Check: resource.ComposeTestCheckFunc( - testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - // TIP: The Plugin-Framework disappears helper is similar to the Plugin-SDK version, - // but expects a new resource factory function as the third argument. To expose this - // private function to the testing package, you may need to add a line like the following - // to exports_test.go: - // - // var ResourceStreamProcessor = newResourceStreamProcessor - acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} +// func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { +// ctx := acctest.Context(t) +// if testing.Short() { +// t.Skip("skipping long-running test in short mode") +// } + +// var streamprocessor rekognition.DescribeStreamProcessorResponse +// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) +// resourceName := "aws_rekognition_stream_processor.test" + +// resource.ParallelTest(t, resource.TestCase{ +// PreCheck: func() { +// acctest.PreCheck(ctx, t) +// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) +// testAccPreCheck(t) +// }, +// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), +// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, +// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), +// Steps: []resource.TestStep{ +// { +// Config: testAccStreamProcessorConfig_basic(rName, testAccStreamProcessorVersionNewer), +// Check: resource.ComposeTestCheckFunc( +// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), +// // TIP: The Plugin-Framework disappears helper is similar to the Plugin-SDK version, +// 
// but expects a new resource factory function as the third argument. To expose this +// // private function to the testing package, you may need to add a line like the following +// // to exports_test.go: +// // +// // var ResourceStreamProcessor = newResourceStreamProcessor +// acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), +// ), +// ExpectNonEmptyPlan: true, +// }, +// }, +// }) +// } func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { @@ -235,17 +118,14 @@ func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckF continue } - input := &rekognition.DescribeStreamProcessorInput{ - StreamProcessorId: aws.String(rs.Primary.ID), - } _, err := conn.DescribeStreamProcessor(ctx, &rekognition.DescribeStreamProcessorInput{ - StreamProcessorId: aws.String(rs.Primary.ID), + Name: aws.String(rs.Primary.ID), }) - if errs.IsA[*types.ResourceNotFoundException](err){ + if errs.IsA[*types.ResourceNotFoundException](err) { return nil } if err != nil { - return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) + return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) } return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, errors.New("not destroyed")) @@ -255,7 +135,7 @@ func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckF } } -func testAccCheckStreamProcessorExists(ctx context.Context, name string, streamprocessor *rekognition.DescribeStreamProcessorResponse) resource.TestCheckFunc { +func testAccCheckStreamProcessorExists(ctx context.Context, name string, streamprocessor *rekognition.DescribeStreamProcessorOutput) resource.TestCheckFunc { return func(s 
*terraform.State) error { rs, ok := s.RootModule().Resources[name] if !ok { @@ -268,7 +148,7 @@ func testAccCheckStreamProcessorExists(ctx context.Context, name string, streamp conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) resp, err := conn.DescribeStreamProcessor(ctx, &rekognition.DescribeStreamProcessorInput{ - StreamProcessorId: aws.String(rs.Primary.ID), + Name: aws.String(rs.Primary.ID), }) if err != nil { @@ -295,16 +175,53 @@ func testAccPreCheck(ctx context.Context, t *testing.T) { } } -func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorResponse) resource.TestCheckFunc { +func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorOutput) resource.TestCheckFunc { return func(s *terraform.State) error { - if before, after := aws.ToString(before.StreamProcessorId), aws.ToString(after.StreamProcessorId); before != after { - return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(before.StreamProcessorId), errors.New("recreated")) + if before, after := aws.ToString(before.StreamProcessorArn), aws.ToString(after.StreamProcessorArn); before != after { + return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(&before), errors.New("recreated")) } return nil } } +func testAccStreamProcessorConfig_setup(rName string) string { + return fmt.Sprintf(` +resource "aws_iam_role" "test" { + name = "%[1]q-test-role" + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Sid = "" + Principal = { + Service = "ec2.amazonaws.com" + } + }, + ] + }) +} + +resource "aws_s3_bucket" "test" { + bucket = "%[1]q-test-bucket" +} + +resource "aws_sns_topic" "test" { + name = "%[1]q-test-topic" +} + +resource "aws_kinesis_video_stream" "test" { + name = 
"%[1]q-test-kinesis-input" + data_retention_in_hours = 1 + device_name = "kinesis-video-device-name" + media_type = "video/h264" +} + `, rName) +} + func testAccStreamProcessorConfig_basic(rName, version string) string { return fmt.Sprintf(` resource "aws_security_group" "test" { @@ -312,7 +229,7 @@ resource "aws_security_group" "test" { } resource "aws_rekognition_stream_processor" "test" { - stream_processor_name = %[1]q + stream_processor_name = %[1]q engine_type = "ActiveRekognition" engine_version = %[2]q host_instance_type = "rekognition.t2.micro" From 51401f63d5f261dcfe50fabce87a78641fd6a1f4 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 09:29:37 -0500 Subject: [PATCH 29/71] wip --- .../rekognition/stream_processor_test.go | 86 ++++++++++--------- 1 file changed, 45 insertions(+), 41 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 9b1829c5d196..8267386a1569 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -9,7 +9,6 @@ import ( "fmt" "testing" - "github.com/YakDriver/regexache" "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/rekognition" "github.com/aws/aws-sdk-go-v2/service/rekognition/types" @@ -26,11 +25,6 @@ import ( func TestAccRekognitionStreamProcessor_basic(t *testing.T) { ctx := acctest.Context(t) - // TIP: This is a long-running test guard for tests that run longer than - // 300s (5 min) generally. 
- if testing.Short() { - t.Skip("skipping long-running test in short mode") - } var streamprocessor rekognition.DescribeStreamProcessorOutput rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) @@ -47,18 +41,18 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_basic(rName, "1.0"), + Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, "auto_minor_version_upgrade", "false"), - resource.TestCheckResourceAttrSet(resourceName, "maintenance_window_start_time.0.day_of_week"), - resource.TestCheckTypeSetElemNestedAttrs(resourceName, "user.*", map[string]string{ - "console_access": "false", - "groups.#": "0", - "username": "Test", - "password": "TestTest1234", - }), - acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "rekognition", regexache.MustCompile(`streamprocessor:+.`)), + // resource.TestCheckResourceAttr(resourceName, "auto_minor_version_upgrade", "false"), + // resource.TestCheckResourceAttrSet(resourceName, "maintenance_window_start_time.0.day_of_week"), + // resource.TestCheckTypeSetElemNestedAttrs(resourceName, "user.*", map[string]string{ + // "console_access": "false", + // "groups.#": "0", + // "username": "Test", + // "password": "TestTest1234", + // }), + // acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "rekognition", regexache.MustCompile(`streamprocessor:+.`)), ), }, { @@ -188,7 +182,7 @@ func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.Describe func testAccStreamProcessorConfig_setup(rName string) string { return fmt.Sprintf(` resource "aws_iam_role" "test" { - name = "%[1]q-test-role" + name = "%[1]q-acctest-role" assume_role_policy = jsonencode({ Version = "2012-10-17" 
@@ -206,15 +200,15 @@ resource "aws_iam_role" "test" { } resource "aws_s3_bucket" "test" { - bucket = "%[1]q-test-bucket" + bucket = "%[1]q-acctest-bucket" } resource "aws_sns_topic" "test" { - name = "%[1]q-test-topic" + name = "%[1]q-acctest-topic" } resource "aws_kinesis_video_stream" "test" { - name = "%[1]q-test-kinesis-input" + name = "%[1]q-acctest-kinesis-input" data_retention_in_hours = 1 device_name = "kinesis-video-device-name" media_type = "video/h264" @@ -222,29 +216,39 @@ resource "aws_kinesis_video_stream" "test" { `, rName) } -func testAccStreamProcessorConfig_basic(rName, version string) string { +func testAccStreamProcessorConfig_basic(setup, rName string) string { return fmt.Sprintf(` -resource "aws_security_group" "test" { - name = %[1]q -} +%[1]q resource "aws_rekognition_stream_processor" "test" { - stream_processor_name = %[1]q - engine_type = "ActiveRekognition" - engine_version = %[2]q - host_instance_type = "rekognition.t2.micro" - security_groups = [aws_security_group.test.id] - authentication_strategy = "simple" - storage_type = "efs" - - logs { - general = true - } - - user { - username = "Test" - password = "TestTest1234" + role_arn = aws_iam_role.test.arn + name = "%[1]q-acctest-processor" + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.test.bucket + } + } + + settings { + connected_home { + labels = ["PERSON", "ALL"] + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.test.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.test.arn + } } -} -`, rName, version) +`, setup, rName) } From 11065ac5fb9440ba848d3ecb7ed7013b1c66b39b Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 09:30:47 -0500 Subject: [PATCH 30/71] fmt --- .../rekognition/stream_processor_test.go | 56 +++++++++---------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go 
b/internal/service/rekognition/stream_processor_test.go index 8267386a1569..afed2ea3d55b 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -221,34 +221,34 @@ func testAccStreamProcessorConfig_basic(setup, rName string) string { %[1]q resource "aws_rekognition_stream_processor" "test" { - role_arn = aws_iam_role.test.arn - name = "%[1]q-acctest-processor" - - data_sharing_preference { - opt_in = true - } - - output { - s3_destination { - bucket = aws_s3_bucket.test.bucket - } - } - - settings { - connected_home { - labels = ["PERSON", "ALL"] - } - } - - input { - kinesis_video_stream { - arn = aws_kinesis_video_stream.test.arn - } - } - - notification_channel { - sns_topic_arn = aws_sns_topic.test.arn - } + role_arn = aws_iam_role.test.arn + name = "%[1]q-acctest-processor" + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.test.bucket + } } + + settings { + connected_home { + labels = ["PERSON", "ALL"] + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.test.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.test.arn + } +} `, setup, rName) } From 8105e5abf3334d9d7d575672a6591d6c3cba8b24 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 10:07:21 -0500 Subject: [PATCH 31/71] get test running --- internal/service/rekognition/exports_test.go | 10 +- .../rekognition/stream_processor_test.go | 91 ++++++++----------- 2 files changed, 46 insertions(+), 55 deletions(-) diff --git a/internal/service/rekognition/exports_test.go b/internal/service/rekognition/exports_test.go index 8eae3dcd7dbd..f08d56287a35 100644 --- a/internal/service/rekognition/exports_test.go +++ b/internal/service/rekognition/exports_test.go @@ -6,11 +6,13 @@ package rekognition // Exports for use in tests only. 
var ( - ResourceProject = newResourceProject - ResourceCollection = newResourceCollection + ResourceProject = newResourceProject + ResourceCollection = newResourceCollection + ResourceStreamProcessor = newResourceStreamProcessor ) var ( - FindCollectionByID = findCollectionByID - FindProjectByName = findProjectByName + FindCollectionByID = findCollectionByID + FindProjectByName = findProjectByName + FindStreamProcessorByID = findStreamProcessorByID ) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index afed2ea3d55b..4b17ad8b06a6 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -11,15 +11,14 @@ import ( "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/rekognition" - "github.com/aws/aws-sdk-go-v2/service/rekognition/types" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-aws/internal/acctest" "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/create" - "github.com/hashicorp/terraform-provider-aws/internal/errs" tfrekognition "github.com/hashicorp/terraform-provider-aws/internal/service/rekognition" + "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -44,7 +43,7 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - // resource.TestCheckResourceAttr(resourceName, "auto_minor_version_upgrade", "false"), + resource.TestCheckResourceAttr(resourceName, 
"auto_minor_version_upgrade", "false"), // resource.TestCheckResourceAttrSet(resourceName, "maintenance_window_start_time.0.day_of_week"), // resource.TestCheckTypeSetElemNestedAttrs(resourceName, "user.*", map[string]string{ // "console_access": "false", @@ -65,43 +64,34 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { }) } -// func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { -// ctx := acctest.Context(t) -// if testing.Short() { -// t.Skip("skipping long-running test in short mode") -// } - -// var streamprocessor rekognition.DescribeStreamProcessorResponse -// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) -// resourceName := "aws_rekognition_stream_processor.test" - -// resource.ParallelTest(t, resource.TestCase{ -// PreCheck: func() { -// acctest.PreCheck(ctx, t) -// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) -// testAccPreCheck(t) -// }, -// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), -// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, -// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), -// Steps: []resource.TestStep{ -// { -// Config: testAccStreamProcessorConfig_basic(rName, testAccStreamProcessorVersionNewer), -// Check: resource.ComposeTestCheckFunc( -// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), -// // TIP: The Plugin-Framework disappears helper is similar to the Plugin-SDK version, -// // but expects a new resource factory function as the third argument. 
To expose this -// // private function to the testing package, you may need to add a line like the following -// // to exports_test.go: -// // -// // var ResourceStreamProcessor = newResourceStreamProcessor -// acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), -// ), -// ExpectNonEmptyPlan: true, -// }, -// }, -// }) -// } +func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { @@ -112,12 +102,11 @@ func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckF continue } - _, err := conn.DescribeStreamProcessor(ctx, &rekognition.DescribeStreamProcessorInput{ - Name: aws.String(rs.Primary.ID), - }) - if errs.IsA[*types.ResourceNotFoundException](err) { - return nil + _, err := tfrekognition.FindCollectionByID(ctx, conn, rs.Primary.ID) + if 
tfresource.NotFound(err) { + continue } + if err != nil { return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) } @@ -182,7 +171,7 @@ func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.Describe func testAccStreamProcessorConfig_setup(rName string) string { return fmt.Sprintf(` resource "aws_iam_role" "test" { - name = "%[1]q-acctest-role" + name = "%[1]s-acctest-role" assume_role_policy = jsonencode({ Version = "2012-10-17" @@ -200,15 +189,15 @@ resource "aws_iam_role" "test" { } resource "aws_s3_bucket" "test" { - bucket = "%[1]q-acctest-bucket" + bucket = "%[1]s-acctest-bucket" } resource "aws_sns_topic" "test" { - name = "%[1]q-acctest-topic" + name = "%[1]s-acctest-topic" } resource "aws_kinesis_video_stream" "test" { - name = "%[1]q-acctest-kinesis-input" + name = "%[1]s-acctest-kinesis-input" data_retention_in_hours = 1 device_name = "kinesis-video-device-name" media_type = "video/h264" @@ -218,11 +207,11 @@ resource "aws_kinesis_video_stream" "test" { func testAccStreamProcessorConfig_basic(setup, rName string) string { return fmt.Sprintf(` -%[1]q +%[1]s resource "aws_rekognition_stream_processor" "test" { role_arn = aws_iam_role.test.arn - name = "%[1]q-acctest-processor" + name = "%[2]s-acctest-processor" data_sharing_preference { opt_in = true From a6844d8990d64bdd7588cb2cfd75ceb680cedc2c Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 10:27:02 -0500 Subject: [PATCH 32/71] wip --- .../service/rekognition/stream_processor.go | 9 +-- .../rekognition/stream_processor_test.go | 61 ++++++++++--------- 2 files changed, 36 insertions(+), 34 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 815becbc70bb..09704f202e70 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -55,6 +55,7 @@ const ( type 
resourceStreamProcessor struct { framework.ResourceWithConfigure framework.WithTimeouts + framework.WithImportByID } func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { @@ -73,7 +74,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ names.AttrARN: framework.ARNAttributeComputedOnly(), - "kms_key_id": schema.StringAttribute{ + names.AttrKMSKeyID: schema.StringAttribute{ Description: "The identifier for your AWS Key Management Service key (AWS KMS key). You can supply the Amazon Resource Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key, or an alias ARN.", Optional: true, Validators: []validator.String{ @@ -135,7 +136,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), Description: "Kinesis video stream stream that provides the source streaming video for a Amazon Rekognition Video stream processor.", Attributes: map[string]schema.Attribute{ - "arn": schema.StringAttribute{ + names.AttrARN: schema.StringAttribute{ CustomType: fwtypes.ARNType, Description: "ARN of the Kinesis video stream stream that streams the source video.", Required: true, @@ -275,7 +276,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("s3_destination")), }, Attributes: map[string]schema.Attribute{ - "arn": schema.StringAttribute{ + names.AttrARN: schema.StringAttribute{ CustomType: fwtypes.ARNType, Description: "ARN of the output Amazon Kinesis Data Streams stream.", Optional: true, @@ -676,7 +677,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet } func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, 
resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) + resource.ImportStatePassthroughID(ctx, path.Root(names.AttrName), req, resp) } func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resource.ModifyPlanRequest, response *resource.ModifyPlanResponse) { diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 4b17ad8b06a6..e689813303aa 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -43,7 +43,8 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, "auto_minor_version_upgrade", "false"), + resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), // resource.TestCheckResourceAttrSet(resourceName, "maintenance_window_start_time.0.day_of_week"), // resource.TestCheckTypeSetElemNestedAttrs(resourceName, "user.*", map[string]string{ // "console_access": "false", @@ -58,40 +59,40 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"apply_immediately", "user"}, + ImportStateVerifyIgnore: []string{}, }, }, }) } -func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { - ctx := acctest.Context(t) - - var streamprocessor rekognition.DescribeStreamProcessorOutput - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_rekognition_stream_processor.test" - - resource.ParallelTest(t, 
resource.TestCase{ - PreCheck: func() { - acctest.PreCheck(ctx, t) - acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) - testAccPreCheck(ctx, t) - }, - ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} +// func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { +// ctx := acctest.Context(t) + +// var streamprocessor rekognition.DescribeStreamProcessorOutput +// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) +// resourceName := "aws_rekognition_stream_processor.test" + +// resource.ParallelTest(t, resource.TestCase{ +// PreCheck: func() { +// acctest.PreCheck(ctx, t) +// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) +// testAccPreCheck(ctx, t) +// }, +// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), +// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, +// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), +// Steps: []resource.TestStep{ +// { +// Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), +// Check: resource.ComposeTestCheckFunc( +// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), +// acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), +// ), +// ExpectNonEmptyPlan: true, +// }, +// }, +// }) +// } func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { return func(s 
*terraform.State) error { From 67eb3ebbdd152fc5921238d287abd4cab18da360 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 12:20:44 -0500 Subject: [PATCH 33/71] fix schema --- .../service/rekognition/stream_processor.go | 12 +- .../rekognition/stream_processor_test.go | 161 +++++++++++++----- 2 files changed, 124 insertions(+), 49 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 09704f202e70..01ac6c5f9404 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -232,12 +232,14 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.List{ listvalidator.SizeBetween(3, 10), listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("bounding_box")), - listvalidator.AlsoRequires( - path.MatchRelative().AtName("x"), - path.MatchRelative().AtName("y"), - ), }, NestedObject: schema.NestedBlockObject{ + CustomType: fwtypes.NewObjectTypeOf[polygonModel](ctx), + Validators: []validator.Object{ + objectvalidator.AlsoRequires( + path.MatchRelative().AtName("x"), + path.MatchRelative().AtName("y"), + )}, Attributes: map[string]schema.Attribute{ "x": schema.Float64Attribute{ Description: "The value of the X coordinate for a point on a Polygon.", @@ -677,7 +679,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet } func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root(names.AttrName), req, resp) + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) } func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resource.ModifyPlanRequest, response *resource.ModifyPlanResponse) { diff --git a/internal/service/rekognition/stream_processor_test.go 
b/internal/service/rekognition/stream_processor_test.go index e689813303aa..18748281531a 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -22,7 +22,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) -func TestAccRekognitionStreamProcessor_basic(t *testing.T) { +func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { ctx := acctest.Context(t) var streamprocessor rekognition.DescribeStreamProcessorOutput @@ -40,59 +40,80 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), - // resource.TestCheckResourceAttrSet(resourceName, "maintenance_window_start_time.0.day_of_week"), - // resource.TestCheckTypeSetElemNestedAttrs(resourceName, "user.*", map[string]string{ - // "console_access": "false", - // "groups.#": "0", - // "username": "Test", - // "password": "TestTest1234", - // }), - // acctest.MatchResourceAttrRegionalARN(resourceName, "arn", "rekognition", regexache.MustCompile(`streamprocessor:+.`)), ), }, + // { + // ResourceName: resourceName, + // ImportState: true, + // ImportStateVerify: true, + // ImportStateVerifyIgnore: []string{}, + // }, + }, + }) +} + +func TestAccRekognitionStreamProcessor_connectedHome_poylgon(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + 
rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{}, + Config: testAccStreamProcessorConfig_connectedHome_polygons(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", "1"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", "3"), + ), }, }, }) } -// func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { -// ctx := acctest.Context(t) - -// var streamprocessor rekognition.DescribeStreamProcessorOutput -// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) -// resourceName := "aws_rekognition_stream_processor.test" - -// resource.ParallelTest(t, resource.TestCase{ -// PreCheck: func() { -// acctest.PreCheck(ctx, t) -// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) -// testAccPreCheck(ctx, t) -// }, -// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), -// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, -// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), -// Steps: []resource.TestStep{ -// { -// Config: testAccStreamProcessorConfig_basic(testAccStreamProcessorConfig_setup(rName), rName), -// Check: resource.ComposeTestCheckFunc( -// testAccCheckStreamProcessorExists(ctx, resourceName, 
&streamprocessor), -// acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), -// ), -// ExpectNonEmptyPlan: true, -// }, -// }, -// }) -// } +func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { @@ -169,7 +190,7 @@ func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.Describe } } -func testAccStreamProcessorConfig_setup(rName string) string { +func testAccStreamProcessorConfig_connectedHome_setup(rName string) string { return fmt.Sprintf(` resource "aws_iam_role" "test" { name = "%[1]s-acctest-role" @@ -206,7 +227,44 @@ resource "aws_kinesis_video_stream" "test" { `, rName) } -func testAccStreamProcessorConfig_basic(setup, rName string) string { +func testAccStreamProcessorConfig_connectedHome(setup, rName string) string { + return 
fmt.Sprintf(` +%[1]s + +resource "aws_rekognition_stream_processor" "test" { + role_arn = aws_iam_role.test.arn + name = "%[2]s-acctest-processor" + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.test.bucket + } + } + + settings { + connected_home { + labels = ["PERSON", "ALL"] + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.test.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.test.arn + } +} +`, setup, rName) +} + +func testAccStreamProcessorConfig_connectedHome_polygons(setup, rName string) string { return fmt.Sprintf(` %[1]s @@ -224,6 +282,21 @@ resource "aws_rekognition_stream_processor" "test" { } } + regions_of_interest { + polygon { + x = 0.5 + y = 0.5 + } + polygon { + x = 0.5 + y = 0.5 + } + polygon { + x = 0.5 + y = 0.5 + } + } + settings { connected_home { labels = ["PERSON", "ALL"] From 49d63d940b2fa0c65c93bf159bb5941ae172abb6 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 12:42:16 -0500 Subject: [PATCH 34/71] start on docs --- ...rekognition_stream_processor.html.markdown | 55 +++++++++++++++---- 1 file changed, 44 insertions(+), 11 deletions(-) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index c1db9ef586d4..592edad8c0ed 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -5,18 +5,13 @@ page_title: "AWS: aws_rekognition_stream_processor" description: |- Terraform resource for managing an AWS Rekognition Stream Processor. --- -` + # Resource: aws_rekognition_stream_processor Terraform resource for managing an AWS Rekognition Stream Processor. +~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. 
See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. + ## Example Usage ### Basic Usage @@ -30,18 +25,56 @@ resource "aws_rekognition_stream_processor" "example" { The following arguments are required: -* `example_arg` - (Required) Concise argument description. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. +* `input` - (Required) Input video stream. See [`input`](#input) definition. +* `name` - (Required) The name of the Stream Processor +* `role_arn` - (Required) The ARN of the IAM role that allows access to the stream processor. +* `output` - (Required) Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results The following arguments are optional: -* `optional_arg` - (Optional) Concise argument description. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. +* `kms_key_id` - (Optional) Optional parameter for label detection stream processors +* `data_sharing_preference` - (Optional) See [`data_sharing_preference`](#data_sharing_preference) definition. +* `notification_channel` - (Optional) The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status. See [`notification_channel`](#notification_channel) definition. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `input`
+
+* `kinesis_video_stream` - Kinesis input stream. See [`kinesis_video_stream`](#kinesis_video_stream) definition.
+
+#### `kinesis_video_stream`
+
+* `arn` - ARN of the Kinesis video stream stream that streams the source video
+
+### `output`
+
+* `kinesis_data_stream` - (Optional) The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results. See [`kinesis_data_stream`](#kinesis_data_stream) definition.
+* `s3_destination` - (Optional) The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation. See [`s3_destination`](#s3_destination) definition.
+
+#### `kinesis_data_stream`
+
+* `arn` - ARN of the output Amazon Kinesis Data Streams stream.
+
+#### `s3_destination`
+
+* `bucket` - The name of the Amazon S3 bucket you want to associate with the streaming video project
+* `key_prefix` - The prefix value of the location within the bucket that you want the information to be published to
+
+### `data_sharing_preference`
+
+* `opt_in` - (Optional) Shows whether you are sharing data with Rekognition to improve model performance.
+
+### `notification_channel`
+
+* `sns_topic_arn` - The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.
 
 ## Attribute Reference
 
 This resource exports the following attributes in addition to the arguments above:
 
 * `arn` - ARN of the Stream Processor. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. 
In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. -* `example_attribute` - Concise description. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). ## Timeouts From f1b61fcdc64123e0ce851ca8c191d8c71fa29d81 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 12:48:52 -0500 Subject: [PATCH 35/71] docs --- ...rekognition_stream_processor.html.markdown | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 592edad8c0ed..2772a534c51e 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -35,6 +35,7 @@ The following arguments are optional: * `kms_key_id` - (Optional) Optional parameter for label detection stream processors * `data_sharing_preference` - (Optional) See [`data_sharing_preference`](#data_sharing_preference) definition. * `notification_channel` - (Optional) The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status. See [`notification_channel`](#notification_channel) definition. +* `regions_of_interest` - (Optional) Specifies locations in the frames where Amazon Rekognition checks for objects or people. See [`regions_of_interest`] definition. * `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. ## Nested Blocks @@ -65,6 +66,27 @@ The following arguments are optional: * `opt_in` - (Optional) Shows whether you are sharing data with Rekognition to improve model performance. +### `regions_of_interest` + +* `bounding_box` - (Optional) The box representing a region of interest on screen. Only 1 per region is allowed. See [`bounding_box`](#bounding_box) definition. +* `polygon` - (Optional) Shape made up of up to 10 `Point` objects defining a region of interest on screen. See [`polygon`](#polygon) definition. + +#### `bounding_box` + +A region can only have a single `bounding_box` + +* `height` - (Required) Height of the bounding box as a ratio of the overall image height. +* `width` - (Required) Width of the bounding box as a ratio of the overall image width. +* `left` - (Required) Left coordinate of the bounding box as a ratio of overall image width. +* `top` - (Required) Top coordinate of the bounding box as a ratio of overall image height. + +#### `polygon` + +If using `polygon`, a minimum of 3 per region is required, with a maximum of 10. + +* `x` - (Required) The value of the X coordinate for a point on a Polygon. +* `y` - (Required) The value of the Y coordinate for a point on a Polygon. + ### `notification_channel` * `sns_topic_arn` - The Amazon Resource Name (ARN) of the Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status. 
From 1f828cafb1772764e17a0656fbbb2be5146eeaae Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 12:56:15 -0500 Subject: [PATCH 36/71] docs --- .../r/rekognition_stream_processor.html.markdown | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 2772a534c51e..63e96507c961 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -29,6 +29,7 @@ The following arguments are required: * `name` - (Required) The name of the Stream Processor * `role_arn` - (Required) The ARN of the IAM role that allows access to the stream processor. * `output` - (Required) Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results +* `settings` - (Required) Input parameters used in a streaming video analyzed by a stream processor. See [`settings`](#settings) definition. The following arguments are optional: @@ -91,6 +92,21 @@ If using `polygon`, a minimum of 3 per region is required, with a maximum of 10. * `sns_topic_arn` - The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status. +### `settings` + +* `connected_home` - Label detection settings to use on a streaming video. See [`connected_home`](#connected_home) definition. +* `face_search` - Input face recognition parameters for an Amazon Rekognition stream processor. See [`face_search`](#face_search) definition. + +#### `connected_home` + +* `labels` - Specifies what you want to detect in the video, such as people, packages, or pets. The current valid labels you can include in this list are: "PERSON", "PET", "PACKAGE", and "ALL". +* `min_confidence` - The minimum confidence required to label an object in the video. 
+ +#### `face_search` + +* `collection_id` - The ID of a collection that contains faces that you want to search for. +* `face_match_threshold` - Minimum face match confidence score that must be met to return a result for a recognized face + ## Attribute Reference This resource exports the following attributes in addition to the arguments above: From fe48c079b4989386a14f86c4d25f51d466a8d52b Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 12:59:21 -0500 Subject: [PATCH 37/71] example --- ...rekognition_stream_processor.html.markdown | 68 ++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 63e96507c961..06b3eab865a2 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -12,7 +12,73 @@ Terraform resource for managing an AWS Rekognition Stream Processor. ~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. 
-## Example Usage +## Label Detection Usage + +```terraform +Resource "aws_iam_role" "example" { + name = "example-role" + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Sid = "" + Principal = { + Service = "ec2.amazonaws.com" + } + }, + ] + }) +} + +resource "aws_s3_bucket" "example" { + bucket = "example-bucket" +} + +resource "aws_sns_topic" "example" { + name = "example-topic" +} + +resource "aws_kinesis_video_stream" "example" { + name = "example-kinesis-input" + data_retention_in_hours = 1 + device_name = "kinesis-video-device-name" + media_type = "video/h264" +} + +resource "aws_rekognition_stream_processor" "example" { + role_arn = aws_iam_role.example.arn + name = "example-processor" + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.example.bucket + } + } + + settings { + connected_home { + labels = ["PERSON", "PET"] + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.example.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.example.arn + } +} +``` ### Basic Usage From 358c5ea1343a4e61c2a583ad902bc61286bc45a9 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 13:01:35 -0500 Subject: [PATCH 38/71] docs --- website/docs/r/rekognition_stream_processor.html.markdown | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 06b3eab865a2..f7cfcf59d5f4 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -80,11 +80,10 @@ resource "aws_rekognition_stream_processor" "example" { } ``` -### Basic Usage +### Face Detection Usage ```terraform -resource "aws_rekognition_stream_processor" "example" { -} +TODO ``` ## Argument Reference From 
7273f36c2e4dbf88d755e25e097972f0624e37f3 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 13:10:35 -0500 Subject: [PATCH 39/71] wip --- .../rekognition/service_package_gen.go | 3 + .../rekognition/stream_processor_test.go | 95 +++++++++++++++++-- ...rekognition_stream_processor.html.markdown | 22 ++++- 3 files changed, 110 insertions(+), 10 deletions(-) diff --git a/internal/service/rekognition/service_package_gen.go b/internal/service/rekognition/service_package_gen.go index b1ae950d1f66..9a34be1967a9 100644 --- a/internal/service/rekognition/service_package_gen.go +++ b/internal/service/rekognition/service_package_gen.go @@ -34,6 +34,9 @@ func (p *servicePackage) FrameworkResources(ctx context.Context) []*types.Servic { Factory: newResourceStreamProcessor, Name: "StreamProcessor", + Tags: &types.ServicePackageResourceTags{ + IdentifierAttribute: names.AttrARN, + }, }, } } diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 18748281531a..adcb23556b45 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -86,6 +86,39 @@ func TestAccRekognitionStreamProcessor_connectedHome_poylgon(t *testing.T) { }) } +func TestAccRekognitionStreamProcessor_connectedHome_boundingBox(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ 
+ { + Config: testAccStreamProcessorConfig_connectedHome_boundingBox(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", "1"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", "0"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.left", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), + ), + }, + }, + }) +} + func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { ctx := acctest.Context(t) @@ -180,15 +213,15 @@ func testAccPreCheck(ctx context.Context, t *testing.T) { } } -func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorOutput) resource.TestCheckFunc { - return func(s *terraform.State) error { - if before, after := aws.ToString(before.StreamProcessorArn), aws.ToString(after.StreamProcessorArn); before != after { - return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(&before), errors.New("recreated")) - } +// func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorOutput) resource.TestCheckFunc { +// return func(s *terraform.State) error { +// if before, after := aws.ToString(before.StreamProcessorArn), aws.ToString(after.StreamProcessorArn); before != after { +// return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(&before), errors.New("recreated")) +// } - return nil - } -} +// return nil +// } +// } func 
testAccStreamProcessorConfig_connectedHome_setup(rName string) string { return fmt.Sprintf(` @@ -315,3 +348,49 @@ resource "aws_rekognition_stream_processor" "test" { } `, setup, rName) } + +func testAccStreamProcessorConfig_connectedHome_boundingBox(setup, rName string) string { + return fmt.Sprintf(` +%[1]s + +resource "aws_rekognition_stream_processor" "test" { + role_arn = aws_iam_role.test.arn + name = "%[2]s-acctest-processor" + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.test.bucket + } + } + + regions_of_interest { + bounding_box { + left = 0.5 + top = 0.5 + height = 0.5 + width = 0.5 + } + } + + settings { + connected_home { + labels = ["PERSON", "ALL"] + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.test.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.test.arn + } +} +`, setup, rName) +} diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index f7cfcf59d5f4..6207ee44d77a 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -12,7 +12,9 @@ Terraform resource for managing an AWS Rekognition Stream Processor. ~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. 
-## Label Detection Usage +## Example Usage + +### Label Detection Usage ```terraform Resource "aws_iam_role" "example" { @@ -83,7 +85,23 @@ resource "aws_rekognition_stream_processor" "example" { ### Face Detection Usage ```terraform -TODO +Resource "aws_iam_role" "example" { + name = "example-role" + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Sid = "TBD" + Principal = { + Service = "ec2.amazonaws.com" + } + }, + ] + }) +} ``` ## Argument Reference From c7a3e4dd1db99a6d27b5765124d162b12b5ef7d0 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 13:15:06 -0500 Subject: [PATCH 40/71] regen --- internal/service/rekognition/service_package_gen.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/internal/service/rekognition/service_package_gen.go b/internal/service/rekognition/service_package_gen.go index 9a34be1967a9..a5516373d81e 100644 --- a/internal/service/rekognition/service_package_gen.go +++ b/internal/service/rekognition/service_package_gen.go @@ -33,10 +33,7 @@ func (p *servicePackage) FrameworkResources(ctx context.Context) []*types.Servic }, { Factory: newResourceStreamProcessor, - Name: "StreamProcessor", - Tags: &types.ServicePackageResourceTags{ - IdentifierAttribute: names.AttrARN, - }, + Name: "Stream Processor", }, } } From e38a95fbaaaecaac9213cadda6832b7a79d64406 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 13:28:28 -0500 Subject: [PATCH 41/71] fix lint --- internal/service/rekognition/stream_processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 01ac6c5f9404..f461c9bd3df5 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -679,7 +679,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req 
resource.Delet } func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) + resource.ImportStatePassthroughID(ctx, path.Root(names.AttrID), req, resp) } func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resource.ModifyPlanRequest, response *resource.ModifyPlanResponse) { From a26d67caa1be7b241c86d4596f6f8da17182b118 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 13:34:30 -0500 Subject: [PATCH 42/71] lint --- internal/service/rekognition/stream_processor_test.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index adcb23556b45..995efd3d8e60 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -78,8 +78,8 @@ func TestAccRekognitionStreamProcessor_connectedHome_poylgon(t *testing.T) { Config: testAccStreamProcessorConfig_connectedHome_polygons(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", "1"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", "3"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), ), }, }, @@ -107,8 +107,8 @@ func TestAccRekognitionStreamProcessor_connectedHome_boundingBox(t *testing.T) { Config: testAccStreamProcessorConfig_connectedHome_boundingBox(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, 
resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", "1"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", "0"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct0), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.left", "0.5"), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), From 577fff8dfecbfd26d068c7a312441ebf5b7db531 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 14:15:02 -0500 Subject: [PATCH 43/71] lint --- .../service/rekognition/stream_processor.go | 25 ++++++++++--------- .../rekognition/stream_processor_test.go | 2 +- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index f461c9bd3df5..47cce57741ce 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -28,6 +28,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs" "github.com/hashicorp/terraform-provider-aws/internal/errs/fwdiag" "github.com/hashicorp/terraform-provider-aws/internal/framework" @@ -160,7 +161,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, Description: "The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the object detection results and completion status of a video analysis 
operation.", Attributes: map[string]schema.Attribute{ - "sns_topic_arn": schema.StringAttribute{ + names.AttrSNSTopicARN: schema.StringAttribute{ Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", CustomType: fwtypes.ARNType, Required: true, @@ -689,7 +690,7 @@ func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resour func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ Pending: []string{}, - Target: []string{string(awstypes.StreamProcessorStatusStopped)}, + Target: enum.Slice(awstypes.StreamProcessorStatusStopped), Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -706,8 +707,8 @@ func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, i func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{string(awstypes.StreamProcessorStatusUpdating)}, - Target: []string{string(awstypes.StreamProcessorStatusStopped)}, + Pending: enum.Slice(awstypes.StreamProcessorStatusUpdating), + Target: enum.Slice(awstypes.StreamProcessorStatusStopped), Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, NotFoundChecks: 20, @@ -724,14 +725,14 @@ func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, i func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ - Pending: []string{ - string(awstypes.StreamProcessorStatusStopped), - string(awstypes.StreamProcessorStatusStarting), - 
string(awstypes.StreamProcessorStatusRunning), - string(awstypes.StreamProcessorStatusFailed), - string(awstypes.StreamProcessorStatusStopping), - string(awstypes.StreamProcessorStatusUpdating), - }, + Pending: enum.Slice( + awstypes.StreamProcessorStatusStopped, + awstypes.StreamProcessorStatusStarting, + awstypes.StreamProcessorStatusRunning, + awstypes.StreamProcessorStatusFailed, + awstypes.StreamProcessorStatusStopping, + awstypes.StreamProcessorStatusUpdating, + ), Target: []string{}, Refresh: statusStreamProcessor(ctx, conn, id), Timeout: timeout, diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 995efd3d8e60..1ff37942213d 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -373,7 +373,7 @@ resource "aws_rekognition_stream_processor" "test" { top = 0.5 height = 0.5 width = 0.5 - } + } } settings { From 23e8a7b594802a36719450b4164994ac5fe8d713 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 14:28:19 -0500 Subject: [PATCH 44/71] ignore semgrep --- internal/service/rekognition/stream_processor.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 47cce57741ce..1a86d02f4781 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -531,17 +531,17 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat return } - if !connectedHomePlan.MinConfidence.Equal(connectedHomeState.MinConfidence) { - if !connectedHomePlan.MinConfidence.IsNull() && connectedHomeState.MinConfidence.IsNull() { + if !connectedHomePlan.MinConfidence.Equal(connectedHomeState.MinConfidence) { // nosemgrep:ci.semgrep.migrate.aws-api-context + if !connectedHomePlan.MinConfidence.IsNull() && 
connectedHomeState.MinConfidence.IsNull() { // nosemgrep:ci.semgrep.migrate.aws-api-context in.ParametersToDelete = append(in.ParametersToDelete, awstypes.StreamProcessorParameterToDeleteConnectedHomeMinConfidence) } - if !connectedHomePlan.MinConfidence.IsNull() { - in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(connectedHomePlan.MinConfidence.ValueFloat64())) + if !connectedHomePlan.MinConfidence.IsNull() { // nosemgrep:ci.semgrep.migrate.aws-api-context + in.SettingsForUpdate.ConnectedHomeForUpdate.MinConfidence = aws.Float32(float32(connectedHomePlan.MinConfidence.ValueFloat64())) // nosemgrep:ci.semgrep.migrate.aws-api-context } } - if !connectedHomePlan.Labels.Equal(connectedHomeState.Labels) { + if !connectedHomePlan.Labels.Equal(connectedHomeState.Labels) { // nosemgrep:ci.semgrep.migrate.aws-api-context in.SettingsForUpdate.ConnectedHomeForUpdate.Labels = fwflex.ExpandFrameworkStringValueList(ctx, connectedHomePlan.Labels) } } From f0acf81cbdbf26968f6103104accbc9428c4b02d Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 14:53:47 -0500 Subject: [PATCH 45/71] lint --- .../service/rekognition/stream_processor.go | 26 +++++++++---------- .../rekognition/stream_processor_test.go | 2 +- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 1a86d02f4781..c97eaca2263b 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -67,10 +67,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$`) nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) collectionIdRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) - kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesisvideo:([a-z\d-]+):\d{12}:.+$)`) s3bucketRegex := 
regexache.MustCompile(`[0-9A-Za-z\.\-_]*`) - snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) - roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) + kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesisvideo:([a-z\d-]+):\d{12}:.+$)`) // lintignore:AWSAT005 + snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) // lintignore:AWSAT005 + roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) // lintignore:AWSAT005 resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ @@ -103,7 +103,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem // CustomType: fwtypes.ARNType, Required: true, Validators: []validator.String{ - stringvalidator.RegexMatches(roleArnRegex, "must conform to: arn:aws:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"), + stringvalidator.RegexMatches(roleArnRegex, "must conform to: arn:aws:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"), // lintignore:AWSAT005 }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), @@ -143,7 +143,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Required: true, Validators: []validator.String{ stringvalidator.All( - stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), + stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), // lintignore:AWSAT005 ), }, PlanModifiers: []planmodifier.String{ @@ -353,7 +353,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "min_confidence": schema.Float64Attribute{ Description: "The minimum confidence required to label an object in the video.", Validators: []validator.Float64{ - float64validator.Between(0.0, 100.0), + float64validator.Between(0.0, 100.0), //nolint:mnd }, Computed: 
true, Optional: true, @@ -381,7 +381,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "face_match_threshold": schema.Float64Attribute{ Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", Validators: []validator.Float64{ - float64validator.Between(0.0, 100.0), + float64validator.Between(0.0, 100.0), //nolint:mnd }, Optional: true, PlanModifiers: []planmodifier.Float64{ @@ -504,7 +504,7 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { - dspPlan, dspState := unwrapObjectValueOf(plan.DataSharingPreference, state.DataSharingPreference, resp.Diagnostics, ctx) + dspPlan, dspState := unwrapObjectValueOf(ctx, resp.Diagnostics, plan.DataSharingPreference, state.DataSharingPreference) if resp.Diagnostics.HasError() { return } @@ -521,12 +521,12 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, } - settingsPlan, settingsState := unwrapObjectValueOf(plan.Settings, state.Settings, resp.Diagnostics, ctx) + settingsPlan, settingsState := unwrapObjectValueOf(ctx, resp.Diagnostics, plan.Settings, state.Settings) if resp.Diagnostics.HasError() { return } - connectedHomePlan, connectedHomeState := unwrapObjectValueOf(settingsPlan.ConnectedHome, settingsState.ConnectedHome, resp.Diagnostics, ctx) + connectedHomePlan, connectedHomeState := unwrapObjectValueOf(ctx, resp.Diagnostics, settingsPlan.ConnectedHome, settingsState.ConnectedHome) if resp.Diagnostics.HasError() { return } @@ -553,7 +553,7 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat if !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { var regions []awstypes.RegionOfInterest - planRegions, _ := unwrapListObjectValueOf(plan.RegionsOfInterest, state.RegionsOfInterest, resp.Diagnostics, ctx) 
+ planRegions, _ := unwrapListObjectValueOf(ctx, resp.Diagnostics, plan.RegionsOfInterest, state.RegionsOfInterest) for i := 0; i < len(planRegions); i++ { planRegion := planRegions[i] @@ -785,7 +785,7 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name return out, nil } -func unwrapObjectValueOf[T any](plan fwtypes.ObjectValueOf[T], state fwtypes.ObjectValueOf[T], diagnostics diag.Diagnostics, ctx context.Context) (*T, *T) { +func unwrapObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostics, plan fwtypes.ObjectValueOf[T], state fwtypes.ObjectValueOf[T]) (*T, *T) { ptrPlan, diags := plan.ToPtr(ctx) diagnostics.Append(diags...) @@ -795,7 +795,7 @@ func unwrapObjectValueOf[T any](plan fwtypes.ObjectValueOf[T], state fwtypes.Obj return ptrPlan, ptrState } -func unwrapListObjectValueOf[T any](plan fwtypes.ListNestedObjectValueOf[T], state fwtypes.ListNestedObjectValueOf[T], diagnostics diag.Diagnostics, ctx context.Context) ([]*T, []*T) { +func unwrapListObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostics, plan fwtypes.ListNestedObjectValueOf[T], state fwtypes.ListNestedObjectValueOf[T]) ([]*T, []*T) { ptrPlan, diags := plan.ToSlice(ctx) diagnostics.Append(diags...) 
diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 1ff37942213d..9b25492ad83b 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -57,7 +57,7 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { }) } -func TestAccRekognitionStreamProcessor_connectedHome_poylgon(t *testing.T) { +func TestAccRekognitionStreamProcessor_connectedHome_polygon(t *testing.T) { ctx := acctest.Context(t) var streamprocessor rekognition.DescribeStreamProcessorOutput From 6241a73760c6b2a640fe9ea652928d6822ca391c Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 15:10:19 -0500 Subject: [PATCH 46/71] lint --- internal/service/rekognition/stream_processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index c97eaca2263b..9e4200612252 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -166,7 +166,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.ARNType, Required: true, Validators: []validator.String{ - stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), + stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), // lintignore:AWSAT005 }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), From a816c16ab56da2fbe664a7a932daf6a4d6627e05 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 15:29:17 -0500 Subject: [PATCH 47/71] fix import --- .../service/rekognition/stream_processor.go | 2 + .../rekognition/stream_processor_test.go | 39 ++++++++++++++++--- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git 
a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 9e4200612252..8e1efee8394c 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -482,6 +482,8 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq if resp.Diagnostics.HasError() { return } + + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) } func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 9b25492ad83b..7630327030d6 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -22,6 +22,39 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) +func TestAccRekognitionStreamProcessor_import(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"arn"}, + }, + }, + }) +} + func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { ctx := acctest.Context(t) @@ -47,12 +80,6 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), ), }, - // { - // ResourceName: resourceName, - // ImportState: true, - // ImportStateVerify: true, - // ImportStateVerifyIgnore: []string{}, - // }, }, }) } From 954524d1c1f1ee13ddcd9a4dda154984090a1e2a Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 15:50:53 -0500 Subject: [PATCH 48/71] wip --- .../service/rekognition/stream_processor.go | 61 +++++-------------- 1 file changed, 16 insertions(+), 45 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 8e1efee8394c..c09922dd31e9 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -21,6 +21,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64default" "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" @@ -355,7 +356,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.Float64{ float64validator.Between(0.0, 100.0), //nolint:mnd }, - Computed: true, + Default: float64default.StaticFloat64(50), //nolint:mnd Optional: true, }, }, @@ -553,34 +554,24 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } if 
!plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { - var regions []awstypes.RegionOfInterest + planRegions, diags := plan.RegionsOfInterest.ToSlice(ctx) + resp.Diagnostics.Append(diags...) - planRegions, _ := unwrapListObjectValueOf(ctx, resp.Diagnostics, plan.RegionsOfInterest, state.RegionsOfInterest) + plannedRegions := make([]awstypes.RegionOfInterest, len(planRegions)) for i := 0; i < len(planRegions); i++ { planRegion := planRegions[i] - region := &awstypes.RegionOfInterest{} + plannedRegions[i] = awstypes.RegionOfInterest{} if !planRegion.BoundingBox.IsNull() { boundingBox, diags := planRegion.BoundingBox.ToPtr(ctx) resp.Diagnostics.Append(diags...) - region.BoundingBox = &awstypes.BoundingBox{} - - if !boundingBox.Top.IsNull() { - region.BoundingBox.Top = aws.Float32(float32(boundingBox.Top.ValueFloat64())) - } - - if !boundingBox.Left.IsNull() { - region.BoundingBox.Left = aws.Float32(float32(boundingBox.Left.ValueFloat64())) - } - - if !boundingBox.Height.IsNull() { - region.BoundingBox.Height = aws.Float32(float32(boundingBox.Height.ValueFloat64())) - } - - if !boundingBox.Width.IsNull() { - region.BoundingBox.Width = aws.Float32(float32(boundingBox.Width.ValueFloat64())) + plannedRegions[i].BoundingBox = &awstypes.BoundingBox{ + Top: aws.Float32(float32(boundingBox.Top.ValueFloat64())), + Left: aws.Float32(float32(boundingBox.Left.ValueFloat64())), + Height: aws.Float32(float32(boundingBox.Height.ValueFloat64())), + Width: aws.Float32(float32(boundingBox.Width.ValueFloat64())), } } @@ -592,25 +583,15 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat for i := 0; i < len(polygons); i++ { polygon := polygons[i] - plannedPolygons[i] = awstypes.Point{} - - if !polygon.X.IsNull() { - plannedPolygons[i].X = aws.Float32(float32(polygon.X.ValueFloat64())) - } - - if !polygon.Y.IsNull() { - plannedPolygons[i].Y = aws.Float32(float32(polygon.Y.ValueFloat64())) + plannedPolygons[i] = awstypes.Point{ + X: 
aws.Float32(float32(polygon.X.ValueFloat64())), + Y: aws.Float32(float32(polygon.Y.ValueFloat64())), } } - region.Polygon = plannedPolygons - } - - if region.BoundingBox != nil && len(region.Polygon) > 0 { - regions = append(regions, *region) + plannedRegions[i].Polygon = plannedPolygons } } - - in.RegionsOfInterestForUpdate = regions + in.RegionsOfInterestForUpdate = plannedRegions } _, err := conn.UpdateStreamProcessor(ctx, in) @@ -797,16 +778,6 @@ func unwrapObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostic return ptrPlan, ptrState } -func unwrapListObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostics, plan fwtypes.ListNestedObjectValueOf[T], state fwtypes.ListNestedObjectValueOf[T]) ([]*T, []*T) { - ptrPlan, diags := plan.ToSlice(ctx) - diagnostics.Append(diags...) - - ptrState, diags := state.ToSlice(ctx) - diagnostics.Append(diags...) - - return ptrPlan, ptrState -} - type resourceStreamProcessorDataModel struct { ARN types.String `tfsdk:"arn"` DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` From 951f16d1d8e820933ce1a34455a459eeed027a6e Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 15:51:51 -0500 Subject: [PATCH 49/71] lint --- .../rekognition/stream_processor_test.go | 35 +++---------------- 1 file changed, 4 insertions(+), 31 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 7630327030d6..23aee77642a5 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -22,7 +22,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) -func TestAccRekognitionStreamProcessor_import(t *testing.T) { +func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { ctx := acctest.Context(t) var streamprocessor rekognition.DescribeStreamProcessorOutput @@ -43,42 +43,15 @@ func 
TestAccRekognitionStreamProcessor_import(t *testing.T) { Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), ), }, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"arn"}, - }, - }, - }) -} - -func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { - ctx := acctest.Context(t) - - var streamprocessor rekognition.DescribeStreamProcessorOutput - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_rekognition_stream_processor.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - acctest.PreCheck(ctx, t) - acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) - testAccPreCheck(ctx, t) - }, - ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), - resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), - ), + ImportStateVerifyIgnore: []string{names.AttrARN}, }, }, }) From 958fb0f45bf7635b676bf572ac9390d9fd12c239 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Tue, 21 May 2024 16:42:13 -0500 
Subject: [PATCH 50/71] bug fixed --- internal/service/rekognition/stream_processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index c09922dd31e9..9fc91ff7f3ba 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -357,6 +357,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem float64validator.Between(0.0, 100.0), //nolint:mnd }, Default: float64default.StaticFloat64(50), //nolint:mnd + Computed: true, Optional: true, }, }, @@ -500,7 +501,6 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat if !plan.DataSharingPreference.Equal(state.DataSharingPreference) || !plan.Settings.Equal(state.Settings) || !plan.RegionsOfInterest.Equal(state.RegionsOfInterest) { - in := &rekognition.UpdateStreamProcessorInput{ Name: plan.Name.ValueStringPointer(), ParametersToDelete: []awstypes.StreamProcessorParameterToDelete{}, From 7d1edbfd2b601d0385f8dcf02630bbe8d940688f Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 10:41:11 -0500 Subject: [PATCH 51/71] add test --- .../rekognition/stream_processor_test.go | 188 +++++------------- 1 file changed, 54 insertions(+), 134 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 23aee77642a5..ecc0742e495a 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -40,7 +40,7 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Config: 
testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), @@ -57,39 +57,10 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { }) } -func TestAccRekognitionStreamProcessor_connectedHome_polygon(t *testing.T) { +func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { ctx := acctest.Context(t) - var streamprocessor rekognition.DescribeStreamProcessorOutput - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_rekognition_stream_processor.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - acctest.PreCheck(ctx, t) - acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) - testAccPreCheck(ctx, t) - }, - ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccStreamProcessorConfig_connectedHome_polygons(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), - Check: resource.ComposeTestCheckFunc( - testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), - ), - }, - }, - }) -} - -func TestAccRekognitionStreamProcessor_connectedHome_boundingBox(t *testing.T) { - ctx := acctest.Context(t) - - var streamprocessor rekognition.DescribeStreamProcessorOutput + var streamprocessor, streamprocessor2 rekognition.DescribeStreamProcessorOutput rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName 
:= "aws_rekognition_stream_processor.test" @@ -104,7 +75,7 @@ func TestAccRekognitionStreamProcessor_connectedHome_boundingBox(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome_boundingBox(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), @@ -115,6 +86,21 @@ func TestAccRekognitionStreamProcessor_connectedHome_boundingBox(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), ), }, + { + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_polygons()), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor2), + testAccCheckStreamProcessorNotRecreated(&streamprocessor, &streamprocessor2), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.y", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.y", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, 
"regions_of_interest.0.polygon.2.y", "0.5"), + ), + }, }, }) } @@ -137,7 +123,7 @@ func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName), + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), @@ -213,15 +199,15 @@ func testAccPreCheck(ctx context.Context, t *testing.T) { } } -// func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorOutput) resource.TestCheckFunc { -// return func(s *terraform.State) error { -// if before, after := aws.ToString(before.StreamProcessorArn), aws.ToString(after.StreamProcessorArn); before != after { -// return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(&before), errors.New("recreated")) -// } +func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.DescribeStreamProcessorOutput) resource.TestCheckFunc { + return func(s *terraform.State) error { + if before, after := aws.ToString(before.StreamProcessorArn), aws.ToString(after.StreamProcessorArn); before != after { + return create.Error(names.Rekognition, create.ErrActionCheckingNotRecreated, tfrekognition.ResNameStreamProcessor, aws.ToString(&before), errors.New("recreated")) + } -// return nil -// } -// } + return nil + } +} func testAccStreamProcessorConfig_connectedHome_setup(rName string) string { return fmt.Sprintf(` @@ -257,99 +243,40 @@ resource "aws_kinesis_video_stream" "test" { device_name = "kinesis-video-device-name" media_type = "video/h264" 
} - `, rName) +`, rName) } -func testAccStreamProcessorConfig_connectedHome(setup, rName string) string { - return fmt.Sprintf(` -%[1]s - -resource "aws_rekognition_stream_processor" "test" { - role_arn = aws_iam_role.test.arn - name = "%[2]s-acctest-processor" - - data_sharing_preference { - opt_in = true - } - - output { - s3_destination { - bucket = aws_s3_bucket.test.bucket - } - } - - settings { - connected_home { - labels = ["PERSON", "ALL"] - } +func testAccStreamProcessorConfig_polygons() string { + return ` +regions_of_interest { + polygon { + x = 0.5 + y = 0.5 } - - input { - kinesis_video_stream { - arn = aws_kinesis_video_stream.test.arn - } + polygon { + x = 0.5 + y = 0.5 } - - notification_channel { - sns_topic_arn = aws_sns_topic.test.arn + polygon { + x = 0.5 + y = 0.5 } +}` } -`, setup, rName) -} - -func testAccStreamProcessorConfig_connectedHome_polygons(setup, rName string) string { - return fmt.Sprintf(` -%[1]s - -resource "aws_rekognition_stream_processor" "test" { - role_arn = aws_iam_role.test.arn - name = "%[2]s-acctest-processor" - - data_sharing_preference { - opt_in = true - } - - output { - s3_destination { - bucket = aws_s3_bucket.test.bucket - } - } - - regions_of_interest { - polygon { - x = 0.5 - y = 0.5 - } - polygon { - x = 0.5 - y = 0.5 - } - polygon { - x = 0.5 - y = 0.5 - } - } - settings { - connected_home { - labels = ["PERSON", "ALL"] - } - } - - input { - kinesis_video_stream { - arn = aws_kinesis_video_stream.test.arn - } - } - - notification_channel { - sns_topic_arn = aws_sns_topic.test.arn +func testAccStreamProcessorConfig_boundingBox() string { + return ` +regions_of_interest { + bounding_box { + left = 0.5 + top = 0.5 + height = 0.5 + width = 0.5 } -} -`, setup, rName) +}` } -func testAccStreamProcessorConfig_connectedHome_boundingBox(setup, rName string) string { +func testAccStreamProcessorConfig_connectedHome(setup, rName, regionsOfInterest string) string { return fmt.Sprintf(` %[1]s @@ -367,14 +294,7 @@ resource 
"aws_rekognition_stream_processor" "test" { } } - regions_of_interest { - bounding_box { - left = 0.5 - top = 0.5 - height = 0.5 - width = 0.5 - } - } +%[3]s settings { connected_home { @@ -392,5 +312,5 @@ resource "aws_rekognition_stream_processor" "test" { sns_topic_arn = aws_sns_topic.test.arn } } -`, setup, rName) +`, setup, rName, regionsOfInterest) } From c2f34ea151da137e844e7b3978afc99cf9b55989 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 11:19:03 -0500 Subject: [PATCH 52/71] update docs --- ...rekognition_stream_processor.html.markdown | 64 +++++++++++++------ 1 file changed, 45 insertions(+), 19 deletions(-) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 6207ee44d77a..501d2fef10a9 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -17,24 +17,6 @@ Terraform resource for managing an AWS Rekognition Stream Processor. 
### Label Detection Usage ```terraform -Resource "aws_iam_role" "example" { - name = "example-role" - - assume_role_policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Action = "sts:AssumeRole" - Effect = "Allow" - Sid = "" - Principal = { - Service = "ec2.amazonaws.com" - } - }, - ] - }) -} - resource "aws_s3_bucket" "example" { bucket = "example-bucket" } @@ -50,6 +32,50 @@ resource "aws_kinesis_video_stream" "example" { media_type = "video/h264" } +resource "aws_iam_role" "example" { + name = "example-role" + + inline_policy { + name = "Rekognition-Access" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = ["s3:PutObject"] + Effect = "Allow" + Resource = ["${aws_s3_bucket.example.arn}/*"] + }, + { + Action = ["sns:Publish"] + Effect = "Allow" + Resource = ["${aws_sns_topic.example.arn}"] + }, + { + Action = [ + "kinesis:Get*", + "kinesis:DescribeStreamSummary" + ] + Effect = "Allow" + Resource = ["${aws_kinesis_video_stream.example.arn}"] + }, + ] + }) + } + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Principal = { + Service = "rekognition.amazonaws.com" + } + }, + ] + }) +} + resource "aws_rekognition_stream_processor" "example" { role_arn = aws_iam_role.example.arn name = "example-processor" @@ -110,7 +136,7 @@ The following arguments are required: * `input` - (Required) Input video stream. See [`input`](#input) definition. * `name` - (Required) The name of the Stream Processor -* `role_arn` - (Required) The ARN of the IAM role that allows access to the stream processor. +* `role_arn` - (Required) The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor. The IAM role provides Rekognition read permissions for a Kinesis stream. It also provides write permissions to an Amazon S3 bucket and Amazon Simple Notification Service topic for a label detection stream processor.
This is required for both face search and label detection stream processors. * `output` - (Required) Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results * `settings` - (Required) Input parameters used in a streaming video analyzed by a stream processor. See [`settings`](#settings) definition. From ea089c7e701f60ea535c029f1b7be9c59dc007f4 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 12:00:39 -0500 Subject: [PATCH 53/71] schema tweaks --- .../service/rekognition/stream_processor.go | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 9fc91ff7f3ba..80698d9e23d0 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -69,9 +69,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) collectionIdRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) s3bucketRegex := regexache.MustCompile(`[0-9A-Za-z\.\-_]*`) - kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesisvideo:([a-z\d-]+):\d{12}:.+$)`) // lintignore:AWSAT005 - snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) // lintignore:AWSAT005 - roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) // lintignore:AWSAT005 + kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesis:([a-z\d-]+):\d{12}:.+$)`) // lintignore:AWSAT005 + kinesisVideoStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesisvideo:([a-z\d-]+):\d{12}:.+$)`) // lintignore:AWSAT005 + snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) // lintignore:AWSAT005 + roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) // lintignore:AWSAT005 
resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ @@ -144,7 +145,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Required: true, Validators: []validator.String{ stringvalidator.All( - stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), // lintignore:AWSAT005 + stringvalidator.RegexMatches(kinesisVideoStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), // lintignore:AWSAT005 ), }, PlanModifiers: []planmodifier.String{ @@ -156,16 +157,13 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, "notification_channel": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), - Validators: []validator.Object{ - objectvalidator.IsRequired(), - }, + CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), Description: "The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the object detection results and completion status of a video analysis operation.", Attributes: map[string]schema.Attribute{ names.AttrSNSTopicARN: schema.StringAttribute{ Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", CustomType: fwtypes.ARNType, - Required: true, + Optional: true, Validators: []validator.String{ stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), // lintignore:AWSAT005 }, @@ -345,7 +343,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "labels": schema.ListAttribute{ Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", CustomType: fwtypes.ListOfStringType, - Required: true, + Optional: true, Validators: []validator.List{ listvalidator.SizeAtLeast(1), 
listvalidator.ValueStringsAre(stringvalidator.OneOf(connectedHomeLabels()...)), @@ -367,6 +365,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "Face search settings to use on a streaming video.", Validators: []validator.Object{ objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("connected_home")), + objectvalidator.AlsoRequires( + path.MatchRelative().AtName("collection_id"), + ), }, Attributes: map[string]schema.Attribute{ "collection_id": schema.StringAttribute{ @@ -385,10 +386,12 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.Float64{ float64validator.Between(0.0, 100.0), //nolint:mnd }, - Optional: true, PlanModifiers: []planmodifier.Float64{ float64planmodifier.RequiresReplace(), }, + Default: float64default.StaticFloat64(80), //nolint:mnd + Computed: true, + Optional: true, }, }, }, From eba02b9c6cfe17a094cf357c1db861be69fe4310 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 13:03:19 -0500 Subject: [PATCH 54/71] wip --- .../service/rekognition/stream_processor.go | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 80698d9e23d0..84ae5bfebf78 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -21,12 +21,14 @@ import ( "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64default" "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64planmodifier" 
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" "github.com/hashicorp/terraform-provider-aws/internal/create" "github.com/hashicorp/terraform-provider-aws/internal/enum" @@ -122,6 +124,10 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "opt_in": schema.BoolAttribute{ Description: "Do you want to share data with Rekognition to improve model performance.", Optional: true, + Computed: true, + PlanModifiers: []planmodifier.Bool{ + boolplanmodifier.UseStateForUnknown(), + }, }, }, }, @@ -443,10 +449,10 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat plan.ID = plan.Name createTimeout := r.CreateTimeout(ctx, plan.Timeouts) - created, err := waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) + created, err := waitStreamProcessorCreated(ctx, conn, plan.ID.ValueString(), createTimeout) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.Name.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.ID.String(), err), err.Error(), ) return @@ -457,7 +463,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - resp.Diagnostics.Append(resp.State.Set(ctx, plan)...) + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) 
} func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { @@ -488,6 +494,12 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq return } + if state.DataSharingPreference.IsNull() { + dataSharing, diag := fwtypes.NewObjectValueOf(ctx, &dataSharingPreferenceModel{OptIn: basetypes.NewBoolValue(false)}) + resp.Diagnostics.Append(diag...) + state.DataSharingPreference = dataSharing + } + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) } From cd6acdb4f7b83836c29fc1c9f85c51d7673cc738 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 16:23:25 -0500 Subject: [PATCH 55/71] make data preferences required --- .../service/rekognition/stream_processor.go | 22 ++--- ...rekognition_stream_processor.html.markdown | 96 +++++++++++++++++-- 2 files changed, 100 insertions(+), 18 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 84ae5bfebf78..1a7ef4c42bca 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -21,7 +21,6 @@ import ( "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64default" "github.com/hashicorp/terraform-plugin-framework/resource/schema/float64planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" @@ -120,14 +119,14 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "data_sharing_preference": schema.SingleNestedBlock{ CustomType: fwtypes.NewObjectTypeOf[dataSharingPreferenceModel](ctx), Description: "Shows whether you are sharing data with Rekognition to 
improve model performance.", + Validators: []validator.Object{ + objectvalidator.IsRequired(), + objectvalidator.AlsoRequires(path.MatchRelative().AtName("opt_in")), + }, Attributes: map[string]schema.Attribute{ "opt_in": schema.BoolAttribute{ Description: "Do you want to share data with Rekognition to improve model performance.", Optional: true, - Computed: true, - PlanModifiers: []planmodifier.Bool{ - boolplanmodifier.UseStateForUnknown(), - }, }, }, }, @@ -448,6 +447,13 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat plan.ARN = fwflex.StringToFramework(ctx, out.StreamProcessorArn) plan.ID = plan.Name + if plan.DataSharingPreference.IsNull() { + dataSharing, diag := fwtypes.NewObjectValueOf(ctx, &dataSharingPreferenceModel{OptIn: basetypes.NewBoolValue(false)}) + resp.Diagnostics.Append(diag...) + plan.DataSharingPreference = dataSharing + resp.Diagnostics.Append(req.Plan.Set(ctx, &plan)...) + } + createTimeout := r.CreateTimeout(ctx, plan.Timeouts) created, err := waitStreamProcessorCreated(ctx, conn, plan.ID.ValueString(), createTimeout) if err != nil { @@ -494,12 +500,6 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq return } - if state.DataSharingPreference.IsNull() { - dataSharing, diag := fwtypes.NewObjectValueOf(ctx, &dataSharingPreferenceModel{OptIn: basetypes.NewBoolValue(false)}) - resp.Diagnostics.Append(diag...) - state.DataSharingPreference = dataSharing - } - resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) } diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 501d2fef10a9..534a50aee341 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -10,7 +10,7 @@ description: |- Terraform resource for managing an AWS Rekognition Stream Processor. 
-~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. +~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. Additionally, Stream Processors configued for Face Recognition cannot have _any_ properties updated after the fact. ## Example Usage @@ -81,7 +81,7 @@ resource "aws_rekognition_stream_processor" "example" { name = "example-processor" data_sharing_preference { - opt_in = true + opt_in = false } output { @@ -111,39 +111,121 @@ resource "aws_rekognition_stream_processor" "example" { ### Face Detection Usage ```terraform -Resource "aws_iam_role" "example" { + +resource "aws_kinesis_video_stream" "example" { + name = "example-kinesis-input" + data_retention_in_hours = 1 + device_name = "kinesis-video-device-name" + media_type = "video/h264" +} + +resource "aws_kinesis_stream" "example_output" { + name = "terraform-kinesis-example" + shard_count = 1 +} + +resource "aws_iam_role" "example" { name = "example-role" + inline_policy { + name = "Rekognition-Access" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "kinesis:Get*", + "kinesis:DescribeStreamSummary" + ] + Effect = "Allow" + Resource = ["${aws_kinesis_video_stream.example.arn}"] + }, + { + Action = [ + "kinesis:PutRecord" + ] + Effect = "Allow" + Resource = ["${aws_kinesis_stream.example_output.arn}"] + }, + ] + }) + } + assume_role_policy = jsonencode({ Version = "2012-10-17" Statement = [ { Action = 
"sts:AssumeRole" Effect = "Allow" - Sid = "TBD" Principal = { - Service = "ec2.amazonaws.com" + Service = "rekognition.amazonaws.com" } }, ] }) } + +resource "aws_rekognition_collection" "example" { + collection_id = "example-collection" +} + +resource "aws_rekognition_stream_processor" "example" { + role_arn = aws_iam_role.example.arn + name = "example-processor" + + data_sharing_preference { + opt_in = false + } + + regions_of_interest { + polygon { + x = 0.5 + y = 0.5 + } + polygon { + x = 0.5 + y = 0.5 + } + polygon { + x = 0.5 + y = 0.5 + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.example.arn + } + } + + output { + kinesis_data_stream { + arn = aws_kinesis_stream.example_output.arn + } + } + + settings { + face_search { + collection_id = aws_rekognition_collection.example.id + } + } +} ``` ## Argument Reference The following arguments are required: +* `data_sharing_preference` - (Optional) See [`data_sharing_preference`](#data_sharing_preference) definition. * `input` - (Required) Input video stream. See [`input`](#input) definition. * `name` - (Required) The name of the Stream Processor -* `role_arn` - (Required) The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor. The IAM role provides Rekognition read permissions for a Kinesis stream. It also provides write permissions to an Amazon S3 bucket and Amazon Simple Notification Service topic for a label detection stream processor. This is required for both face search and label detection stream processors. * `output` - (Required) Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results +* `role_arn` - (Required) The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor. The IAM role provides Rekognition read permissions for a Kinesis stream. 
It also provides write permissions to an Amazon S3 bucket and Amazon Simple Notification Service topic for a label detection stream processor. This is required for both face search and label detection stream processors. * `settings` - (Required) Input parameters used in a streaming video analyzed by a stream processor. See [`settings`](#settings) definition. The following arguments are optional: * `kms_key_id` - (Optional) Optional parameter for label detection stream processors -* `data_sharing_preference` - (Optional) See [`data_sharing_preference`](#data_sharing_preference) definition. * `notification_channel` - (Optional) The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status. See [`notification_channel`](#notification_channel) definition. * `regions_of_interest` - (Optional) Specifies locations in the frames where Amazon Rekognition checks for objects or people. See [`regions_of_interest`] definition. * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
From afc5e62f57e2e5f8b0ac04336511caf0aca92fb9 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 16:37:56 -0500 Subject: [PATCH 56/71] more tests --- .../rekognition/stream_processor_test.go | 234 ++++++++++++++++-- ...rekognition_stream_processor.html.markdown | 4 +- 2 files changed, 215 insertions(+), 23 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index ecc0742e495a..a9fbfb27981c 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -22,7 +22,91 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) -func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { +// func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { +// ctx := acctest.Context(t) + +// var streamprocessor rekognition.DescribeStreamProcessorOutput +// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) +// resourceName := "aws_rekognition_stream_processor.test" + +// resource.ParallelTest(t, resource.TestCase{ +// PreCheck: func() { +// acctest.PreCheck(ctx, t) +// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) +// testAccPreCheck(ctx, t) +// }, +// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), +// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, +// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), +// Steps: []resource.TestStep{ +// { +// Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), +// Check: resource.ComposeTestCheckFunc( +// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), +// resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), +// resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), +// ), +// }, +// { +// 
ResourceName: resourceName, +// ImportState: true, +// ImportStateVerify: true, +// ImportStateVerifyIgnore: []string{names.AttrARN}, +// }, +// }, +// }) +// } + +// func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { +// ctx := acctest.Context(t) + +// var streamprocessor, streamprocessor2 rekognition.DescribeStreamProcessorOutput +// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) +// resourceName := "aws_rekognition_stream_processor.test" + +// resource.ParallelTest(t, resource.TestCase{ +// PreCheck: func() { +// acctest.PreCheck(ctx, t) +// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) +// testAccPreCheck(ctx, t) +// }, +// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), +// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, +// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), +// Steps: []resource.TestStep{ +// { +// Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), +// Check: resource.ComposeTestCheckFunc( +// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct0), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.left", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), +// ), +// }, +// { +// Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_polygons()), +// Check: 
resource.ComposeTestCheckFunc( +// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor2), +// testAccCheckStreamProcessorNotRecreated(&streamprocessor, &streamprocessor2), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.x", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.y", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.x", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.y", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.x", "0.5"), +// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.y", "0.5"), +// ), +// }, +// }, +// }) +// } + +// NOTE: Stream Processors setup for Face Detection cannot be altered after the fact +func TestAccRekognitionStreamProcessor_faceRecognition(t *testing.T) { ctx := acctest.Context(t) var streamprocessor rekognition.DescribeStreamProcessorOutput @@ -40,7 +124,7 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), + Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), @@ -57,10 +141,10 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { }) } -func 
TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { +func TestAccRekognitionStreamProcessor_faceRecognition_boundingBox(t *testing.T) { ctx := acctest.Context(t) - var streamprocessor, streamprocessor2 rekognition.DescribeStreamProcessorOutput + var streamprocessor rekognition.DescribeStreamProcessorOutput rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_rekognition_stream_processor.test" @@ -75,30 +159,40 @@ func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *t CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), + Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct0), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.left", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), + resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), ), }, + }, + }) +} + +func TestAccRekognitionStreamProcessor_faceRecognition_polygon(t *testing.T) { + ctx := 
acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_polygons()), + Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, testAccStreamProcessorConfig_polygons()), Check: resource.ComposeTestCheckFunc( - testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor2), - testAccCheckStreamProcessorNotRecreated(&streamprocessor, &streamprocessor2), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.x", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.y", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.x", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.y", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.x", "0.5"), - resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.y", "0.5"), + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, 
names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), ), }, }, @@ -314,3 +408,99 @@ resource "aws_rekognition_stream_processor" "test" { } `, setup, rName, regionsOfInterest) } + +func testAccStreamProcessorConfig_faceRecognition_setup(rName string) string { + return fmt.Sprintf(` +resource "aws_kinesis_video_stream" "test" { + name = "%[1]s-acctest-kinesis-input" + data_retention_in_hours = 1 + device_name = "kinesis-video-device-name" + media_type = "video/h264" +} + +resource "aws_kinesis_stream" "test_output" { + name = "%[1]s-acctest-kinesis-stream" + shard_count = 1 +} + +resource "aws_iam_role" "test" { + name = "%[1]s-acctest-role" + + inline_policy { + name = "Rekognition-Access" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "kinesis:Get*", + "kinesis:DescribeStreamSummary" + ] + Effect = "Allow" + Resource = ["${aws_kinesis_video_stream.test.arn}"] + }, + { + Action = [ + "kinesis:PutRecord" + ] + Effect = "Allow" + Resource = ["${aws_kinesis_stream.test_output.arn}"] + }, + ] + }) + } + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Principal = { + Service = "rekognition.amazonaws.com" + } + }, + ] + }) +} + +resource "aws_rekognition_collection" "test" { + collection_id = "%[1]s-acctest-rekognition-collection" +} +`, rName) +} + +func testAccStreamProcessorConfig_faceRecognition(setup, rName, regionsOfInterest string) string { + return fmt.Sprintf(` +%[1]s + +resource "aws_rekognition_stream_processor" "test" { + role_arn = aws_iam_role.test.arn + name = "%[2]s-acctest-processor" + + data_sharing_preference { + opt_in = false + } + +%[3]s + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.test.arn + } + } + + output { + kinesis_data_stream { + arn = aws_kinesis_stream.test_output.arn + } + } + + 
settings { + face_search { + collection_id = aws_rekognition_collection.test.id + } + } +} +`, setup, rName, regionsOfInterest) +} diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 534a50aee341..2240090b42f2 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -10,7 +10,9 @@ description: |- Terraform resource for managing an AWS Rekognition Stream Processor. -~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. Additionally, Stream Processors configued for Face Recognition cannot have _any_ properties updated after the fact. +~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. + +~> **Note:** Stream Processors configued for Face Recognition cannot have _any_ properties updated after the fact. 
## Example Usage From f076528200f3b6c4a43defb0852221c8e0057c19 Mon Sep 17 00:00:00 2001 From: Bruce Harrison Date: Wed, 22 May 2024 16:42:25 -0500 Subject: [PATCH 57/71] done --- .../rekognition/stream_processor_test.go | 182 +++++++++--------- ...rekognition_stream_processor.html.markdown | 2 +- 2 files changed, 97 insertions(+), 87 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index a9fbfb27981c..3397654dff14 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -22,88 +22,88 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) -// func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { -// ctx := acctest.Context(t) - -// var streamprocessor rekognition.DescribeStreamProcessorOutput -// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) -// resourceName := "aws_rekognition_stream_processor.test" - -// resource.ParallelTest(t, resource.TestCase{ -// PreCheck: func() { -// acctest.PreCheck(ctx, t) -// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) -// testAccPreCheck(ctx, t) -// }, -// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), -// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, -// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), -// Steps: []resource.TestStep{ -// { -// Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), -// Check: resource.ComposeTestCheckFunc( -// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), -// resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), -// resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), -// ), -// }, -// { -// ResourceName: resourceName, -// ImportState: true, -// 
ImportStateVerify: true, -// ImportStateVerifyIgnore: []string{names.AttrARN}, -// }, -// }, -// }) -// } - -// func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { -// ctx := acctest.Context(t) - -// var streamprocessor, streamprocessor2 rekognition.DescribeStreamProcessorOutput -// rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) -// resourceName := "aws_rekognition_stream_processor.test" - -// resource.ParallelTest(t, resource.TestCase{ -// PreCheck: func() { -// acctest.PreCheck(ctx, t) -// acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) -// testAccPreCheck(ctx, t) -// }, -// ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), -// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, -// CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), -// Steps: []resource.TestStep{ -// { -// Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), -// Check: resource.ComposeTestCheckFunc( -// testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct0), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.left", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), -// ), -// }, -// { -// Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_polygons()), -// Check: resource.ComposeTestCheckFunc( -// 
testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor2), -// testAccCheckStreamProcessorNotRecreated(&streamprocessor, &streamprocessor2), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.x", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.y", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.x", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.y", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.x", "0.5"), -// resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.y", "0.5"), -// ), -// }, -// }, -// }) -// } +func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), + 
resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{names.AttrARN}, + }, + }, + }) +} + +func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor, streamprocessor2 rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct0), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.left", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), + ), + }, + { + Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, 
testAccStreamProcessorConfig_polygons()), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor2), + testAccCheckStreamProcessorNotRecreated(&streamprocessor, &streamprocessor2), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.y", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.y", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.y", "0.5"), + ), + }, + }, + }) +} // NOTE: Stream Processors setup for Face Detection cannot be altered after the fact func TestAccRekognitionStreamProcessor_faceRecognition(t *testing.T) { @@ -162,8 +162,12 @@ func TestAccRekognitionStreamProcessor_faceRecognition_boundingBox(t *testing.T) Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), - resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct0), + resource.TestCheckResourceAttr(resourceName, 
"regions_of_interest.0.bounding_box.left", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.top", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.height", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.bounding_box.width", "0.5"), ), }, }, @@ -191,8 +195,14 @@ func TestAccRekognitionStreamProcessor_faceRecognition_polygon(t *testing.T) { Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, testAccStreamProcessorConfig_polygons()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), - resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.#", acctest.Ct3), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.0.y", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.1.y", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.x", "0.5"), + resource.TestCheckResourceAttr(resourceName, "regions_of_interest.0.polygon.2.y", "0.5"), ), }, }, diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 2240090b42f2..39813a5d8f4d 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -12,7 
+12,7 @@ Terraform resource for managing an AWS Rekognition Stream Processor. ~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. -~> **Note:** Stream Processors configued for Face Recognition cannot have _any_ properties updated after the fact. +~> **Note:** Stream Processors configured for Face Recognition cannot have _any_ properties updated after the fact, and it will result in an AWS API error. ## Example Usage From a84c75f39c683872570d22aa7f7c2927b265c952 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 09:44:12 -0400 Subject: [PATCH 58/71] r/aws_rekognition_stream_processor: adjust regex validation Specifically, removes ARN and KMS key ID validation in favor of AWS server-side validation. Also moves regular expression initialization to the package level so memory is not allocated on each call to the Schema method. 
--- .../service/rekognition/stream_processor.go | 34 ++++--------------- 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 1a7ef4c42bca..7bb799dd00a6 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -41,6 +41,11 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) +var ( + nameRegex = regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) + collectionIdRegex = regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) +) + // @FrameworkResource("aws_rekognition_stream_processor", name="Stream Processor") func newResourceStreamProcessor(_ context.Context) (resource.ResourceWithConfigure, error) { r := &resourceStreamProcessor{} @@ -66,15 +71,6 @@ func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.Metad } func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - kmsKeyIdRegex := regexache.MustCompile(`^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$`) - nameRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) - collectionIdRegex := regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) - s3bucketRegex := regexache.MustCompile(`[0-9A-Za-z\.\-_]*`) - kinesisStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesis:([a-z\d-]+):\d{12}:.+$)`) // lintignore:AWSAT005 - kinesisVideoStreamArnRegex := regexache.MustCompile(`(^arn:([a-z\d-]+):kinesisvideo:([a-z\d-]+):\d{12}:.+$)`) // lintignore:AWSAT005 - snsArnRegex := regexache.MustCompile(`(^arn:aws:sns:.*:\w{12}:.+$)`) // lintignore:AWSAT005 - roleArnRegex := regexache.MustCompile(`arn:aws:iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+`) // lintignore:AWSAT005 - resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ names.AttrARN: framework.ARNAttributeComputedOnly(), @@ -83,7 +79,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Optional: 
true, Validators: []validator.String{ stringvalidator.LengthBetween(1, 2048), - stringvalidator.RegexMatches(kmsKeyIdRegex, "must conform to: ^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]$"), }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), @@ -95,7 +90,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Required: true, Validators: []validator.String{ stringvalidator.LengthAtMost(128), - stringvalidator.RegexMatches(nameRegex, "must conform to: [a-zA-Z0-9_.\\-]+"), + stringvalidator.RegexMatches(nameRegex, ""), }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), @@ -105,9 +100,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor.", // CustomType: fwtypes.ARNType, Required: true, - Validators: []validator.String{ - stringvalidator.RegexMatches(roleArnRegex, "must conform to: arn:aws:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+"), // lintignore:AWSAT005 - }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, @@ -148,11 +140,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.ARNType, Description: "ARN of the Kinesis video stream stream that streams the source video.", Required: true, - Validators: []validator.String{ - stringvalidator.All( - stringvalidator.RegexMatches(kinesisVideoStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesisvideo:([a-z\\d-]+):\\d{12}:.+$)"), // lintignore:AWSAT005 - ), - }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, @@ -169,9 +156,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", CustomType: fwtypes.ARNType, 
Optional: true, - Validators: []validator.String{ - stringvalidator.RegexMatches(snsArnRegex, "must conform to: (^arn:aws:sns:.*:\\w{12}:.+$)"), // lintignore:AWSAT005 - }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, @@ -287,9 +271,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem CustomType: fwtypes.ARNType, Description: "ARN of the output Amazon Kinesis Data Streams stream.", Optional: true, - Validators: []validator.String{ - stringvalidator.RegexMatches(kinesisStreamArnRegex, "must conform to: (^arn:([a-z\\d-]+):kinesis:([a-z\\d-]+):\\d{12}:.+$)"), - }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, @@ -308,7 +289,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Optional: true, Validators: []validator.String{ stringvalidator.LengthBetween(3, 255), - stringvalidator.RegexMatches(s3bucketRegex, "must conform to: [0-9A-Za-z\\.\\-_]*"), }, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), @@ -379,7 +359,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Description: "The ID of a collection that contains faces that you want to search for.", Validators: []validator.String{ stringvalidator.LengthAtMost(2048), - stringvalidator.RegexMatches(collectionIdRegex, "must conform to: [a-zA-Z0-9_.\\-]+"), + stringvalidator.RegexMatches(collectionIdRegex, ""), }, Optional: true, PlanModifiers: []planmodifier.String{ From 520cd626aff02163ff8d3d3c1e422678fa0a4a87 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 10:05:05 -0400 Subject: [PATCH 59/71] r/aws_rekognition_stream_processor: tidy local enum --- .../service/rekognition/stream_processor.go | 42 +++++++++---------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 7bb799dd00a6..d44587083c29 
100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -41,6 +41,25 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) +const ( + labelPerson = "PERSON" + labelPet = "PET" + labelPackage = "PACKAGE" + labelAll = "ALL" +) + +// AWS SDK doesn't have a Labels enum available as of 5/13/24 +// +// Ref: https://docs.aws.amazon.com/rekognition/latest/APIReference/API_ConnectedHomeSettings.html#API_ConnectedHomeSettings_Contents +func labelsEnumValues() []string { + return []string{ + labelPerson, + labelPet, + labelPackage, + labelAll, + } +} + var ( nameRegex = regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) collectionIdRegex = regexache.MustCompile(`[a-zA-Z0-9_.\-]+`) @@ -331,7 +350,7 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Optional: true, Validators: []validator.List{ listvalidator.SizeAtLeast(1), - listvalidator.ValueStringsAre(stringvalidator.OneOf(connectedHomeLabels()...)), + listvalidator.ValueStringsAre(stringvalidator.OneOf(labelsEnumValues()...)), }, }, "min_confidence": schema.Float64Attribute{ @@ -851,24 +870,3 @@ type faceSearchModel struct { CollectionId types.String `tfsdk:"collection_id"` FaceMatchThreshold types.Float64 `tfsdk:"face_match_threshold"` } - -const ( - person_label = "PERSON" - pet_label = "PET" - package_label = "PACKAGE" - all_label = "ALL" -) - -/* -- AWS SDK doesn't have a CreateStreamProcessorInput.StreamProcessorSettings.ConnectedHomeSettings.Labels enum available as of 5/13/24 - -- see docs https://docs.aws.amazon.com/rekognition/latest/APIReference/API_ConnectedHomeSettings.html#API_ConnectedHomeSettings_Contents -*/ -func connectedHomeLabels() []string { - return []string{ - person_label, - pet_label, - package_label, - all_label, - } -} From 8c35a39604305f797d0fec63f12eb75431fe1e0a Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 10:17:44 -0400 Subject: [PATCH 60/71] 
r/aws_rekognition_stream_processor: consistent arn types --- internal/service/rekognition/stream_processor.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index d44587083c29..7b2dd3c9b574 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -117,8 +117,8 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, names.AttrRoleARN: schema.StringAttribute{ Description: "The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor.", - // CustomType: fwtypes.ARNType, - Required: true, + CustomType: fwtypes.ARNType, + Required: true, PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), }, @@ -802,7 +802,7 @@ type resourceStreamProcessorDataModel struct { Name types.String `tfsdk:"name"` Output fwtypes.ObjectValueOf[outputModel] `tfsdk:"output"` RegionsOfInterest fwtypes.ListNestedObjectValueOf[regionOfInterestModel] `tfsdk:"regions_of_interest"` - RoleARN types.String `tfsdk:"role_arn"` //TODO ARN types? 
+ RoleARN fwtypes.ARN `tfsdk:"role_arn"` Settings fwtypes.ObjectValueOf[settingsModel] `tfsdk:"settings"` Tags types.Map `tfsdk:"tags"` TagsAll types.Map `tfsdk:"tags_all"` @@ -818,11 +818,11 @@ type inputModel struct { } type kinesisVideoStreamInputModel struct { - ARN types.String `tfsdk:"arn"` + ARN fwtypes.ARN `tfsdk:"arn"` } type notificationChannelModel struct { - SNSTopicArn fwtypes.ARN `tfsdk:"sns_topic_arn"` + SNSTopicARN fwtypes.ARN `tfsdk:"sns_topic_arn"` } type outputModel struct { @@ -831,7 +831,7 @@ type outputModel struct { } type kinesisDataStreamModel struct { - ARN types.String `tfsdk:"arn"` + ARN fwtypes.ARN `tfsdk:"arn"` } type s3DestinationModel struct { From c357c2e70e1d2483634147abf8a0986e5b95614f Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 10:23:48 -0400 Subject: [PATCH 61/71] r/aws_rekognition_stream_processor: remove magic numbers --- internal/service/rekognition/stream_processor.go | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 7b2dd3c9b574..5e6d3c3271cd 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -42,6 +42,14 @@ import ( ) const ( + connectedHomeConfidenceMin float64 = 0 + connectedHomeConfidenceMax float64 = 100 + connectedHomeConfidenceDefault float64 = 50 + + faceMatchThresholdMin float64 = 0 + faceMatchThresholdMax float64 = 100 + faceMatchThresholdDefault float64 = 80 + labelPerson = "PERSON" labelPet = "PET" labelPackage = "PACKAGE" @@ -356,9 +364,9 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "min_confidence": schema.Float64Attribute{ Description: "The minimum confidence required to label an object in the video.", Validators: []validator.Float64{ - float64validator.Between(0.0, 100.0), //nolint:mnd + float64validator.Between(connectedHomeConfidenceMin, 
connectedHomeConfidenceMax), }, - Default: float64default.StaticFloat64(50), //nolint:mnd + Default: float64default.StaticFloat64(connectedHomeConfidenceDefault), Computed: true, Optional: true, }, @@ -388,12 +396,12 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem "face_match_threshold": schema.Float64Attribute{ Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", Validators: []validator.Float64{ - float64validator.Between(0.0, 100.0), //nolint:mnd + float64validator.Between(faceMatchThresholdMin, faceMatchThresholdMax), }, PlanModifiers: []planmodifier.Float64{ float64planmodifier.RequiresReplace(), }, - Default: float64default.StaticFloat64(80), //nolint:mnd + Default: float64default.StaticFloat64(faceMatchThresholdDefault), Computed: true, Optional: true, }, From 36d7371b623ad3536d31100a2a734c255998b70b Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 10:28:44 -0400 Subject: [PATCH 62/71] r/aws_rekognition_stream_processor: remove connect home min_confidence default The AWS documentation does not specify a default value for this argument. 
--- internal/service/rekognition/stream_processor.go | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 5e6d3c3271cd..0aa7f10f0586 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -42,9 +42,8 @@ import ( ) const ( - connectedHomeConfidenceMin float64 = 0 - connectedHomeConfidenceMax float64 = 100 - connectedHomeConfidenceDefault float64 = 50 + connectedHomeConfidenceMin float64 = 0 + connectedHomeConfidenceMax float64 = 100 faceMatchThresholdMin float64 = 0 faceMatchThresholdMax float64 = 100 @@ -366,7 +365,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem Validators: []validator.Float64{ float64validator.Between(connectedHomeConfidenceMin, connectedHomeConfidenceMax), }, - Default: float64default.StaticFloat64(connectedHomeConfidenceDefault), Computed: true, Optional: true, }, From 0d8832f72a511543e537e8f99ecd792267582729 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 10:45:34 -0400 Subject: [PATCH 63/71] r/aws_rekognition_stream_processor(test): compose test configs --- .../rekognition/stream_processor_test.go | 232 +++++++++--------- 1 file changed, 116 insertions(+), 116 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 3397654dff14..8f325c35f605 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -22,7 +22,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/names" ) -func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { +func TestAccRekognitionStreamProcessor_basic(t *testing.T) { ctx := acctest.Context(t) var streamprocessor rekognition.DescribeStreamProcessorOutput @@ -40,11 +40,11 @@ func 
TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), + Config: testAccStreamProcessorConfig_connectedHome(rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), - resource.TestCheckResourceAttr(resourceName, names.AttrName, fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, names.AttrID, rName), + resource.TestCheckResourceAttr(resourceName, names.AttrName, rName), ), }, { @@ -57,6 +57,35 @@ func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { }) } +func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_connectedHome(rName, ""), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + func 
TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { ctx := acctest.Context(t) @@ -75,7 +104,7 @@ func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *t CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), + Config: testAccStreamProcessorConfig_connectedHome(rName, testAccStreamProcessorConfig_boundingBox()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), @@ -87,7 +116,7 @@ func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *t ), }, { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, testAccStreamProcessorConfig_polygons()), + Config: testAccStreamProcessorConfig_connectedHome(rName, testAccStreamProcessorConfig_polygons()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor2), testAccCheckStreamProcessorNotRecreated(&streamprocessor, &streamprocessor2), @@ -124,11 +153,11 @@ func TestAccRekognitionStreamProcessor_faceRecognition(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, ""), + Config: testAccStreamProcessorConfig_faceRecognition(rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, fmt.Sprintf("%[1]s-acctest-processor", rName)), - resource.TestCheckResourceAttr(resourceName, names.AttrName, 
fmt.Sprintf("%[1]s-acctest-processor", rName)), + resource.TestCheckResourceAttr(resourceName, names.AttrID, rName), + resource.TestCheckResourceAttr(resourceName, names.AttrName, rName), ), }, { @@ -159,7 +188,7 @@ func TestAccRekognitionStreamProcessor_faceRecognition_boundingBox(t *testing.T) CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, testAccStreamProcessorConfig_boundingBox()), + Config: testAccStreamProcessorConfig_faceRecognition(rName, testAccStreamProcessorConfig_boundingBox()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), @@ -192,7 +221,7 @@ func TestAccRekognitionStreamProcessor_faceRecognition_polygon(t *testing.T) { CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccStreamProcessorConfig_faceRecognition(testAccStreamProcessorConfig_faceRecognition_setup(rName), rName, testAccStreamProcessorConfig_polygons()), + Config: testAccStreamProcessorConfig_faceRecognition(rName, testAccStreamProcessorConfig_polygons()), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, "regions_of_interest.#", acctest.Ct1), @@ -209,35 +238,6 @@ func TestAccRekognitionStreamProcessor_faceRecognition_polygon(t *testing.T) { }) } -func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { - ctx := acctest.Context(t) - - var streamprocessor rekognition.DescribeStreamProcessorOutput - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - resourceName := "aws_rekognition_stream_processor.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { - acctest.PreCheck(ctx, t) - 
acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) - testAccPreCheck(ctx, t) - }, - ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccStreamProcessorConfig_connectedHome(testAccStreamProcessorConfig_connectedHome_setup(rName), rName, ""), - Check: resource.ComposeTestCheckFunc( - testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - acctest.CheckFrameworkResourceDisappears(ctx, acctest.Provider, tfrekognition.ResourceStreamProcessor, resourceName), - ), - ExpectNonEmptyPlan: true, - }, - }, - }) -} - func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) @@ -313,7 +313,7 @@ func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.Describe } } -func testAccStreamProcessorConfig_connectedHome_setup(rName string) string { +func testAccStreamProcessorConfigBase_connectedHome(rName string) string { return fmt.Sprintf(` resource "aws_iam_role" "test" { name = "%[1]s-acctest-role" @@ -350,6 +350,67 @@ resource "aws_kinesis_video_stream" "test" { `, rName) } +func testAccStreamProcessorConfigBase_faceRecognition(rName string) string { + return fmt.Sprintf(` +resource "aws_kinesis_video_stream" "test" { + name = "%[1]s-acctest-kinesis-input" + data_retention_in_hours = 1 + device_name = "kinesis-video-device-name" + media_type = "video/h264" +} + +resource "aws_kinesis_stream" "test_output" { + name = "%[1]s-acctest-kinesis-stream" + shard_count = 1 +} + +resource "aws_iam_role" "test" { + name = "%[1]s-acctest-role" + + inline_policy { + name = "Rekognition-Access" + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "kinesis:Get*", + "kinesis:DescribeStreamSummary" + ] 
+ Effect = "Allow" + Resource = ["${aws_kinesis_video_stream.test.arn}"] + }, + { + Action = [ + "kinesis:PutRecord" + ] + Effect = "Allow" + Resource = ["${aws_kinesis_stream.test_output.arn}"] + }, + ] + }) + } + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Principal = { + Service = "rekognition.amazonaws.com" + } + }, + ] + }) +} + +resource "aws_rekognition_collection" "test" { + collection_id = "%[1]s-acctest-rekognition-collection" +} +`, rName) +} + func testAccStreamProcessorConfig_polygons() string { return ` regions_of_interest { @@ -380,13 +441,13 @@ regions_of_interest { }` } -func testAccStreamProcessorConfig_connectedHome(setup, rName, regionsOfInterest string) string { - return fmt.Sprintf(` -%[1]s - +func testAccStreamProcessorConfig_connectedHome(rName, regionsOfInterest string) string { + return acctest.ConfigCompose( + testAccStreamProcessorConfigBase_connectedHome(rName), + fmt.Sprintf(` resource "aws_rekognition_stream_processor" "test" { role_arn = aws_iam_role.test.arn - name = "%[2]s-acctest-processor" + name = %[1]q data_sharing_preference { opt_in = true @@ -398,7 +459,7 @@ resource "aws_rekognition_stream_processor" "test" { } } -%[3]s +%[2]s settings { connected_home { @@ -416,83 +477,22 @@ resource "aws_rekognition_stream_processor" "test" { sns_topic_arn = aws_sns_topic.test.arn } } -`, setup, rName, regionsOfInterest) -} - -func testAccStreamProcessorConfig_faceRecognition_setup(rName string) string { - return fmt.Sprintf(` -resource "aws_kinesis_video_stream" "test" { - name = "%[1]s-acctest-kinesis-input" - data_retention_in_hours = 1 - device_name = "kinesis-video-device-name" - media_type = "video/h264" -} - -resource "aws_kinesis_stream" "test_output" { - name = "%[1]s-acctest-kinesis-stream" - shard_count = 1 -} - -resource "aws_iam_role" "test" { - name = "%[1]s-acctest-role" - - inline_policy { - name = "Rekognition-Access" - policy = 
jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Action = [ - "kinesis:Get*", - "kinesis:DescribeStreamSummary" - ] - Effect = "Allow" - Resource = ["${aws_kinesis_video_stream.test.arn}"] - }, - { - Action = [ - "kinesis:PutRecord" - ] - Effect = "Allow" - Resource = ["${aws_kinesis_stream.test_output.arn}"] - }, - ] - }) - } - - assume_role_policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Action = "sts:AssumeRole" - Effect = "Allow" - Principal = { - Service = "rekognition.amazonaws.com" - } - }, - ] - }) -} - -resource "aws_rekognition_collection" "test" { - collection_id = "%[1]s-acctest-rekognition-collection" -} -`, rName) +`, rName, regionsOfInterest)) } -func testAccStreamProcessorConfig_faceRecognition(setup, rName, regionsOfInterest string) string { - return fmt.Sprintf(` -%[1]s - +func testAccStreamProcessorConfig_faceRecognition(rName, regionsOfInterest string) string { + return acctest.ConfigCompose( + testAccStreamProcessorConfigBase_faceRecognition(rName), + fmt.Sprintf(` resource "aws_rekognition_stream_processor" "test" { role_arn = aws_iam_role.test.arn - name = "%[2]s-acctest-processor" + name = %[1]q data_sharing_preference { opt_in = false } -%[3]s +%[2]s input { kinesis_video_stream { @@ -512,5 +512,5 @@ resource "aws_rekognition_stream_processor" "test" { } } } -`, setup, rName, regionsOfInterest) +`, rName, regionsOfInterest)) } From 38f38ab977e8a1eb1a2b7be05e493ac0b8e9ccfb Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 11:13:27 -0400 Subject: [PATCH 64/71] r/aws_rekognition_stream_processor(test): tidy config names --- .../rekognition/stream_processor_test.go | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 8f325c35f605..807540431c14 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ 
b/internal/service/rekognition/stream_processor_test.go @@ -86,7 +86,7 @@ func TestAccRekognitionStreamProcessor_disappears(t *testing.T) { }) } -func TestAccRekognitionStreamProcessor_connectedHome_boundingBox_to_polygon(t *testing.T) { +func TestAccRekognitionStreamProcessor_connectedHome(t *testing.T) { ctx := acctest.Context(t) var streamprocessor, streamprocessor2 rekognition.DescribeStreamProcessorOutput @@ -316,7 +316,7 @@ func testAccCheckStreamProcessorNotRecreated(before, after *rekognition.Describe func testAccStreamProcessorConfigBase_connectedHome(rName string) string { return fmt.Sprintf(` resource "aws_iam_role" "test" { - name = "%[1]s-acctest-role" + name = %[1]q assume_role_policy = jsonencode({ Version = "2012-10-17" @@ -334,15 +334,15 @@ resource "aws_iam_role" "test" { } resource "aws_s3_bucket" "test" { - bucket = "%[1]s-acctest-bucket" + bucket = %[1]q } resource "aws_sns_topic" "test" { - name = "%[1]s-acctest-topic" + name = %[1]q } resource "aws_kinesis_video_stream" "test" { - name = "%[1]s-acctest-kinesis-input" + name = %[1]q data_retention_in_hours = 1 device_name = "kinesis-video-device-name" media_type = "video/h264" @@ -353,19 +353,19 @@ resource "aws_kinesis_video_stream" "test" { func testAccStreamProcessorConfigBase_faceRecognition(rName string) string { return fmt.Sprintf(` resource "aws_kinesis_video_stream" "test" { - name = "%[1]s-acctest-kinesis-input" + name = %[1]q data_retention_in_hours = 1 device_name = "kinesis-video-device-name" media_type = "video/h264" } resource "aws_kinesis_stream" "test_output" { - name = "%[1]s-acctest-kinesis-stream" + name = %[1]q shard_count = 1 } resource "aws_iam_role" "test" { - name = "%[1]s-acctest-role" + name = %[1]q inline_policy { name = "Rekognition-Access" @@ -406,7 +406,7 @@ resource "aws_iam_role" "test" { } resource "aws_rekognition_collection" "test" { - collection_id = "%[1]s-acctest-rekognition-collection" + collection_id = %[1]q } `, rName) } From 
c8b05b7f46ee4731c92868dfcb5572ad869f8961 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 11:21:01 -0400 Subject: [PATCH 65/71] chore: changelog --- .changelog/37536.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .changelog/37536.txt diff --git a/.changelog/37536.txt b/.changelog/37536.txt new file mode 100644 index 000000000000..a83830de0561 --- /dev/null +++ b/.changelog/37536.txt @@ -0,0 +1,3 @@ +```release-note:new-resource +aws_rekognition_stream_processor +``` From 1c35443c58901540718ba8f04d501843f7d7a409 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 14:48:42 -0400 Subject: [PATCH 66/71] r/aws_rekognition_stream_processor: prefer ListNestedBlock SingleNestedBlock is not broadly tested. The Terraform Plugin Framework documentation suggests using SingleNestedAttribute instead, which the AWS provider cannot currently adopt due to continued support of Terraform protocol V5. Given these constraints the current preferred convention is to use ListNestedBlock with a SizeAtMost(1) list validator. 
--- .../service/rekognition/stream_processor.go | 352 ++++++++++-------- 1 file changed, 189 insertions(+), 163 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 0aa7f10f0586..bdfb370b813a 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -134,56 +134,68 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), }, Blocks: map[string]schema.Block{ - "data_sharing_preference": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[dataSharingPreferenceModel](ctx), + "data_sharing_preference": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[dataSharingPreferenceModel](ctx), Description: "Shows whether you are sharing data with Rekognition to improve model performance.", - Validators: []validator.Object{ - objectvalidator.IsRequired(), - objectvalidator.AlsoRequires(path.MatchRelative().AtName("opt_in")), + Validators: []validator.List{ + listvalidator.SizeAtMost(1), }, - Attributes: map[string]schema.Attribute{ - "opt_in": schema.BoolAttribute{ - Description: "Do you want to share data with Rekognition to improve model performance.", - Optional: true, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "opt_in": schema.BoolAttribute{ + Description: "Do you want to share data with Rekognition to improve model performance.", + Required: true, + }, }, }, }, - "input": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[inputModel](ctx), + "input": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[inputModel](ctx), Description: "Information about the source streaming video.", - Validators: []validator.Object{ - objectvalidator.IsRequired(), + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeAtMost(1), }, - 
Blocks: map[string]schema.Block{ - "kinesis_video_stream": schema.SingleNestedBlock{ - Validators: []validator.Object{ - objectvalidator.IsRequired(), - }, - CustomType: fwtypes.NewObjectTypeOf[kinesisVideoStreamInputModel](ctx), - Description: "Kinesis video stream stream that provides the source streaming video for a Amazon Rekognition Video stream processor.", - Attributes: map[string]schema.Attribute{ - names.AttrARN: schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Description: "ARN of the Kinesis video stream stream that streams the source video.", - Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), + NestedObject: schema.NestedBlockObject{ + Blocks: map[string]schema.Block{ + "kinesis_video_stream": schema.ListNestedBlock{ + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeAtMost(1), + }, + CustomType: fwtypes.NewListNestedObjectTypeOf[kinesisVideoStreamInputModel](ctx), + Description: "Kinesis video stream stream that provides the source streaming video for a Amazon Rekognition Video stream processor.", + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + names.AttrARN: schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Description: "ARN of the Kinesis video stream stream that streams the source video.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, }, }, }, }, }, }, - "notification_channel": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[notificationChannelModel](ctx), + "notification_channel": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[notificationChannelModel](ctx), Description: "The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the object detection results and completion status of a video analysis operation.", - Attributes: map[string]schema.Attribute{ - names.AttrSNSTopicARN: 
schema.StringAttribute{ - Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", - CustomType: fwtypes.ARNType, - Optional: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + }, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + names.AttrSNSTopicARN: schema.StringAttribute{ + Description: "The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status.", + CustomType: fwtypes.ARNType, + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, }, }, }, @@ -276,132 +288,149 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem }, }, }, - "output": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[outputModel](ctx), + "output": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[outputModel](ctx), Description: "Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results.", - Validators: []validator.Object{ - objectvalidator.AtLeastOneOf( + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeAtMost(1), + listvalidator.AtLeastOneOf( path.MatchRelative().AtName("kinesis_data_stream"), path.MatchRelative().AtName("s3_destination"), ), }, - Blocks: map[string]schema.Block{ - "kinesis_data_stream": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[kinesisDataStreamModel](ctx), - Description: "The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results.", - Validators: []validator.Object{ - objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("s3_destination")), - }, - Attributes: 
map[string]schema.Attribute{ - names.AttrARN: schema.StringAttribute{ - CustomType: fwtypes.ARNType, - Description: "ARN of the output Amazon Kinesis Data Streams stream.", - Optional: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), + NestedObject: schema.NestedBlockObject{ + Blocks: map[string]schema.Block{ + "kinesis_data_stream": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[kinesisDataStreamModel](ctx), + Description: "The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results.", + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("s3_destination")), + }, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + names.AttrARN: schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Description: "ARN of the output Amazon Kinesis Data Streams stream.", + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, }, }, }, - }, - "s3_destination": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[s3DestinationModel](ctx), - Description: "The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation.", - Validators: []validator.Object{ - objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("kinesis_data_stream")), - }, - Attributes: map[string]schema.Attribute{ - names.AttrBucket: schema.StringAttribute{ - Description: "The name of the Amazon S3 bucket you want to associate with the streaming video project.", - Optional: true, - Validators: []validator.String{ - stringvalidator.LengthBetween(3, 255), - }, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, + "s3_destination": schema.ListNestedBlock{ + CustomType: 
fwtypes.NewListNestedObjectTypeOf[s3DestinationModel](ctx), + Description: "The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation.", + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("kinesis_data_stream")), }, - "key_prefix": schema.StringAttribute{ - Description: "The prefix value of the location within the bucket that you want the information to be published to.", - Optional: true, - Validators: []validator.String{ - stringvalidator.LengthAtMost(1024), - }, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + names.AttrBucket: schema.StringAttribute{ + Description: "The name of the Amazon S3 bucket you want to associate with the streaming video project.", + Optional: true, + Validators: []validator.String{ + stringvalidator.LengthBetween(3, 255), + }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "key_prefix": schema.StringAttribute{ + Description: "The prefix value of the location within the bucket that you want the information to be published to.", + Optional: true, + Validators: []validator.String{ + stringvalidator.LengthAtMost(1024), + }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, }, }, }, }, }, }, - "settings": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[settingsModel](ctx), + "settings": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[settingsModel](ctx), Description: "Input parameters used in a streaming video analyzed by a stream processor.", - Validators: []validator.Object{ - objectvalidator.AtLeastOneOf( + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeAtMost(1), + listvalidator.AtLeastOneOf( 
path.MatchRelative().AtName("connected_home"), path.MatchRelative().AtName("face_search"), ), }, - Blocks: map[string]schema.Block{ - "connected_home": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[connectedHomeModel](ctx), - Description: "Label detection settings to use on a streaming video.", - Validators: []validator.Object{ - objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("face_search")), - }, - Attributes: map[string]schema.Attribute{ - "labels": schema.ListAttribute{ - Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", - CustomType: fwtypes.ListOfStringType, - Optional: true, - Validators: []validator.List{ - listvalidator.SizeAtLeast(1), - listvalidator.ValueStringsAre(stringvalidator.OneOf(labelsEnumValues()...)), - }, + NestedObject: schema.NestedBlockObject{ + Blocks: map[string]schema.Block{ + "connected_home": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[connectedHomeModel](ctx), + Description: "Label detection settings to use on a streaming video.", + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("face_search")), }, - "min_confidence": schema.Float64Attribute{ - Description: "The minimum confidence required to label an object in the video.", - Validators: []validator.Float64{ - float64validator.Between(connectedHomeConfidenceMin, connectedHomeConfidenceMax), + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "labels": schema.ListAttribute{ + Description: "Specifies what you want to detect in the video, such as people, packages, or pets.", + CustomType: fwtypes.ListOfStringType, + Optional: true, + Validators: []validator.List{ + listvalidator.SizeAtLeast(1), + listvalidator.ValueStringsAre(stringvalidator.OneOf(labelsEnumValues()...)), + }, + }, + "min_confidence": schema.Float64Attribute{ + Description: "The minimum 
confidence required to label an object in the video.", + Validators: []validator.Float64{ + float64validator.Between(connectedHomeConfidenceMin, connectedHomeConfidenceMax), + }, + Computed: true, + Optional: true, + }, }, - Computed: true, - Optional: true, }, }, - }, - "face_search": schema.SingleNestedBlock{ - CustomType: fwtypes.NewObjectTypeOf[faceSearchModel](ctx), - Description: "Face search settings to use on a streaming video.", - Validators: []validator.Object{ - objectvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("connected_home")), - objectvalidator.AlsoRequires( - path.MatchRelative().AtName("collection_id"), - ), - }, - Attributes: map[string]schema.Attribute{ - "collection_id": schema.StringAttribute{ - Description: "The ID of a collection that contains faces that you want to search for.", - Validators: []validator.String{ - stringvalidator.LengthAtMost(2048), - stringvalidator.RegexMatches(collectionIdRegex, ""), - }, - Optional: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, + "face_search": schema.ListNestedBlock{ + CustomType: fwtypes.NewListNestedObjectTypeOf[faceSearchModel](ctx), + Description: "Face search settings to use on a streaming video.", + Validators: []validator.List{ + listvalidator.SizeAtMost(1), + listvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("connected_home")), }, - "face_match_threshold": schema.Float64Attribute{ - Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", - Validators: []validator.Float64{ - float64validator.Between(faceMatchThresholdMin, faceMatchThresholdMax), - }, - PlanModifiers: []planmodifier.Float64{ - float64planmodifier.RequiresReplace(), + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "collection_id": schema.StringAttribute{ + Description: "The ID of a collection that contains faces that you want to search for.", + Validators: 
[]validator.String{ + stringvalidator.LengthAtMost(2048), + stringvalidator.RegexMatches(collectionIdRegex, ""), + }, + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "face_match_threshold": schema.Float64Attribute{ + Description: "Minimum face match confidence score that must be met to return a result for a recognized face.", + Validators: []validator.Float64{ + float64validator.Between(faceMatchThresholdMin, faceMatchThresholdMax), + }, + PlanModifiers: []planmodifier.Float64{ + float64planmodifier.RequiresReplace(), + }, + Default: float64default.StaticFloat64(faceMatchThresholdDefault), + Optional: true, + Computed: true, + }, }, - Default: float64default.StaticFloat64(faceMatchThresholdDefault), - Computed: true, - Optional: true, }, }, }, @@ -453,7 +482,7 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat plan.ID = plan.Name if plan.DataSharingPreference.IsNull() { - dataSharing, diag := fwtypes.NewObjectValueOf(ctx, &dataSharingPreferenceModel{OptIn: basetypes.NewBoolValue(false)}) + dataSharing, diag := fwtypes.NewListNestedObjectValueOfPtr(ctx, &dataSharingPreferenceModel{OptIn: basetypes.NewBoolValue(false)}) resp.Diagnostics.Append(diag...) plan.DataSharingPreference = dataSharing resp.Diagnostics.Append(req.Plan.Set(ctx, &plan)...) 
@@ -527,7 +556,7 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat } if !plan.DataSharingPreference.Equal(state.DataSharingPreference) { - dspPlan, dspState := unwrapObjectValueOf(ctx, resp.Diagnostics, plan.DataSharingPreference, state.DataSharingPreference) + dspPlan, dspState := unwrapListNestedObjectValueOf(ctx, resp.Diagnostics, plan.DataSharingPreference, state.DataSharingPreference) if resp.Diagnostics.HasError() { return } @@ -544,12 +573,12 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat ConnectedHomeForUpdate: &awstypes.ConnectedHomeSettingsForUpdate{}, } - settingsPlan, settingsState := unwrapObjectValueOf(ctx, resp.Diagnostics, plan.Settings, state.Settings) + settingsPlan, settingsState := unwrapListNestedObjectValueOf(ctx, resp.Diagnostics, plan.Settings, state.Settings) if resp.Diagnostics.HasError() { return } - connectedHomePlan, connectedHomeState := unwrapObjectValueOf(ctx, resp.Diagnostics, settingsPlan.ConnectedHome, settingsState.ConnectedHome) + connectedHomePlan, connectedHomeState := unwrapListNestedObjectValueOf(ctx, resp.Diagnostics, settingsPlan.ConnectedHome, settingsState.ConnectedHome) if resp.Diagnostics.HasError() { return } @@ -645,14 +674,12 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { conn := r.Meta().RekognitionClient(ctx) - // TIP: -- 2. Fetch the state var state resourceStreamProcessorDataModel resp.Diagnostics.Append(req.State.Get(ctx, &state)...) if resp.Diagnostics.HasError() { return } - // TIP: -- 3. Populate a delete input structure in := &rekognition.DeleteStreamProcessorInput{ Name: aws.String(state.ID.ValueString()), } @@ -670,7 +697,6 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet return } - // TIP: -- 5. 
Use a waiter to wait for delete to complete deleteTimeout := r.DeleteTimeout(ctx, state.Timeouts) _, err = waitStreamProcessorDeleted(ctx, conn, state.ID.ValueString(), deleteTimeout) if err != nil { @@ -788,7 +814,7 @@ func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name return out, nil } -func unwrapObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostics, plan fwtypes.ObjectValueOf[T], state fwtypes.ObjectValueOf[T]) (*T, *T) { +func unwrapListNestedObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostics, plan fwtypes.ListNestedObjectValueOf[T], state fwtypes.ListNestedObjectValueOf[T]) (*T, *T) { ptrPlan, diags := plan.ToPtr(ctx) diagnostics.Append(diags...) @@ -799,20 +825,20 @@ func unwrapObjectValueOf[T any](ctx context.Context, diagnostics diag.Diagnostic } type resourceStreamProcessorDataModel struct { - ARN types.String `tfsdk:"arn"` - DataSharingPreference fwtypes.ObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` - ID types.String `tfsdk:"id"` - Input fwtypes.ObjectValueOf[inputModel] `tfsdk:"input"` - KmsKeyId types.String `tfsdk:"kms_key_id"` - NotificationChannel fwtypes.ObjectValueOf[notificationChannelModel] `tfsdk:"notification_channel"` - Name types.String `tfsdk:"name"` - Output fwtypes.ObjectValueOf[outputModel] `tfsdk:"output"` - RegionsOfInterest fwtypes.ListNestedObjectValueOf[regionOfInterestModel] `tfsdk:"regions_of_interest"` - RoleARN fwtypes.ARN `tfsdk:"role_arn"` - Settings fwtypes.ObjectValueOf[settingsModel] `tfsdk:"settings"` - Tags types.Map `tfsdk:"tags"` - TagsAll types.Map `tfsdk:"tags_all"` - Timeouts timeouts.Value `tfsdk:"timeouts"` + ARN types.String `tfsdk:"arn"` + DataSharingPreference fwtypes.ListNestedObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` + ID types.String `tfsdk:"id"` + Input fwtypes.ListNestedObjectValueOf[inputModel] `tfsdk:"input"` + KmsKeyId types.String `tfsdk:"kms_key_id"` + NotificationChannel 
fwtypes.ListNestedObjectValueOf[notificationChannelModel] `tfsdk:"notification_channel"` + Name types.String `tfsdk:"name"` + Output fwtypes.ListNestedObjectValueOf[outputModel] `tfsdk:"output"` + RegionsOfInterest fwtypes.ListNestedObjectValueOf[regionOfInterestModel] `tfsdk:"regions_of_interest"` + RoleARN fwtypes.ARN `tfsdk:"role_arn"` + Settings fwtypes.ListNestedObjectValueOf[settingsModel] `tfsdk:"settings"` + Tags types.Map `tfsdk:"tags"` + TagsAll types.Map `tfsdk:"tags_all"` + Timeouts timeouts.Value `tfsdk:"timeouts"` } type dataSharingPreferenceModel struct { @@ -820,7 +846,7 @@ type dataSharingPreferenceModel struct { } type inputModel struct { - KinesisVideoStream fwtypes.ObjectValueOf[kinesisVideoStreamInputModel] `tfsdk:"kinesis_video_stream"` + KinesisVideoStream fwtypes.ListNestedObjectValueOf[kinesisVideoStreamInputModel] `tfsdk:"kinesis_video_stream"` } type kinesisVideoStreamInputModel struct { @@ -832,8 +858,8 @@ type notificationChannelModel struct { } type outputModel struct { - KinesisDataStream fwtypes.ObjectValueOf[kinesisDataStreamModel] `tfsdk:"kinesis_data_stream"` - S3Destination fwtypes.ObjectValueOf[s3DestinationModel] `tfsdk:"s3_destination"` + KinesisDataStream fwtypes.ListNestedObjectValueOf[kinesisDataStreamModel] `tfsdk:"kinesis_data_stream"` + S3Destination fwtypes.ListNestedObjectValueOf[s3DestinationModel] `tfsdk:"s3_destination"` } type kinesisDataStreamModel struct { @@ -863,8 +889,8 @@ type polygonModel struct { } type settingsModel struct { - ConnectedHome fwtypes.ObjectValueOf[connectedHomeModel] `tfsdk:"connected_home"` - FaceSearch fwtypes.ObjectValueOf[faceSearchModel] `tfsdk:"face_search"` + ConnectedHome fwtypes.ListNestedObjectValueOf[connectedHomeModel] `tfsdk:"connected_home"` + FaceSearch fwtypes.ListNestedObjectValueOf[faceSearchModel] `tfsdk:"face_search"` } type connectedHomeModel struct { From b7fcc450e5ce5cceb2f1456471e51231395a270b Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Wed, 17 Jul 2024 15:11:46 
-0400 Subject: [PATCH 67/71] r/aws_rekognition_stream_processor(doc): tidy registry docs --- ...rekognition_stream_processor.html.markdown | 94 +++++++++---------- 1 file changed, 46 insertions(+), 48 deletions(-) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 39813a5d8f4d..73994113cdb9 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -10,13 +10,13 @@ description: |- Terraform resource for managing an AWS Rekognition Stream Processor. -~> **Note:** This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. +~> This resource must be configured specifically for your use case, and not all options are compatible with one another. See [Stream Processor API documentation](https://docs.aws.amazon.com/rekognition/latest/APIReference/API_CreateStreamProcessor.html#rekognition-CreateStreamProcessor-request-Input) for configuration information. -~> **Note:** Stream Processors configured for Face Recognition cannot have _any_ properties updated after the fact, and it will result in an AWS API error. +~> Stream Processors configured for Face Recognition cannot have _any_ properties updated after the fact, and it will result in an AWS API error. 
## Example Usage -### Label Detection Usage +### Label Detection ```terraform resource "aws_s3_bucket" "example" { @@ -35,7 +35,7 @@ resource "aws_kinesis_video_stream" "example" { } resource "aws_iam_role" "example" { - name = "eample-role" + name = "example-role" inline_policy { name = "Rekognition-Access" @@ -113,7 +113,6 @@ resource "aws_rekognition_stream_processor" "example" { ### Face Detection Usage ```terraform - resource "aws_kinesis_video_stream" "example" { name = "example-kinesis-input" data_retention_in_hours = 1 @@ -121,7 +120,7 @@ resource "aws_kinesis_video_stream" "example" { media_type = "video/h264" } -resource "aws_kinesis_stream" "example_output" { +resource "aws_kinesis_stream" "example" { name = "terraform-kinesis-example" shard_count = 1 } @@ -147,7 +146,7 @@ resource "aws_iam_role" "example" { "kinesis:PutRecord" ] Effect = "Allow" - Resource = ["${aws_kinesis_stream.example_output.arn}"] + Resource = ["${aws_kinesis_stream.example.arn}"] }, ] }) @@ -202,7 +201,7 @@ resource "aws_rekognition_stream_processor" "example" { output { kinesis_data_stream { - arn = aws_kinesis_stream.example_output.arn + arn = aws_kinesis_stream.example.arn } } @@ -218,54 +217,52 @@ resource "aws_rekognition_stream_processor" "example" { The following arguments are required: -* `data_sharing_preference` - (Optional) See [`data_sharing_preference`](#data_sharing_preference) definition. -* `input` - (Required) Input video stream. See [`input`](#input) definition. -* `name` - (Required) The name of the Stream Processor -* `output` - (Required) Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results +* `input` - (Required) Input video stream. See [`input`](#input). +* `name` - (Required) The name of the Stream Processor. +* `output` - (Required) Kinesis data stream stream or Amazon S3 bucket location to which Amazon Rekognition Video puts the analysis results. See [`output`](#output). 
* `role_arn` - (Required) The Amazon Resource Number (ARN) of the IAM role that allows access to the stream processor. The IAM role provides Rekognition read permissions for a Kinesis stream. It also provides write permissions to an Amazon S3 bucket and Amazon Simple Notification Service topic for a label detection stream processor. This is required for both face search and label detection stream processors. -* `settings` - (Required) Input parameters used in a streaming video analyzed by a stream processor. See [`settings`](#settings) definition. +* `settings` - (Required) Input parameters used in a streaming video analyzed by a stream processor. See [`settings`](#settings). The following arguments are optional: -* `kms_key_id` - (Optional) Optional parameter for label detection stream processors -* `notification_channel` - (Optional) The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status. See [`notification_channel`](#notification_channel) definition. -* `regions_of_interest` - (Optional) Specifies locations in the frames where Amazon Rekognition checks for objects or people. See [`regions_of_interest`] definition. +* `data_sharing_preference` - (Optional) See [`data_sharing_preference`](#data_sharing_preference). +* `kms_key_id` - (Optional) Optional parameter for label detection stream processors. +* `notification_channel` - (Optional) The Amazon Simple Notification Service topic to which Amazon Rekognition publishes the completion status. See [`notification_channel`](#notification_channel). +* `regions_of_interest` - (Optional) Specifies locations in the frames where Amazon Rekognition checks for objects or people. See [`regions_of_interest`](#regions_of_interest). * `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Nested Blocks - ### `input` -* `kinesis_video_stream` - Kinesis input stream. See [`kinesis_video_stream`](#kinesis_video_stream) definition. +* `kinesis_video_stream` - (Optional) Kinesis input stream. See [`kinesis_video_stream`](#kinesis_video_stream). -#### `kinesis_video_stream` +### `kinesis_video_stream` -* `arn` - ARN of the Kinesis video stream stream that streams the source video +* `arn` - (Optional) ARN of the Kinesis video stream stream that streams the source video. ### `output` -* `kinesis_data_stream` - (Optional) The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results. See [`kinesis_data_stream`](#kinesis_data_stream) definition. -* `s3_destination` - (Optiona) The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation. See [`s3_destination`](#s3_destination) definition. +* `kinesis_data_stream` - (Optional) The Amazon Kinesis Data Streams stream to which the Amazon Rekognition stream processor streams the analysis results. See [`kinesis_data_stream`](#kinesis_data_stream). +* `s3_destination` - (Optional) The Amazon S3 bucket location to which Amazon Rekognition publishes the detailed inference results of a video analysis operation. See [`s3_destination`](#s3_destination). -#### `kinesis_data_stream` +### `kinesis_data_stream` -* `arn` - ARN of the output Amazon Kinesis Data Streams stream. +* `arn` - (Optional) ARN of the output Amazon Kinesis Data Streams stream. 
-#### `s3_destination` +### `s3_destination` -* `bucket` - The name of the Amazon S3 bucket you want to associate with the streaming video project -* `key_prefixx` - The prefix value of the location within the bucket that you want the information to be published to +* `bucket` - (Optional) Name of the Amazon S3 bucket you want to associate with the streaming video project. +* `key_prefixx` - (Optional) Prefix value of the location within the bucket that you want the information to be published to. ### `data_sharing_preference` -* `opt_in` - (Optional) Shows whether you are sharing data with Rekognition to improve model performance. +* `opt_in` - (Optional) Whether you are sharing data with Rekognition to improve model performance. ### `regions_of_interest` -* `bounding_box` - (Optional) The box representing a region of interest on screen. Only 1 per region is allowed. See [`bounding_box`](#bounding_box) definition. -* `polygon` - (Optional) Shows whether you are sharing data with Rekognition to improve model performance. See [`polygon`](#polygon) definition. +* `bounding_box` - (Optional) Box representing a region of interest on screen. Only 1 per region is allowed. See [`bounding_box`](#bounding_box). +* `polygon` - (Optional) Shape made up of up to 10 Point objects to define a region of interest. See [`polygon`](#polygon). -#### `bounding_box` +### `bounding_box` A region can only have a single `bounding_box` @@ -274,7 +271,7 @@ A region can only have a single `bounding_box` * `left` - (Required) Left coordinate of the bounding box as a ratio of overall image width. * `top` - (Required) Top coordinate of the bounding box as a ratio of overall image height. -#### `polygon` +### `polygon` If using `polygon`, a minimum of 3 per region is required, with a maximum of 10. @@ -283,41 +280,42 @@ If using `polygon`, a minimum of 3 per region is required, with a maximum of 10. 
### `notification_channel` -* `sns_topic_arn` - The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status. +* `sns_topic_arn` - (Required) The Amazon Resource Number (ARN) of the Amazon Amazon Simple Notification Service topic to which Amazon Rekognition posts the completion status. ### `settings` -* `connected_home` - Label detection settings to use on a streaming video. See [`connected_home`](#connected_home) definition. -* `face_search` - Input face recognition parameters for an Amazon Rekognition stream processor. See [`face_search`](#face_search) definition. +* `connected_home` - (Optional) Label detection settings to use on a streaming video. See [`connected_home`](#connected_home). +* `face_search` - (Optional) Input face recognition parameters for an Amazon Rekognition stream processor. See [`face_search`](#face_search). -#### `connected_home` +### `connected_home` -* `labels` - Specifies what you want to detect in the video, such as people, packages, or pets. The current valid labels you can include in this list are: "PERSON", "PET", "PACKAGE", and "ALL". -* `min_confidence` - The minimum confidence required to label an object in the video. +* `labels` - (Required) Specifies what you want to detect in the video, such as people, packages, or pets. The current valid labels you can include in this list are: `PERSON`, `PET`, `PACKAGE`, and `ALL`. +* `min_confidence` - (Optional) Minimum confidence required to label an object in the video. -#### `face_search` +### `face_search` -* `collection_id` - The ID of a collection that contains faces that you want to search for. -* `face_match_threshold` - Minimum face match confidence score that must be met to return a result for a recognized face +* `collection_id` - (Optional) ID of a collection that contains faces that you want to search for. 
+* `face_match_threshold` - (Optional) Minimum face match confidence score that must be met to return a result for a recognized face. ## Attribute Reference This resource exports the following attributes in addition to the arguments above: -* `arn` - ARN of the Stream Processor. Do not begin the description with "An", "The", "Defines", "Indicates", or "Specifies," as these are verbose. In other words, "Indicates the amount of storage," can be rewritten as "Amount of storage," without losing any information. +* `arn` - ARN of the Stream Processor. +* `id` - Name of the Stream Processor. * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). ## Timeouts [Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): -* `create` - (Default `60m`) -* `update` - (Default `180m`) -* `delete` - (Default `90m`) +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) ## Import -In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Rekognition Stream Processor using the `example_id_arg`. For example: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Rekognition Stream Processor using the `id`. For example: ```terraform import { @@ -326,7 +324,7 @@ import { } ``` -Using `terraform import`, import Rekognition Stream Processor using the `example_id_arg`. For example: +Using `terraform import`, import Rekognition Stream Processor using the `id`. 
For example: ```console % terraform import aws_rekognition_stream_processor.example stream_processor-id-12345678 From d9939d94fdb09a1a678b6a7fb6aa9c59a36342ea Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Thu, 18 Jul 2024 10:11:17 -0400 Subject: [PATCH 68/71] r/aws_rekognition_stream_processor(test): add tagging test Also renames the `arn` attribute to `stream_processor_arn` to align with the AWS API. --- internal/service/rekognition/generate.go | 2 +- .../rekognition/service_package_gen.go | 3 + .../service/rekognition/stream_processor.go | 43 ++--- .../rekognition/stream_processor_test.go | 148 +++++++++++++++++- ...rekognition_stream_processor.html.markdown | 2 +- 5 files changed, 170 insertions(+), 28 deletions(-) diff --git a/internal/service/rekognition/generate.go b/internal/service/rekognition/generate.go index 8afcbb88d64e..1c2b9728d734 100644 --- a/internal/service/rekognition/generate.go +++ b/internal/service/rekognition/generate.go @@ -1,8 +1,8 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -//go:generate go run ../../generate/tags/main.go -AWSSDKVersion=2 -ServiceTagsMap -KVTValues -SkipTypesImp -ListTags -UpdateTags //go:generate go run ../../generate/servicepackage/main.go +//go:generate go run ../../generate/tags/main.go -AWSSDKVersion=2 -ServiceTagsMap -KVTValues -SkipTypesImp -ListTags -UpdateTags // ONLY generate directives and package declaration! Do not add anything else to this file. 
package rekognition diff --git a/internal/service/rekognition/service_package_gen.go b/internal/service/rekognition/service_package_gen.go index eb2208e713a0..1ca8339efdd4 100644 --- a/internal/service/rekognition/service_package_gen.go +++ b/internal/service/rekognition/service_package_gen.go @@ -34,6 +34,9 @@ func (p *servicePackage) FrameworkResources(ctx context.Context) []*types.Servic { Factory: newResourceStreamProcessor, Name: "Stream Processor", + Tags: &types.ServicePackageResourceTags{ + IdentifierAttribute: "stream_processor_arn", + }, }, } } diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index bdfb370b813a..c3dd915b0877 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -73,6 +73,7 @@ var ( ) // @FrameworkResource("aws_rekognition_stream_processor", name="Stream Processor") +// @Tags(identifierAttribute="stream_processor_arn") func newResourceStreamProcessor(_ context.Context) (resource.ResourceWithConfigure, error) { r := &resourceStreamProcessor{} r.SetDefaultCreateTimeout(30 * time.Minute) @@ -99,7 +100,6 @@ func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.Metad func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ - names.AttrARN: framework.ARNAttributeComputedOnly(), names.AttrKMSKeyID: schema.StringAttribute{ Description: "The identifier for your AWS Key Management Service key (AWS KMS key). 
You can supply the Amazon Resource Name (ARN) of your KMS key, the ID of your KMS key, an alias for your KMS key, or an alias ARN.", Optional: true, @@ -130,6 +130,13 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringplanmodifier.RequiresReplace(), }, }, + "stream_processor_arn": schema.StringAttribute{ + CustomType: fwtypes.ARNType, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, names.AttrTags: tftags.TagsAttribute(), names.AttrTagsAll: tftags.TagsAttributeComputedOnly(), }, @@ -478,8 +485,8 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - plan.ARN = fwflex.StringToFramework(ctx, out.StreamProcessorArn) plan.ID = plan.Name + plan.StreamProcessorARN = fwflex.StringToFrameworkARN(ctx, out.StreamProcessorArn) if plan.DataSharingPreference.IsNull() { dataSharing, diag := fwtypes.NewListNestedObjectValueOfPtr(ctx, &dataSharingPreferenceModel{OptIn: basetypes.NewBoolValue(false)}) @@ -651,24 +658,24 @@ func (r *resourceStreamProcessor) Update(ctx context.Context, req resource.Updat ) return } - } - updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) - updated, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) - if err != nil { - resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForUpdate, ResNameStreamProcessor, plan.Name.String(), err), - err.Error(), - ) - return - } + updateTimeout := r.UpdateTimeout(ctx, plan.Timeouts) + updated, err := waitStreamProcessorUpdated(ctx, conn, plan.Name.ValueString(), updateTimeout) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForUpdate, ResNameStreamProcessor, plan.Name.String(), err), + err.Error(), + ) + return + } - resp.Diagnostics.Append(fwflex.Flatten(ctx, updated, &plan)...) 
- if resp.Diagnostics.HasError() { - return - } + resp.Diagnostics.Append(fwflex.Flatten(ctx, updated, &plan)...) + if resp.Diagnostics.HasError() { + return + } - resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) + } } func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { @@ -825,7 +832,6 @@ func unwrapListNestedObjectValueOf[T any](ctx context.Context, diagnostics diag. } type resourceStreamProcessorDataModel struct { - ARN types.String `tfsdk:"arn"` DataSharingPreference fwtypes.ListNestedObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` ID types.String `tfsdk:"id"` Input fwtypes.ListNestedObjectValueOf[inputModel] `tfsdk:"input"` @@ -836,6 +842,7 @@ type resourceStreamProcessorDataModel struct { RegionsOfInterest fwtypes.ListNestedObjectValueOf[regionOfInterestModel] `tfsdk:"regions_of_interest"` RoleARN fwtypes.ARN `tfsdk:"role_arn"` Settings fwtypes.ListNestedObjectValueOf[settingsModel] `tfsdk:"settings"` + StreamProcessorARN fwtypes.ARN `tfsdk:"stream_processor_arn"` Tags types.Map `tfsdk:"tags"` TagsAll types.Map `tfsdk:"tags_all"` Timeouts timeouts.Value `tfsdk:"timeouts"` diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 807540431c14..abb051a5af80 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -48,10 +48,9 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{names.AttrARN}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -161,10 +160,9 @@ func TestAccRekognitionStreamProcessor_faceRecognition(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{names.AttrARN}, + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -238,6 +236,57 @@ func TestAccRekognitionStreamProcessor_faceRecognition_polygon(t *testing.T) { }) } +func TestAccRekognitionStreamProcessor_tags(t *testing.T) { + ctx := acctest.Context(t) + + var streamprocessor rekognition.DescribeStreamProcessorOutput + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_rekognition_stream_processor.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + acctest.PreCheck(ctx, t) + acctest.PreCheckPartitionHasService(t, names.RekognitionEndpointID) + testAccPreCheck(ctx, t) + }, + ErrorCheck: acctest.ErrorCheck(t, names.RekognitionServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckStreamProcessorDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccStreamProcessorConfig_tags1(rName, acctest.CtKey1, acctest.CtValue1), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsPercent, acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsKey1, acctest.CtValue1), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStreamProcessorConfig_tags2(rName, acctest.CtKey1, acctest.CtValue1Updated, acctest.CtKey2, acctest.CtValue2), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsPercent, acctest.Ct2), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsKey1, acctest.CtValue1Updated), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsKey2, acctest.CtValue2), + ), + }, + { + Config: 
testAccStreamProcessorConfig_tags1(rName, acctest.CtKey2, acctest.CtValue2), + Check: resource.ComposeTestCheckFunc( + testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsPercent, acctest.Ct1), + resource.TestCheckResourceAttr(resourceName, acctest.CtTagsKey2, acctest.CtValue2), + ), + }, + }, + }) +} + func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) @@ -514,3 +563,86 @@ resource "aws_rekognition_stream_processor" "test" { } `, rName, regionsOfInterest)) } + +func testAccStreamProcessorConfig_tags1(rName, tagKey1, tagValue1 string) string { + return acctest.ConfigCompose( + testAccStreamProcessorConfigBase_connectedHome(rName), + fmt.Sprintf(` +resource "aws_rekognition_stream_processor" "test" { + role_arn = aws_iam_role.test.arn + name = %[1]q + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.test.bucket + } + } + + settings { + connected_home { + labels = ["PERSON", "ALL"] + } + } + + input { + kinesis_video_stream { + arn = aws_kinesis_video_stream.test.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.test.arn + } + + tags = { + %[2]q = %[3]q + } +} +`, rName, tagKey1, tagValue1)) +} + +func testAccStreamProcessorConfig_tags2(rName, tagKey1, tagValue1, tagKey2, tagValue2 string) string { + return acctest.ConfigCompose( + testAccStreamProcessorConfigBase_connectedHome(rName), + fmt.Sprintf(` +resource "aws_rekognition_stream_processor" "test" { + role_arn = aws_iam_role.test.arn + name = %[1]q + + data_sharing_preference { + opt_in = true + } + + output { + s3_destination { + bucket = aws_s3_bucket.test.bucket + } + } + + settings { + connected_home { + labels = ["PERSON", "ALL"] + } + } + + input { + kinesis_video_stream { + arn = 
aws_kinesis_video_stream.test.arn + } + } + + notification_channel { + sns_topic_arn = aws_sns_topic.test.arn + } + + tags = { + %[2]q = %[3]q + %[4]q = %[5]q + } +} +`, rName, tagKey1, tagValue1, tagKey2, tagValue2)) +} diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index 73994113cdb9..de71df30e25b 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -301,8 +301,8 @@ If using `polygon`, a minimum of 3 per region is required, with a maximum of 10. This resource exports the following attributes in addition to the arguments above: -* `arn` - ARN of the Stream Processor. * `id` - Name of the Stream Processor. +* `stream_processor_arn` - ARN of the Stream Processor. * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). ## Timeouts From bfe298c39e7be2b6a942cec1f103c1e0be1c3551 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Thu, 18 Jul 2024 10:20:16 -0400 Subject: [PATCH 69/71] r/aws_rekognition_stream_processor: rm explicit ImportState method This is already provided by the embedded `framework.WithImportByID` struct. 
--- internal/service/rekognition/stream_processor.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index c3dd915b0877..3f09ecd35a2d 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -715,10 +715,6 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet } } -func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root(names.AttrID), req, resp) -} - func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resource.ModifyPlanRequest, response *resource.ModifyPlanResponse) { r.SetTagsAll(ctx, request, response) } From 29ecfa00071769be41cb41cde97e553f948ed440 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Thu, 18 Jul 2024 11:21:02 -0400 Subject: [PATCH 70/71] r/aws_rekognition_stream_processor(test): extend _basic test checks --- internal/service/rekognition/stream_processor_test.go | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index abb051a5af80..6f46eea7cdb9 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -28,6 +28,9 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { var streamprocessor rekognition.DescribeStreamProcessorOutput rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_rekognition_stream_processor.test" + s3BucketResourceName := "aws_s3_bucket.test" + kinesisVideoStreamResourceName := "aws_kinesis_video_stream.test" + snsTopicResourceName := "aws_sns_topic.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { @@ -45,6 +48,13 @@ func 
TestAccRekognitionStreamProcessor_basic(t *testing.T) { testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), resource.TestCheckResourceAttr(resourceName, names.AttrID, rName), resource.TestCheckResourceAttr(resourceName, names.AttrName, rName), + resource.TestCheckResourceAttr(resourceName, "data_sharing_preference.0.opt_in", acctest.CtTrue), + resource.TestCheckResourceAttrPair(resourceName, "output.0.s3_destination.0.bucket", s3BucketResourceName, names.AttrBucket), + resource.TestCheckResourceAttr(resourceName, "settings.0.connected_home.0.labels.#", acctest.Ct2), + resource.TestCheckTypeSetElemAttr(resourceName, "settings.0.connected_home.0.labels.*", "PERSON"), + resource.TestCheckTypeSetElemAttr(resourceName, "settings.0.connected_home.0.labels.*", "ALL"), + resource.TestCheckResourceAttrPair(resourceName, "input.0.kinesis_video_stream.0.arn", kinesisVideoStreamResourceName, names.AttrARN), + resource.TestCheckResourceAttrPair(resourceName, "notification_channel.0.sns_topic_arn", snsTopicResourceName, names.AttrARN), ), }, { From 46bda14385ae4a06245a659138337d0037eb68f3 Mon Sep 17 00:00:00 2001 From: Jared Baker Date: Thu, 18 Jul 2024 12:10:43 -0400 Subject: [PATCH 71/71] r/aws_rekognition_stream_processor: remove redundant id attribute --- internal/service/rekognition/exports_test.go | 6 +- .../service/rekognition/stream_processor.go | 42 +++++++------- .../rekognition/stream_processor_test.go | 57 ++++++++++++------- ...rekognition_stream_processor.html.markdown | 9 ++- 4 files changed, 64 insertions(+), 50 deletions(-) diff --git a/internal/service/rekognition/exports_test.go b/internal/service/rekognition/exports_test.go index f08d56287a35..f91a5af29fc5 100644 --- a/internal/service/rekognition/exports_test.go +++ b/internal/service/rekognition/exports_test.go @@ -12,7 +12,7 @@ var ( ) var ( - FindCollectionByID = findCollectionByID - FindProjectByName = findProjectByName - FindStreamProcessorByID = findStreamProcessorByID + 
FindCollectionByID = findCollectionByID + FindProjectByName = findProjectByName + FindStreamProcessorByName = findStreamProcessorByName ) diff --git a/internal/service/rekognition/stream_processor.go b/internal/service/rekognition/stream_processor.go index 3f09ecd35a2d..77321677ccca 100644 --- a/internal/service/rekognition/stream_processor.go +++ b/internal/service/rekognition/stream_processor.go @@ -90,7 +90,6 @@ const ( type resourceStreamProcessor struct { framework.ResourceWithConfigure framework.WithTimeouts - framework.WithImportByID } func (r *resourceStreamProcessor) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { @@ -110,7 +109,6 @@ func (r *resourceStreamProcessor) Schema(ctx context.Context, req resource.Schem stringplanmodifier.RequiresReplace(), }, }, - names.AttrID: framework.IDAttribute(), names.AttrName: schema.StringAttribute{ Description: "An identifier you assign to the stream processor.", Required: true, @@ -485,7 +483,6 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat return } - plan.ID = plan.Name plan.StreamProcessorARN = fwflex.StringToFrameworkARN(ctx, out.StreamProcessorArn) if plan.DataSharingPreference.IsNull() { @@ -496,10 +493,10 @@ func (r *resourceStreamProcessor) Create(ctx context.Context, req resource.Creat } createTimeout := r.CreateTimeout(ctx, plan.Timeouts) - created, err := waitStreamProcessorCreated(ctx, conn, plan.ID.ValueString(), createTimeout) + created, err := waitStreamProcessorCreated(ctx, conn, plan.Name.ValueString(), createTimeout) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForCreation, ResNameStreamProcessor, plan.Name.String(), err), err.Error(), ) return @@ -522,7 +519,7 @@ func (r *resourceStreamProcessor) Read(ctx 
context.Context, req resource.ReadReq return } - out, err := findStreamProcessorByID(ctx, conn, state.ID.ValueString()) + out, err := findStreamProcessorByName(ctx, conn, state.Name.ValueString()) if tfresource.NotFound(err) { resp.Diagnostics.Append(fwdiag.NewResourceNotFoundWarningDiagnostic(err)) resp.State.RemoveResource(ctx) @@ -530,7 +527,7 @@ func (r *resourceStreamProcessor) Read(ctx context.Context, req resource.ReadReq } if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionSetting, ResNameStreamProcessor, state.Name.String(), err), err.Error(), ) return @@ -688,7 +685,7 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet } in := &rekognition.DeleteStreamProcessorInput{ - Name: aws.String(state.ID.ValueString()), + Name: aws.String(state.Name.ValueString()), } _, err := conn.DeleteStreamProcessor(ctx, in) @@ -698,32 +695,36 @@ func (r *resourceStreamProcessor) Delete(ctx context.Context, req resource.Delet return } resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionDeleting, ResNameStreamProcessor, state.Name.String(), err), err.Error(), ) return } deleteTimeout := r.DeleteTimeout(ctx, state.Timeouts) - _, err = waitStreamProcessorDeleted(ctx, conn, state.ID.ValueString(), deleteTimeout) + _, err = waitStreamProcessorDeleted(ctx, conn, state.Name.ValueString(), deleteTimeout) if err != nil { resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.ID.String(), err), + create.ProblemStandardMessage(names.Rekognition, create.ErrActionWaitingForDeletion, ResNameStreamProcessor, state.Name.String(), 
err), err.Error(), ) return } } +func (r *resourceStreamProcessor) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root(names.AttrName), req, resp) +} + func (r *resourceStreamProcessor) ModifyPlan(ctx context.Context, request resource.ModifyPlanRequest, response *resource.ModifyPlanResponse) { r.SetTagsAll(ctx, request, response) } -func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { +func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, name string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ Pending: []string{}, Target: enum.Slice(awstypes.StreamProcessorStatusStopped), - Refresh: statusStreamProcessor(ctx, conn, id), + Refresh: statusStreamProcessor(ctx, conn, name), Timeout: timeout, NotFoundChecks: 20, ContinuousTargetOccurence: 2, @@ -737,11 +738,11 @@ func waitStreamProcessorCreated(ctx context.Context, conn *rekognition.Client, i return nil, err } -func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { +func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, name string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ Pending: enum.Slice(awstypes.StreamProcessorStatusUpdating), Target: enum.Slice(awstypes.StreamProcessorStatusStopped), - Refresh: statusStreamProcessor(ctx, conn, id), + Refresh: statusStreamProcessor(ctx, conn, name), Timeout: timeout, NotFoundChecks: 20, ContinuousTargetOccurence: 2, @@ -755,7 +756,7 @@ func waitStreamProcessorUpdated(ctx context.Context, conn *rekognition.Client, i return nil, err } -func waitStreamProcessorDeleted(ctx 
context.Context, conn *rekognition.Client, id string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { +func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, name string, timeout time.Duration) (*rekognition.DescribeStreamProcessorOutput, error) { stateConf := &retry.StateChangeConf{ Pending: enum.Slice( awstypes.StreamProcessorStatusStopped, @@ -766,7 +767,7 @@ func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, i awstypes.StreamProcessorStatusUpdating, ), Target: []string{}, - Refresh: statusStreamProcessor(ctx, conn, id), + Refresh: statusStreamProcessor(ctx, conn, name), Timeout: timeout, } @@ -778,9 +779,9 @@ func waitStreamProcessorDeleted(ctx context.Context, conn *rekognition.Client, i return nil, err } -func statusStreamProcessor(ctx context.Context, conn *rekognition.Client, id string) retry.StateRefreshFunc { +func statusStreamProcessor(ctx context.Context, conn *rekognition.Client, name string) retry.StateRefreshFunc { return func() (interface{}, string, error) { - out, err := findStreamProcessorByID(ctx, conn, id) + out, err := findStreamProcessorByName(ctx, conn, name) if tfresource.NotFound(err) { return nil, "", nil } @@ -793,7 +794,7 @@ func statusStreamProcessor(ctx context.Context, conn *rekognition.Client, id str } } -func findStreamProcessorByID(ctx context.Context, conn *rekognition.Client, name string) (*rekognition.DescribeStreamProcessorOutput, error) { +func findStreamProcessorByName(ctx context.Context, conn *rekognition.Client, name string) (*rekognition.DescribeStreamProcessorOutput, error) { in := &rekognition.DescribeStreamProcessorInput{ Name: aws.String(name), } @@ -829,7 +830,6 @@ func unwrapListNestedObjectValueOf[T any](ctx context.Context, diagnostics diag. 
type resourceStreamProcessorDataModel struct { DataSharingPreference fwtypes.ListNestedObjectValueOf[dataSharingPreferenceModel] `tfsdk:"data_sharing_preference"` - ID types.String `tfsdk:"id"` Input fwtypes.ListNestedObjectValueOf[inputModel] `tfsdk:"input"` KmsKeyId types.String `tfsdk:"kms_key_id"` NotificationChannel fwtypes.ListNestedObjectValueOf[notificationChannelModel] `tfsdk:"notification_channel"` diff --git a/internal/service/rekognition/stream_processor_test.go b/internal/service/rekognition/stream_processor_test.go index 6f46eea7cdb9..058bbadc5ee1 100644 --- a/internal/service/rekognition/stream_processor_test.go +++ b/internal/service/rekognition/stream_processor_test.go @@ -46,7 +46,6 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { Config: testAccStreamProcessorConfig_connectedHome(rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, rName), resource.TestCheckResourceAttr(resourceName, names.AttrName, rName), resource.TestCheckResourceAttr(resourceName, "data_sharing_preference.0.opt_in", acctest.CtTrue), resource.TestCheckResourceAttrPair(resourceName, "output.0.s3_destination.0.bucket", s3BucketResourceName, names.AttrBucket), @@ -55,12 +54,15 @@ func TestAccRekognitionStreamProcessor_basic(t *testing.T) { resource.TestCheckTypeSetElemAttr(resourceName, "settings.0.connected_home.0.labels.*", "ALL"), resource.TestCheckResourceAttrPair(resourceName, "input.0.kinesis_video_stream.0.arn", kinesisVideoStreamResourceName, names.AttrARN), resource.TestCheckResourceAttrPair(resourceName, "notification_channel.0.sns_topic_arn", snsTopicResourceName, names.AttrARN), + resource.TestCheckResourceAttrSet(resourceName, "stream_processor_arn"), ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: resourceName, + ImportState: true, + ImportStateIdFunc: 
testAccStreamProcessorImportStateIdFunc(resourceName), + ImportStateVerify: true, + ImportStateVerifyIdentifierAttribute: names.AttrName, }, }, }) @@ -165,14 +167,15 @@ func TestAccRekognitionStreamProcessor_faceRecognition(t *testing.T) { Config: testAccStreamProcessorConfig_faceRecognition(rName, ""), Check: resource.ComposeTestCheckFunc( testAccCheckStreamProcessorExists(ctx, resourceName, &streamprocessor), - resource.TestCheckResourceAttr(resourceName, names.AttrID, rName), resource.TestCheckResourceAttr(resourceName, names.AttrName, rName), ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: resourceName, + ImportState: true, + ImportStateIdFunc: testAccStreamProcessorImportStateIdFunc(resourceName), + ImportStateVerify: true, + ImportStateVerifyIdentifierAttribute: names.AttrName, }, }, }) @@ -272,9 +275,11 @@ func TestAccRekognitionStreamProcessor_tags(t *testing.T) { ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, + ResourceName: resourceName, + ImportState: true, + ImportStateIdFunc: testAccStreamProcessorImportStateIdFunc(resourceName), + ImportStateVerify: true, + ImportStateVerifyIdentifierAttribute: names.AttrName, }, { Config: testAccStreamProcessorConfig_tags2(rName, acctest.CtKey1, acctest.CtValue1Updated, acctest.CtKey2, acctest.CtValue2), @@ -306,16 +311,17 @@ func testAccCheckStreamProcessorDestroy(ctx context.Context) resource.TestCheckF continue } - _, err := tfrekognition.FindCollectionByID(ctx, conn, rs.Primary.ID) + streamName := rs.Primary.Attributes[names.AttrName] + _, err := tfrekognition.FindStreamProcessorByName(ctx, conn, streamName) if tfresource.NotFound(err) { continue } if err != nil { - return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) + return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, streamName, 
err) } - return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, errors.New("not destroyed")) + return create.Error(names.Rekognition, create.ErrActionCheckingDestroyed, tfrekognition.ResNameStreamProcessor, streamName, errors.New("not destroyed")) } return nil @@ -329,17 +335,15 @@ func testAccCheckStreamProcessorExists(ctx context.Context, name string, streamp return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, name, errors.New("not found")) } - if rs.Primary.ID == "" { - return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, name, errors.New("not set")) + streamName := rs.Primary.Attributes[names.AttrName] + if streamName == "" { + return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, name, errors.New("name not set")) } conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) - resp, err := conn.DescribeStreamProcessor(ctx, &rekognition.DescribeStreamProcessorInput{ - Name: aws.String(rs.Primary.ID), - }) - + resp, err := tfrekognition.FindStreamProcessorByName(ctx, conn, streamName) if err != nil { - return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, rs.Primary.ID, err) + return create.Error(names.Rekognition, create.ErrActionCheckingExistence, tfrekognition.ResNameStreamProcessor, streamName, err) } *streamprocessor = *resp @@ -348,6 +352,17 @@ func testAccCheckStreamProcessorExists(ctx context.Context, name string, streamp } } +func testAccStreamProcessorImportStateIdFunc(resourceName string) resource.ImportStateIdFunc { + return func(s *terraform.State) (string, error) { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return "", fmt.Errorf("Not found: %s", resourceName) + } + + return rs.Primary.Attributes[names.AttrName], 
nil + } +} + func testAccPreCheck(ctx context.Context, t *testing.T) { conn := acctest.Provider.Meta().(*conns.AWSClient).RekognitionClient(ctx) diff --git a/website/docs/r/rekognition_stream_processor.html.markdown b/website/docs/r/rekognition_stream_processor.html.markdown index de71df30e25b..ba80bf6544ef 100644 --- a/website/docs/r/rekognition_stream_processor.html.markdown +++ b/website/docs/r/rekognition_stream_processor.html.markdown @@ -301,7 +301,6 @@ If using `polygon`, a minimum of 3 per region is required, with a maximum of 10. This resource exports the following attributes in addition to the arguments above: -* `id` - Name of the Stream Processor. * `stream_processor_arn` - ARN of the Stream Processor. * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -315,17 +314,17 @@ This resource exports the following attributes in addition to the arguments abov ## Import -In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Rekognition Stream Processor using the `id`. For example: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Rekognition Stream Processor using the `name`. For example: ```terraform import { to = aws_rekognition_stream_processor.example - id = "stream_processor-id-12345678" + id = "my-stream" } ``` -Using `terraform import`, import Rekognition Stream Processor using the `id`. For example: +Using `terraform import`, import Rekognition Stream Processor using the `name`. For example: ```console -% terraform import aws_rekognition_stream_processor.example stream_processor-id-12345678 +% terraform import aws_rekognition_stream_processor.example my-stream ```