diff --git a/.changelog/35280.txt b/.changelog/35280.txt
new file mode 100644
index 000000000000..050670503ac3
--- /dev/null
+++ b/.changelog/35280.txt
@@ -0,0 +1,3 @@
+```release-note:new-data-source
+aws_prometheus_default_scraper_configuration
+```
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
index 3c84cda7e149..8a5fa03dd42f 100644
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -7,6 +7,7 @@ import (
 	"context"
 	"errors"
 	"fmt"
+	"log"
 	"os"
 	"strings"
 	"time"
@@ -33,6 +34,8 @@ import (
 // New returns a new, initialized Terraform Plugin SDK v2-style provider instance.
 // The provider instance is fully configured once the `ConfigureContextFunc` has been called.
 func New(ctx context.Context) (*schema.Provider, error) {
+	log.Printf("Initializing Terraform AWS Provider...")
+
 	provider := &schema.Provider{
 		// This schema must match exactly the Terraform Protocol v6 (Terraform Plugin Framework) provider's schema.
 		// Notably the attributes can have no Default values.
diff --git a/internal/service/amp/default_scraper_configuration_data_source.go b/internal/service/amp/default_scraper_configuration_data_source.go
new file mode 100644
index 000000000000..5ceae8b55588
--- /dev/null
+++ b/internal/service/amp/default_scraper_configuration_data_source.go
@@ -0,0 +1,81 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package amp
+
+import (
+	"context"
+
+	"github.com/aws/aws-sdk-go-v2/service/amp"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+	"github.com/hashicorp/terraform-provider-aws/internal/framework"
+	fwflex "github.com/hashicorp/terraform-provider-aws/internal/framework/flex"
+	"github.com/hashicorp/terraform-provider-aws/internal/tfresource"
+	"github.com/hashicorp/terraform-provider-aws/names"
+)
+
+// @FrameworkDataSource("aws_prometheus_default_scraper_configuration", name="Default Scraper Configuration")
+func newDefaultScraperConfigurationDataSource(context.Context) (datasource.DataSourceWithConfigure, error) {
+	return &defaultScraperConfigurationDataSource{}, nil
+}
+
+type defaultScraperConfigurationDataSource struct {
+	framework.DataSourceWithConfigure
+}
+
+func (*defaultScraperConfigurationDataSource) Metadata(_ context.Context, request datasource.MetadataRequest, response *datasource.MetadataResponse) { // nosemgrep:ci.meta-in-func-name
+	response.TypeName = "aws_prometheus_default_scraper_configuration"
+}
+
+func (d *defaultScraperConfigurationDataSource) Schema(ctx context.Context, request datasource.SchemaRequest, response *datasource.SchemaResponse) {
+	response.Schema = schema.Schema{
+		Attributes: map[string]schema.Attribute{
+			names.AttrConfiguration: schema.StringAttribute{
+				Computed: true,
+			},
+		},
+	}
+}
+
+func (d *defaultScraperConfigurationDataSource) Read(ctx context.Context, request datasource.ReadRequest, response *datasource.ReadResponse) {
+	var data defaultScraperConfigurationDataSourceModel
+	response.Diagnostics.Append(request.Config.Get(ctx, &data)...)
+	if response.Diagnostics.HasError() {
+		return
+	}
+
+	conn := d.Meta().AMPClient(ctx)
+
+	out, err := findDefaultScraperConfiguration(ctx, conn)
+
+	if err != nil {
+		response.Diagnostics.AddError("reading Prometheus Default Scraper Configuration", err.Error())
+
+		return
+	}
+
+	data.Configuration = fwflex.StringValueToFramework(ctx, string(out))
+
+	response.Diagnostics.Append(response.State.Set(ctx, &data)...)
+}
+
+func findDefaultScraperConfiguration(ctx context.Context, conn *amp.Client) ([]byte, error) {
+	input := &amp.GetDefaultScraperConfigurationInput{}
+	output, err := conn.GetDefaultScraperConfiguration(ctx, input)
+
+	if err != nil {
+		return nil, err
+	}
+
+	if output == nil || output.Configuration == nil {
+		return nil, tfresource.NewEmptyResultError(input)
+	}
+
+	return output.Configuration, nil
+}
+
+type defaultScraperConfigurationDataSourceModel struct {
+	Configuration types.String `tfsdk:"configuration"`
+}
diff --git a/internal/service/amp/default_scraper_configuration_data_source_test.go b/internal/service/amp/default_scraper_configuration_data_source_test.go
new file mode 100644
index 000000000000..b09dc235ceb7
--- /dev/null
+++ b/internal/service/amp/default_scraper_configuration_data_source_test.go
@@ -0,0 +1,40 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package amp_test
+
+import (
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+	"github.com/hashicorp/terraform-provider-aws/internal/acctest"
+	"github.com/hashicorp/terraform-provider-aws/names"
+)
+
+func TestAccAMPDefaultScraperConfigurationDataSource_basic(t *testing.T) {
+	ctx := acctest.Context(t)
+	dataSourceName := "data.aws_prometheus_default_scraper_configuration.test"
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck: func() {
+			acctest.PreCheck(ctx, t)
+			acctest.PreCheckPartitionHasService(t, names.AMPEndpointID)
+		},
+		ErrorCheck:               acctest.ErrorCheck(t, names.AMPServiceID),
+		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccDefaultScraperConfigurationDataSourceConfig_basic(),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttrSet(dataSourceName, names.AttrConfiguration),
+				),
+			},
+		},
+	})
+}
+
+func testAccDefaultScraperConfigurationDataSourceConfig_basic() string {
+	return `
+data "aws_prometheus_default_scraper_configuration" "test" {}
+`
+}
diff --git a/internal/service/amp/service_package_gen.go b/internal/service/amp/service_package_gen.go
index ee3a5812e4e4..8c8000a70635 100644
--- a/internal/service/amp/service_package_gen.go
+++ b/internal/service/amp/service_package_gen.go
@@ -15,7 +15,12 @@ import (
 type servicePackage struct{}
 
 func (p *servicePackage) FrameworkDataSources(ctx context.Context) []*types.ServicePackageFrameworkDataSource {
-	return []*types.ServicePackageFrameworkDataSource{}
+	return []*types.ServicePackageFrameworkDataSource{
+		{
+			Factory: newDefaultScraperConfigurationDataSource,
+			Name:    "Default Scraper Configuration",
+		},
+	}
 }
 
 func (p *servicePackage) FrameworkResources(ctx context.Context) []*types.ServicePackageFrameworkResource {
diff --git a/main.go b/main.go
index b73cae18c643..92ee54dd3219 100644
--- a/main.go
+++ b/main.go
@@ -7,15 +7,25 @@ import (
 	"context"
 	"flag"
 	"log"
+	"runtime/debug"
 
 	"github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server"
 	"github.com/hashicorp/terraform-provider-aws/internal/provider"
+	"github.com/hashicorp/terraform-provider-aws/version"
 )
 
 func main() {
 	debugFlag := flag.Bool("debug", false, "Start provider in debug mode.")
 	flag.Parse()
 
+	logFlags := log.Flags()
+	logFlags = logFlags &^ (log.Ldate | log.Ltime)
+	log.SetFlags(logFlags)
+
+	if buildInfo, ok := debug.ReadBuildInfo(); ok {
+		log.Printf("Starting %s@%s (%s)...", buildInfo.Main.Path, version.ProviderVersion, buildInfo.GoVersion)
+	}
+
 	serverFactory, _, err := provider.ProtoV5ProviderServerFactory(context.Background())
 
 	if err != nil {
@@ -28,10 +38,6 @@
 		serveOpts = append(serveOpts, tf5server.WithManagedDebug())
 	}
 
-	logFlags := log.Flags()
-	logFlags = logFlags &^ (log.Ldate | log.Ltime)
-	log.SetFlags(logFlags)
-
 	err = tf5server.Serve(
 		"registry.terraform.io/hashicorp/aws",
 		serverFactory,
diff --git a/website/docs/d/prometheus_default_scraper_configuration.html.markdown b/website/docs/d/prometheus_default_scraper_configuration.html.markdown
new file mode 100644
index 000000000000..284a9636c590
--- /dev/null
+++ b/website/docs/d/prometheus_default_scraper_configuration.html.markdown
@@ -0,0 +1,24 @@
+---
+subcategory: "AMP (Managed Prometheus)"
+layout: "aws"
+page_title: "AWS: aws_prometheus_default_scraper_configuration"
+description: |-
+  Returns the default scraper configuration used when Amazon EKS creates a scraper for you.
+---
+
+
+# Data Source: aws_prometheus_default_scraper_configuration
+
+Returns the default scraper configuration used when Amazon EKS creates a scraper for you.
+
+## Example Usage
+
+```terraform
+data "aws_prometheus_default_scraper_configuration" "example" {}
+```
+
+## Attribute Reference
+
+This data source exports the following attributes:
+
+* `configuration` - The default scraper configuration.
diff --git a/website/docs/r/prometheus_scraper.html.markdown b/website/docs/r/prometheus_scraper.html.markdown
index e154495f2c4d..0ff126df48ef 100644
--- a/website/docs/r/prometheus_scraper.html.markdown
+++ b/website/docs/r/prometheus_scraper.html.markdown
@@ -94,6 +94,36 @@ EOT
 }
 ```
 
+### Use default EKS scraper configuration
+
+You can use the `aws_prometheus_default_scraper_configuration` data source to apply the
+service-managed default scrape configuration to your scraper.
+
+-> **Note:** If the default configuration is updated, the change will trigger a replacement
+of your scraper.
+
+```terraform
+data "aws_prometheus_default_scraper_configuration" "example" {}
+
+resource "aws_prometheus_scraper" "example" {
+
+  destination {
+    amp {
+      workspace_arn = aws_prometheus_workspace.example.arn
+    }
+  }
+
+  scrape_configuration = data.aws_prometheus_default_scraper_configuration.example.configuration
+
+  source {
+    eks {
+      cluster_arn = data.aws_eks_cluster.example.arn
+      subnet_ids  = data.aws_eks_cluster.example.vpc_config[0].subnet_ids
+    }
+  }
+}
+```
+
 ### Ignoring changes to Prometheus Workspace destination
 
 A managed scraper will add a `AMPAgentlessScraper` tag to its Prometheus workspace