Skip to content

Commit

Permalink
Merge pull request #35280 from bonclay7/f-prometheus-scraper-config
Browse files Browse the repository at this point in the history
Add data source for default scraper configuration
  • Loading branch information
ewbankkit authored Oct 9, 2024
2 parents 6c7dd89 + 3c9479f commit 3404798
Show file tree
Hide file tree
Showing 8 changed files with 197 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .changelog/35280.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:new-data-source
aws_prometheus_default_scraper_configuration
```
3 changes: 3 additions & 0 deletions internal/provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"context"
"errors"
"fmt"
"log"
"os"
"strings"
"time"
Expand All @@ -33,6 +34,8 @@ import (
// New returns a new, initialized Terraform Plugin SDK v2-style provider instance.
// The provider instance is fully configured once the `ConfigureContextFunc` has been called.
func New(ctx context.Context) (*schema.Provider, error) {
log.Printf("Initializing Terraform AWS Provider...")

provider := &schema.Provider{
// This schema must match exactly the Terraform Protocol v6 (Terraform Plugin Framework) provider's schema.
// Notably the attributes can have no Default values.
Expand Down
81 changes: 81 additions & 0 deletions internal/service/amp/default_scraper_configuration_data_source.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package amp

import (
"context"

"github.com/aws/aws-sdk-go-v2/service/amp"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-provider-aws/internal/framework"
fwflex "github.com/hashicorp/terraform-provider-aws/internal/framework/flex"
"github.com/hashicorp/terraform-provider-aws/internal/tfresource"
"github.com/hashicorp/terraform-provider-aws/names"
)

// @FrameworkDataSource(aws_prometheus_default_scraper_configuration, name="Default Scraper Configuration")
func newDefaultScraperConfigurationDataSource(context.Context) (datasource.DataSourceWithConfigure, error) {
return &defaultScraperConfigurationDataSource{}, nil
}

// defaultScraperConfigurationDataSource implements the
// aws_prometheus_default_scraper_configuration data source. It embeds
// framework.DataSourceWithConfigure to inherit provider meta wiring
// (Meta() is used in Read to obtain the AMP client).
type defaultScraperConfigurationDataSource struct {
	framework.DataSourceWithConfigure
}

// Metadata reports the data source's fully-qualified type name to Terraform.
func (*defaultScraperConfigurationDataSource) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { // nosemgrep:ci.meta-in-func-name
	resp.TypeName = "aws_prometheus_default_scraper_configuration"
}

// Schema declares the data source's single attribute: the computed,
// read-only default scraper configuration document.
func (d *defaultScraperConfigurationDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	attributes := map[string]schema.Attribute{
		names.AttrConfiguration: schema.StringAttribute{
			Computed: true,
		},
	}

	resp.Schema = schema.Schema{Attributes: attributes}
}

// Read fetches the service-managed default scraper configuration from the
// AMP API and stores it in state. There are no arguments to this data
// source, so the config is read only to populate the model shell.
func (d *defaultScraperConfigurationDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
	var model defaultScraperConfigurationDataSourceModel
	resp.Diagnostics.Append(req.Config.Get(ctx, &model)...)
	if resp.Diagnostics.HasError() {
		return
	}

	cfg, err := findDefaultScraperConfiguration(ctx, d.Meta().AMPClient(ctx))
	if err != nil {
		resp.Diagnostics.AddError("reading Prometheus Default Scraper Configuration", err.Error())
		return
	}

	// The API returns raw bytes; expose them as a string attribute.
	model.Configuration = fwflex.StringValueToFramework(ctx, string(cfg))

	resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
}

// findDefaultScraperConfiguration calls GetDefaultScraperConfiguration and
// returns the raw configuration document. It returns an empty-result error
// when the API responds without a configuration payload, so callers can
// distinguish "missing" from transport errors.
func findDefaultScraperConfiguration(ctx context.Context, conn *amp.Client) ([]byte, error) {
	input := &amp.GetDefaultScraperConfigurationInput{}
	output, err := conn.GetDefaultScraperConfiguration(ctx, input)

	if err != nil {
		return nil, err
	}

	if output == nil || output.Configuration == nil {
		return nil, tfresource.NewEmptyResultError(input)
	}

	// err is necessarily nil here; return an explicit nil rather than the
	// stale named error (original returned `err`, which reads as if it
	// could be non-nil on this path).
	return output.Configuration, nil
}

// defaultScraperConfigurationDataSourceModel maps the data source's schema
// to Terraform state. It has a single computed attribute.
type defaultScraperConfigurationDataSourceModel struct {
	Configuration types.String `tfsdk:"configuration"`
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package amp_test

import (
"testing"

"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-provider-aws/internal/acctest"
"github.com/hashicorp/terraform-provider-aws/names"
)

// TestAccAMPDefaultScraperConfigurationDataSource_basic verifies that the
// data source can be read and that the configuration attribute is set.
func TestAccAMPDefaultScraperConfigurationDataSource_basic(t *testing.T) {
	ctx := acctest.Context(t)
	const dsName = "data.aws_prometheus_default_scraper_configuration.test"

	resource.ParallelTest(t, resource.TestCase{
		PreCheck: func() {
			acctest.PreCheck(ctx, t)
			acctest.PreCheckPartitionHasService(t, names.AMPEndpointID)
		},
		ErrorCheck:               acctest.ErrorCheck(t, names.AMPServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		Steps: []resource.TestStep{
			{
				Config: testAccDefaultScraperConfigurationDataSourceConfig_basic(),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttrSet(dsName, names.AttrConfiguration),
				),
			},
		},
	})
}

// testAccDefaultScraperConfigurationDataSourceConfig_basic returns a minimal
// Terraform configuration declaring the data source with no arguments.
func testAccDefaultScraperConfigurationDataSourceConfig_basic() string {
	const config = `
data "aws_prometheus_default_scraper_configuration" "test" {}
`
	return config
}
7 changes: 6 additions & 1 deletion internal/service/amp/service_package_gen.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 10 additions & 4 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,25 @@ import (
"context"
"flag"
"log"
"runtime/debug"

"github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server"
"github.com/hashicorp/terraform-provider-aws/internal/provider"
"github.com/hashicorp/terraform-provider-aws/version"
)

func main() {
debugFlag := flag.Bool("debug", false, "Start provider in debug mode.")
flag.Parse()

logFlags := log.Flags()
logFlags = logFlags &^ (log.Ldate | log.Ltime)
log.SetFlags(logFlags)

if buildInfo, ok := debug.ReadBuildInfo(); ok {
log.Printf("Starting %s@%s (%s)...", buildInfo.Main.Path, version.ProviderVersion, buildInfo.GoVersion)
}

serverFactory, _, err := provider.ProtoV5ProviderServerFactory(context.Background())

if err != nil {
Expand All @@ -28,10 +38,6 @@ func main() {
serveOpts = append(serveOpts, tf5server.WithManagedDebug())
}

logFlags := log.Flags()
logFlags = logFlags &^ (log.Ldate | log.Ltime)
log.SetFlags(logFlags)

err = tf5server.Serve(
"registry.terraform.io/hashicorp/aws",
serverFactory,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
---
subcategory: "AMP (Managed Prometheus)"
layout: "aws"
page_title: "AWS: aws_prometheus_default_scraper_configuration"
description: |-
Returns the default scraper configuration used when Amazon EKS creates a scraper for you.
---


# Data Source: aws_prometheus_default_scraper_configuration

Returns the default scraper configuration used when Amazon EKS creates a scraper for you.

## Example Usage

```terraform
data "aws_prometheus_default_scraper_configuration" "example" {}
```

## Attribute Reference

This data source exports the following attributes:

* `configuration` - The configuration file.
30 changes: 30 additions & 0 deletions website/docs/r/prometheus_scraper.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,36 @@ EOT
}
```

### Use default EKS scraper configuration

You can use the data source `aws_prometheus_default_scraper_configuration` to use a
service-managed scrape configuration.

-> **Note:** If the configuration is updated, this will trigger a replacement
of your scraper.

```terraform
data "aws_prometheus_default_scraper_configuration" "example" {}
resource "aws_prometheus_scraper" "example" {
destination {
amp {
workspace_arn = aws_prometheus_workspace.example.arn
}
}
scrape_configuration = data.aws_prometheus_default_scraper_configuration.example.configuration
source {
eks {
cluster_arn = data.aws_eks_cluster.example.arn
subnet_ids = data.aws_eks_cluster.example.vpc_config[0].subnet_ids
}
}
}
```

### Ignoring changes to Prometheus Workspace destination

A managed scraper will add an `AMPAgentlessScraper` tag to its Prometheus workspace
Expand Down

0 comments on commit 3404798

Please sign in to comment.