diff --git a/.changelog/435.txt b/.changelog/435.txt
new file mode 100644
index 000000000..880b3aef8
--- /dev/null
+++ b/.changelog/435.txt
@@ -0,0 +1,3 @@
+```release-note:feature
+New resource `hcp_packer_channel` to create a channel, or manage an existing one, with or without an assigned iteration.
+```
diff --git a/docs/resources/packer_channel.md b/docs/resources/packer_channel.md
new file mode 100644
index 000000000..64cfd8490
--- /dev/null
+++ b/docs/resources/packer_channel.md
@@ -0,0 +1,118 @@
+---
+page_title: "Resource hcp_packer_channel - terraform-provider-hcp"
+subcategory: ""
+description: |-
+ The Packer Channel resource allows you to manage image bucket channels within an active HCP Packer Registry.
+---
+
+# hcp_packer_channel (Resource)
+
+The Packer Channel resource allows you to manage image bucket channels within an active HCP Packer Registry.
+
+## Example Usage
+
+To create a channel with no assigned iteration.
+```terraform
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+}
+```
+
+To create a channel with an assigned iteration, or to update the iteration assigned to an existing channel.
+```terraform
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ id = "iteration-id"
+ }
+}
+
+# Update assigned iteration using an iteration fingerprint
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ fingerprint = "fingerprint-associated-to-iteration"
+ }
+}
+
+# Update assigned iteration using an iteration incremental version
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ // incremental_version is the version number assigned to a completed iteration.
+ incremental_version = 1
+ }
+}
+```
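+
+Note: the blocks above are alternative configurations for the same `staging` channel; a configuration may contain only one of them at a time.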
+
+Using the iteration fetched from the `latest` channel to create a new channel with an assigned iteration.
+```terraform
+data "hcp_packer_image_iteration" "latest" {
+ bucket_name = "alpine"
+ channel = "latest"
+}
+
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ id = data.hcp_packer_image_iteration.latest.id
+ }
+}
+```
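+
+Note that the iteration is read from the `latest` channel when Terraform refreshes the data source; the new channel is then pinned to that specific iteration and does not automatically follow later ones.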
+
+
+
+## Schema
+
+### Required
+
+- `bucket_name` (String) The slug of the HCP Packer Registry image bucket where the channel is managed.
+- `name` (String) The name of the channel being managed.
+
+### Optional
+
+- `iteration` (Block List, Max: 1) The iteration assigned to the channel. (see [below for nested schema](#nestedblock--iteration))
+- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))
+
+### Read-Only
+
+- `author_id` (String) The author of the channel.
+- `created_at` (String) Creation time of this channel.
+- `id` (String) The ID of this resource.
+- `organization_id` (String) The ID of the organization this HCP Packer registry is located in.
+- `project_id` (String) The ID of the project this HCP Packer registry is located in.
+- `updated_at` (String) The time this channel was last updated.
+
+
+### Nested Schema for `iteration`
+
+Optional:
+
+- `fingerprint` (String) The fingerprint of the iteration assigned to the channel.
+- `id` (String) The ID of the iteration assigned to the channel.
+- `incremental_version` (Number) The incremental_version of the iteration assigned to the channel.
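+
+Exactly one of `id`, `fingerprint`, or `incremental_version` must be set when assigning an iteration; the remaining attributes are populated from the registry.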
+
+
+
+### Nested Schema for `timeouts`
+
+Optional:
+
+- `create` (String)
+- `default` (String)
+- `delete` (String)
+- `update` (String)
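+
+A minimal sketch of overriding these timeouts, assuming the defaults need tuning (the duration values are illustrative; they use Go-style duration strings such as "5m" or "1h"):
+```terraform
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+
+ timeouts {
+ # Illustrative values; any Go-style duration string works.
+ create = "5m"
+ delete = "5m"
+ }
+}
+```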
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+# The import ID requires the bucket and channel name in the format {bucket_name}:{name}
+terraform import hcp_packer_channel.staging alpine:staging
+```
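+
+Note that import only adds the channel to state; a matching `hcp_packer_channel` resource block must already exist in the configuration. Channels managed by HCP Packer itself (such as `latest`) cannot be imported.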
diff --git a/examples/resources/hcp_packer_channel/import.sh b/examples/resources/hcp_packer_channel/import.sh
new file mode 100644
index 000000000..67b5d9509
--- /dev/null
+++ b/examples/resources/hcp_packer_channel/import.sh
@@ -0,0 +1,2 @@
+# The import ID requires the bucket and channel name in the format {bucket_name}:{name}
+terraform import hcp_packer_channel.staging alpine:staging
diff --git a/examples/resources/hcp_packer_channel/resource.tf b/examples/resources/hcp_packer_channel/resource.tf
new file mode 100644
index 000000000..5518dc1bd
--- /dev/null
+++ b/examples/resources/hcp_packer_channel/resource.tf
@@ -0,0 +1,4 @@
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+}
diff --git a/examples/resources/hcp_packer_channel/resource_assignment.tf b/examples/resources/hcp_packer_channel/resource_assignment.tf
new file mode 100644
index 000000000..56b18f8ff
--- /dev/null
+++ b/examples/resources/hcp_packer_channel/resource_assignment.tf
@@ -0,0 +1,27 @@
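+# The configurations below are alternatives for the same channel resource; use only one at a time.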
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ id = "iteration-id"
+ }
+}
+
+# Update assigned iteration using an iteration fingerprint
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ fingerprint = "fingerprint-associated-to-iteration"
+ }
+}
+
+# Update assigned iteration using an iteration incremental version
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ // incremental_version is the version number assigned to a completed iteration.
+ incremental_version = 1
+ }
+}
+
diff --git a/examples/resources/hcp_packer_channel/resource_using_latest_channel.tf b/examples/resources/hcp_packer_channel/resource_using_latest_channel.tf
new file mode 100644
index 000000000..ca6a7ad62
--- /dev/null
+++ b/examples/resources/hcp_packer_channel/resource_using_latest_channel.tf
@@ -0,0 +1,12 @@
+data "hcp_packer_image_iteration" "latest" {
+ bucket_name = "alpine"
+ channel = "latest"
+}
+
+resource "hcp_packer_channel" "staging" {
+ name = "staging"
+ bucket_name = "alpine"
+ iteration {
+ id = data.hcp_packer_image_iteration.latest.id
+ }
+}
diff --git a/internal/clients/packer.go b/internal/clients/packer.go
index 78391639c..8b80e0a58 100644
--- a/internal/clients/packer.go
+++ b/internal/clients/packer.go
@@ -28,7 +28,7 @@ func GetPackerChannelBySlug(ctx context.Context, client *Client, loc *sharedmode
return getResp.Payload.Channel, nil
}
-// GetIteration queries the HCP Packer registry for an existing bucket iteration.
+// GetIterationFromID queries the HCP Packer registry for an existing bucket iteration.
func GetIterationFromID(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation,
bucketslug string, iterationID string) (*packermodels.HashicorpCloudPackerIteration, error) {
params := packer_service.NewPackerServiceGetIterationParamsWithContext(ctx)
@@ -48,6 +48,101 @@ func GetIterationFromID(ctx context.Context, client *Client, loc *sharedmodels.H
return it.Payload.Iteration, nil
}
+// CreateBucketChannel creates a channel on the named bucket.
+func CreateBucketChannel(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation, bucketSlug string, channelSlug string,
+ iteration *packermodels.HashicorpCloudPackerIteration) (*packermodels.HashicorpCloudPackerChannel, error) {
+ params := packer_service.NewPackerServiceCreateChannelParamsWithContext(ctx)
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ params.BucketSlug = bucketSlug
+ params.Body.Slug = channelSlug
+
+ if iteration != nil {
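+ // Only one identifier is sent to the API; ID takes precedence over
+ // fingerprint, which takes precedence over incremental version.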
+ switch {
+ case iteration.ID != "":
+ params.Body.IterationID = iteration.ID
+ case iteration.Fingerprint != "":
+ params.Body.Fingerprint = iteration.Fingerprint
+ case iteration.IncrementalVersion > 0:
+ params.Body.IncrementalVersion = iteration.IncrementalVersion
+ }
+ }
+
+ channel, err := client.Packer.PackerServiceCreateChannel(params, nil)
+ if err != nil {
+ err := err.(*packer_service.PackerServiceCreateChannelDefault)
+ return nil, errors.New(err.Payload.Message)
+ }
+
+ return channel.GetPayload().Channel, nil
+}
+
+// UpdateBucketChannel updates the named channel.
+func UpdateBucketChannel(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation, bucketSlug string, channelSlug string,
+ iteration *packermodels.HashicorpCloudPackerIteration) (*packermodels.HashicorpCloudPackerChannel, error) {
+ params := packer_service.NewPackerServiceUpdateChannelParamsWithContext(ctx)
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ params.BucketSlug = bucketSlug
+ params.Slug = channelSlug
+
+ if iteration != nil {
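+ // Only one identifier is sent to the API; ID takes precedence over
+ // fingerprint, which takes precedence over incremental version.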
+ switch {
+ case iteration.ID != "":
+ params.Body.IterationID = iteration.ID
+ case iteration.Fingerprint != "":
+ params.Body.Fingerprint = iteration.Fingerprint
+ case iteration.IncrementalVersion > 0:
+ params.Body.IncrementalVersion = iteration.IncrementalVersion
+ }
+ }
+
+ channel, err := client.Packer.PackerServiceUpdateChannel(params, nil)
+ if err != nil {
+ err := err.(*packer_service.PackerServiceUpdateChannelDefault)
+ return nil, errors.New(err.Payload.Message)
+ }
+
+ return channel.GetPayload().Channel, nil
+}
+
+// DeleteBucketChannel deletes a channel from the named bucket.
+func DeleteBucketChannel(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation, bucketSlug, channelSlug string) (*packermodels.HashicorpCloudPackerChannel, error) {
+ params := packer_service.NewPackerServiceDeleteChannelParamsWithContext(ctx)
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ params.BucketSlug = bucketSlug
+ params.Slug = channelSlug
+
+ req, err := client.Packer.PackerServiceDeleteChannel(params, nil)
+ if err != nil {
+ err := err.(*packer_service.PackerServiceDeleteChannelDefault)
+ return nil, errors.New(err.Payload.Message)
+ }
+
+ if !req.IsSuccess() {
+ return nil, errors.New("failed to delete channel")
+ }
+
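+ // The delete endpoint returns no channel payload, so success is reported as nil, nil.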
+ return nil, nil
+}
+
+// ListBucketChannels queries the HCP Packer registry for channels associated to the specified bucket.
+func ListBucketChannels(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation, bucketSlug string) (*packermodels.HashicorpCloudPackerListChannelsResponse, error) {
+ params := packer_service.NewPackerServiceListChannelsParamsWithContext(ctx)
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ params.BucketSlug = bucketSlug
+
+ req, err := client.Packer.PackerServiceListChannels(params, nil)
+ if err != nil {
+ err := err.(*packer_service.PackerServiceListChannelsDefault)
+ return nil, errors.New(err.Payload.Message)
+ }
+
+ return req.Payload, nil
+}
+
// handleGetChannelError returns a formatted error for the GetChannel error.
// The upstream API does a good job of providing detailed error messages so we just display the error message, with no status code.
func handleGetChannelError(err *packer_service.PackerServiceGetChannelDefault) error {
diff --git a/internal/provider/data_source_packer_image_iteration_test.go b/internal/provider/data_source_packer_image_iteration_test.go
index 82a24dd9a..f159b935d 100644
--- a/internal/provider/data_source_packer_image_iteration_test.go
+++ b/internal/provider/data_source_packer_image_iteration_test.go
@@ -22,7 +22,7 @@ import (
var (
acctestAlpineBucket = fmt.Sprintf("alpine-acc-%s", time.Now().Format("200601021504"))
acctestUbuntuBucket = fmt.Sprintf("ubuntu-acc-%s", time.Now().Format("200601021504"))
- acctestProductionChannel = "production"
+ acctestProductionChannel = fmt.Sprintf("packer-acc-channel-%s", time.Now().Format("200601021504"))
)
var (
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
index e081a18e7..89a17797d 100644
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -49,6 +49,7 @@ func New() func() *schema.Provider {
"hcp_hvn": resourceHvn(),
"hcp_hvn_peering_connection": resourceHvnPeeringConnection(),
"hcp_hvn_route": resourceHvnRoute(),
+ "hcp_packer_channel": resourcePackerChannel(),
"hcp_vault_cluster": resourceVaultCluster(),
"hcp_vault_cluster_admin_token": resourceVaultClusterAdminToken(),
},
diff --git a/internal/provider/resource_packer_channel.go b/internal/provider/resource_packer_channel.go
new file mode 100644
index 000000000..2962a3aa6
--- /dev/null
+++ b/internal/provider/resource_packer_channel.go
@@ -0,0 +1,377 @@
+package provider
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log"
+ "strings"
+
+ packermodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models"
+ sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+ "github.com/hashicorp/terraform-provider-hcp/internal/clients"
+)
+
+func resourcePackerChannel() *schema.Resource {
+ return &schema.Resource{
+ Description: "The Packer Channel resource allows you to manage image bucket channels within an active HCP Packer Registry.",
+ CreateContext: resourcePackerChannelCreate,
+ DeleteContext: resourcePackerChannelDelete,
+ ReadContext: resourcePackerChannelRead,
+ UpdateContext: resourcePackerChannelUpdate,
+ Timeouts: &schema.ResourceTimeout{
+ Create: &defaultPackerTimeout,
+ Default: &defaultPackerTimeout,
+ Update: &defaultPackerTimeout,
+ Delete: &defaultPackerTimeout,
+ },
+ Importer: &schema.ResourceImporter{
+ StateContext: resourcePackerChannelImport,
+ },
+
+ Schema: map[string]*schema.Schema{
+ // Required inputs
+ "name": {
+ Description: "The name of the channel being managed.",
+ Type: schema.TypeString,
+ ForceNew: true,
+ Required: true,
+ ValidateDiagFunc: validateSlugID,
+ },
+ "bucket_name": {
+ Description: "The slug of the HCP Packer Registry image bucket where the channel should be managed in.",
+ Type: schema.TypeString,
+ ForceNew: true,
+ Required: true,
+ ValidateDiagFunc: validateStringNotEmpty,
+ },
+ // Optional inputs
+ "iteration": {
+ Description: "The iteration assigned to the channel.",
+ Type: schema.TypeList,
+ MaxItems: 1,
+ Optional: true,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "fingerprint": {
+ Description: "The fingerprint of the iteration assigned to the channel.",
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ExactlyOneOf: []string{"iteration.0.id", "iteration.0.fingerprint", "iteration.0.incremental_version"},
+ },
+ "id": {
+ Description: "The ID of the iteration assigned to the channel.",
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ExactlyOneOf: []string{"iteration.0.id", "iteration.0.fingerprint", "iteration.0.incremental_version"},
+ },
+ "incremental_version": {
+ Description: "The incremental_version of the iteration assigned to the channel.",
+ Type: schema.TypeInt,
+ Optional: true,
+ Computed: true,
+ ExactlyOneOf: []string{"iteration.0.id", "iteration.0.fingerprint", "iteration.0.incremental_version"},
+ },
+ },
+ },
+ },
+ // Computed Values
+ "author_id": {
+ Description: "The author of the channel.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ "created_at": {
+ Description: "Creation time of this build.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ "organization_id": {
+ Description: "The ID of the organization this HCP Packer registry is located in.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ "project_id": {
+ Description: "The ID of the project this HCP Packer registry is located in.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ "updated_at": {
+ Description: "The author of the channel.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ },
+ }
+}
+
+func resourcePackerChannelRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ bucketName := d.Get("bucket_name").(string)
+ client := meta.(*clients.Client)
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ resp, err := clients.ListBucketChannels(ctx, client, loc, bucketName)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ channelName := d.Get("name").(string)
+ var channel packermodels.HashicorpCloudPackerChannel
+ for _, c := range resp.Channels {
+ if c.Slug == channelName {
+ channel = *c
+ break
+ }
+ }
+ if channel.ID == "" {
+ return diag.Errorf("Unable to find channel in bucket %s named %s.", bucketName, channelName)
+ }
+ return setPackerChannelResourceData(d, &channel)
+}
+
+func resourcePackerChannelCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("name").(string)
+
+ client := meta.(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ iterationConfig, ok := d.GetOk("iteration")
+ if !ok {
+ channel, err := clients.CreateBucketChannel(ctx, client, loc, bucketName, channelName, nil)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ if channel == nil {
+ return diag.Errorf("Unable to create channel in bucket %s named %s.", bucketName, channelName)
+ }
+
+ return setPackerChannelResourceData(d, channel)
+ }
+
+ var iteration *packermodels.HashicorpCloudPackerIteration
+ if config, ok := iterationConfig.([]interface{})[0].(map[string]interface{}); ok {
+ iteration = expandIterationConfig(config)
+ }
+
+ channel, err := clients.CreateBucketChannel(ctx, client, loc, bucketName, channelName, iteration)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ if channel == nil {
+ return diag.Errorf("Unable to create channel in bucket %s named %s.", bucketName, channelName)
+ }
+
+ return setPackerChannelResourceData(d, channel)
+}
+
+func resourcePackerChannelUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("name").(string)
+
+ client := meta.(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ var iteration *packermodels.HashicorpCloudPackerIteration
+ iterationConfig, ok := d.GetOk("iteration")
+ if !ok {
+ channel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, iteration)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+ return setPackerChannelResourceData(d, channel)
+ }
+
+ config, ok := iterationConfig.([]interface{})[0].(map[string]interface{})
+ if !ok {
+ return diag.Errorf("Failed to read iteration configuration during update.")
+ }
+
+ updatedIterationConfig := make(map[string]interface{})
+ for key, value := range config {
+ fullKey := fmt.Sprintf("iteration.0.%s", key)
+ // The upstream API can't handle requests where every iteration identifier is set,
+ // so we only send the inputs that differ from what is already in state.
+ if d.HasChange(fullKey) {
+ updatedIterationConfig[key] = value
+ }
+ }
+
+ if len(updatedIterationConfig) != 0 {
+ iteration = expandIterationConfig(updatedIterationConfig)
+ }
+
+ channel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, iteration)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ return setPackerChannelResourceData(d, channel)
+}
+
+func resourcePackerChannelDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("name").(string)
+
+ client := meta.(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ _, err := clients.DeleteBucketChannel(ctx, client, loc, bucketName, channelName)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ return nil
+}
+
+func resourcePackerChannelImport(ctx context.Context, d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
+ client := meta.(*clients.Client)
+
+ var err error
+ // Updates the source channel to include data about the module used.
+ client, err = client.UpdateSourceChannel(d)
+ if err != nil {
+ log.Printf("[DEBUG] Failed to update analytics with module name (%s)", err)
+ }
+
+ idParts := strings.SplitN(d.Id(), ":", 2)
+ if len(idParts) != 2 || idParts[0] == "" || idParts[1] == "" {
+ return nil, fmt.Errorf("unexpected format of ID (%q), expected {bucket_name}:{channel_name}", d.Id())
+ }
+
+ bucketName := idParts[0]
+ channelName := idParts[1]
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return nil, err
+
+ }
+ resp, err := clients.ListBucketChannels(ctx, client, loc, bucketName)
+ if err != nil {
+ return nil, err
+ }
+
+ var channel packermodels.HashicorpCloudPackerChannel
+ for _, c := range resp.Channels {
+ if c.Slug == channelName {
+ channel = *c
+ break
+ }
+ }
+
+ if channel.ID == "" {
+ return nil, fmt.Errorf("unable to find channel in bucket %s named %s", bucketName, channelName)
+ }
+
+ if channel.Managed {
+ return nil, fmt.Errorf("the channel %q is managed by HCP Packer and can not be imported", channel.Slug)
+ }
+
+ d.SetId(channel.ID)
+ if err := d.Set("bucket_name", bucketName); err != nil {
+ return nil, err
+ }
+ if err := d.Set("name", channelName); err != nil {
+ return nil, err
+ }
+
+ return []*schema.ResourceData{d}, nil
+}
+
+func setPackerChannelResourceData(d *schema.ResourceData, channel *packermodels.HashicorpCloudPackerChannel) diag.Diagnostics {
+ if channel == nil {
+ err := errors.New("unexpected empty channel provided when setting state")
+ return diag.FromErr(err)
+ }
+
+ d.SetId(channel.ID)
+ if err := d.Set("author_id", channel.AuthorID); err != nil {
+ return diag.FromErr(err)
+ }
+
+ if err := d.Set("created_at", channel.CreatedAt.String()); err != nil {
+ return diag.FromErr(err)
+ }
+
+ if err := d.Set("iteration", flattenIterationConfig(channel.Iteration)); err != nil {
+ return diag.FromErr(err)
+ }
+
+ if err := d.Set("updated_at", channel.UpdatedAt.String()); err != nil {
+ return diag.FromErr(err)
+ }
+
+ return nil
+}
+
+func expandIterationConfig(config map[string]interface{}) *packermodels.HashicorpCloudPackerIteration {
+ if config == nil {
+ return nil
+ }
+
+ var iteration packermodels.HashicorpCloudPackerIteration
+ if v, ok := config["id"]; ok && v.(string) != "" {
+ iteration.ID = v.(string)
+ }
+ if v, ok := config["fingerprint"]; ok && v.(string) != "" {
+ iteration.Fingerprint = v.(string)
+ }
+ if v, ok := config["incremental_version"]; ok && v.(int) != 0 {
+ iteration.IncrementalVersion = int32(v.(int))
+ }
+
+ return &iteration
+}
+
+func flattenIterationConfig(iteration *packermodels.HashicorpCloudPackerIteration) []map[string]interface{} {
+ result := make([]map[string]interface{}, 0)
+ if iteration == nil {
+ return result
+ }
+
+ item := make(map[string]interface{})
+ item["id"] = iteration.ID
+ item["fingerprint"] = iteration.Fingerprint
+ item["incremental_version"] = iteration.IncrementalVersion
+ return append(result, item)
+}
diff --git a/internal/provider/resource_packer_channel_test.go b/internal/provider/resource_packer_channel_test.go
new file mode 100644
index 000000000..14d683857
--- /dev/null
+++ b/internal/provider/resource_packer_channel_test.go
@@ -0,0 +1,244 @@
+package provider
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
+)
+
+func TestAccPackerChannel(t *testing.T) {
+ resourceName := "hcp_packer_channel.production"
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(*terraform.State) error {
+ deleteBucket(t, acctestAlpineBucket, false)
+ return nil
+ },
+
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() { upsertBucket(t, acctestAlpineBucket) },
+ Config: testConfig(testAccPackerChannelBasic(acctestAlpineBucket, acctestProductionChannel)),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttrSet(resourceName, "author_id"),
+ resource.TestCheckResourceAttr(resourceName, "bucket_name", acctestAlpineBucket),
+ resource.TestCheckResourceAttrSet(resourceName, "created_at"),
+ resource.TestCheckResourceAttrSet(resourceName, "id"),
+ resource.TestCheckResourceAttr(resourceName, "name", acctestProductionChannel),
+ resource.TestCheckResourceAttrSet(resourceName, "organization_id"),
+ resource.TestCheckResourceAttrSet(resourceName, "project_id"),
+ resource.TestCheckResourceAttrSet(resourceName, "updated_at"),
+ ),
+ },
+ // Test that the bucket channel created in the previous step can be imported and
+ // that the resulting Terraform state matches exactly.
+ {
+ ResourceName: resourceName,
+ ImportState: true,
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return "", fmt.Errorf("not found: %s", resourceName)
+ }
+
+ bucketName := rs.Primary.Attributes["bucket_name"]
+ channelName := rs.Primary.Attributes["name"]
+ return fmt.Sprintf("%s:%s", bucketName, channelName), nil
+ },
+ ImportStateVerify: true,
+ },
+ },
+ })
+}
+
+func TestAccPackerChannel_AssignedIteration(t *testing.T) {
+ resourceName := "hcp_packer_channel.production"
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(*terraform.State) error {
+ deleteBucket(t, acctestAlpineBucket, false)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() {
+ fingerprint := "channel-assigned-iteration"
+ upsertBucket(t, acctestAlpineBucket)
+ upsertIteration(t, acctestAlpineBucket, fingerprint)
+ itID, err := getIterationIDFromFingerPrint(t, acctestAlpineBucket, fingerprint)
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+ upsertBuild(t, acctestAlpineBucket, fingerprint, itID)
+ },
+ Config: testConfig(testAccPackerChannelAssignedLatestIteration(acctestAlpineBucket, acctestProductionChannel)),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttrSet(resourceName, "author_id"),
+ resource.TestCheckResourceAttr(resourceName, "bucket_name", acctestAlpineBucket),
+ resource.TestCheckResourceAttrSet(resourceName, "created_at"),
+ resource.TestCheckResourceAttrSet(resourceName, "id"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.id"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.incremental_version"),
+ resource.TestCheckResourceAttr(resourceName, "iteration.0.fingerprint", "channel-assigned-iteration"),
+ resource.TestCheckResourceAttrSet(resourceName, "updated_at"),
+ ),
+ },
+ // Test that the bucket channel created in the previous step can be imported and
+ // that the resulting Terraform state matches exactly.
+ {
+ ResourceName: resourceName,
+ ImportState: true,
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return "", fmt.Errorf("not found: %s", resourceName)
+ }
+
+ bucketName := rs.Primary.Attributes["bucket_name"]
+ channelName := rs.Primary.Attributes["name"]
+ return fmt.Sprintf("%s:%s", bucketName, channelName), nil
+ },
+ ImportStateVerify: true,
+ },
+ },
+ })
+}
+
+func TestAccPackerChannel_UpdateAssignedIteration(t *testing.T) {
+ resourceName := "hcp_packer_channel.production"
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(*terraform.State) error {
+ deleteBucket(t, acctestAlpineBucket, false)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() {
+ fingerprint := "channel-update-it1"
+ upsertBucket(t, acctestAlpineBucket)
+ upsertIteration(t, acctestAlpineBucket, fingerprint)
+ itID, err := getIterationIDFromFingerPrint(t, acctestAlpineBucket, fingerprint)
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+ upsertBuild(t, acctestAlpineBucket, fingerprint, itID)
+ },
+ Config: testConfig(testAccPackerChannelAssignedLatestIteration(acctestAlpineBucket, acctestProductionChannel)),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttrSet(resourceName, "author_id"),
+ resource.TestCheckResourceAttrSet(resourceName, "created_at"),
+ resource.TestCheckResourceAttrSet(resourceName, "id"),
+ resource.TestCheckResourceAttr(resourceName, "bucket_name", acctestAlpineBucket),
+ resource.TestCheckResourceAttr(resourceName, "name", acctestProductionChannel),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.id"),
+ resource.TestCheckResourceAttr(resourceName, "iteration.0.fingerprint", "channel-update-it1"),
+ ),
+ },
+ {
+ PreConfig: func() {
+ fingerprint := "channel-update-it2"
+ upsertIteration(t, acctestAlpineBucket, fingerprint)
+ itID, err := getIterationIDFromFingerPrint(t, acctestAlpineBucket, fingerprint)
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+ upsertBuild(t, acctestAlpineBucket, fingerprint, itID)
+ },
+ Config: testConfig(testAccPackerChannelAssignedLatestIteration(acctestAlpineBucket, acctestProductionChannel)),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttrSet(resourceName, "author_id"),
+ resource.TestCheckResourceAttr(resourceName, "bucket_name", acctestAlpineBucket),
+ resource.TestCheckResourceAttrSet(resourceName, "created_at"),
+ resource.TestCheckResourceAttrSet(resourceName, "id"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.id"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.incremental_version"),
+ resource.TestCheckResourceAttr(resourceName, "iteration.0.fingerprint", "channel-update-it2"),
+ resource.TestCheckResourceAttr(resourceName, "name", acctestProductionChannel),
+ resource.TestCheckResourceAttrSet(resourceName, "updated_at"),
+ ),
+ },
+ },
+ })
+}
+
+func TestAccPackerChannel_UpdateAssignedIterationWithFingerprint(t *testing.T) {
+ resourceName := "hcp_packer_channel.production"
+
+ fingerprint := "channel-update-it1"
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(*terraform.State) error {
+ deleteBucket(t, acctestAlpineBucket, false)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() {
+ upsertBucket(t, acctestAlpineBucket)
+ upsertIteration(t, acctestAlpineBucket, fingerprint)
+ itID, err := getIterationIDFromFingerPrint(t, acctestAlpineBucket, fingerprint)
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+ upsertBuild(t, acctestAlpineBucket, fingerprint, itID)
+ },
+ Config: testConfig(testAccPackerChannelIterationFingerprint(acctestAlpineBucket, acctestProductionChannel, fingerprint)),
+ Check: resource.ComposeTestCheckFunc(
+ resource.TestCheckResourceAttrSet(resourceName, "author_id"),
+ resource.TestCheckResourceAttr(resourceName, "bucket_name", acctestAlpineBucket),
+ resource.TestCheckResourceAttrSet(resourceName, "created_at"),
+ resource.TestCheckResourceAttrSet(resourceName, "id"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.fingerprint"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.id"),
+ resource.TestCheckResourceAttrSet(resourceName, "iteration.0.incremental_version"),
+ resource.TestCheckResourceAttr(resourceName, "name", acctestProductionChannel),
+ resource.TestCheckResourceAttrSet(resourceName, "updated_at"),
+ ),
+ },
+ },
+ })
+}
+
+var testAccPackerChannelBasic = func(bucketName, channelName string) string {
+ return fmt.Sprintf(`
+ resource "hcp_packer_channel" "production" {
+ bucket_name = %q
+ name = %q
+ }`, bucketName, channelName)
+}
+
+var testAccPackerChannelAssignedLatestIteration = func(bucketName, channelName string) string {
+ return fmt.Sprintf(`
+ data "hcp_packer_image_iteration" "test" {
+ bucket_name = %[2]q
+ channel = "latest"
+ }
+ resource "hcp_packer_channel" "production" {
+ name = %[1]q
+ bucket_name = %[2]q
+ iteration {
+ id = data.hcp_packer_image_iteration.test.id
+ }
+ }`, channelName, bucketName)
+}
+
+var testAccPackerChannelIterationFingerprint = func(bucketName, channelName, fingerprint string) string {
+ return fmt.Sprintf(`
+ resource "hcp_packer_channel" "production" {
+ bucket_name = %q
+ name = %q
+ iteration {
+ fingerprint = %q
+ }
+ }`, bucketName, channelName, fingerprint)
+}
diff --git a/templates/resources/packer_channel.md.tmpl b/templates/resources/packer_channel.md.tmpl
new file mode 100644
index 000000000..bf1f5ed71
--- /dev/null
+++ b/templates/resources/packer_channel.md.tmpl
@@ -0,0 +1,30 @@
+---
+page_title: "{{.Type}} {{.Name}} - {{.ProviderName}}"
+subcategory: ""
+description: |-
+{{ .Description | plainmarkdown | trimspace | prefixlines " " }}
+---
+
+# {{.Name}} ({{.Type}})
+
+{{ .Description | trimspace }}
+
+## Example Usage
+
+To create a channel with no assigned iteration.
+{{ tffile "examples/resources/hcp_packer_channel/resource.tf" }}
+
+To create a channel with an assigned iteration, or to update the iteration assigned to an existing channel.
+{{ tffile "examples/resources/hcp_packer_channel/resource_assignment.tf" }}
+
+Using the iteration fetched from the `latest` channel to create a new channel with an assigned iteration.
+{{ tffile "examples/resources/hcp_packer_channel/resource_using_latest_channel.tf" }}
+
+
+{{ .SchemaMarkdown | trimspace }}
+
+## Import
+
+Import is supported using the following syntax:
+
+{{ codefile "shell" "examples/resources/hcp_packer_channel/import.sh" }}