diff --git a/modules/backup/README.md b/modules/backup/README.md
index c68e6724..69d4f74b 100644
--- a/modules/backup/README.md
+++ b/modules/backup/README.md
@@ -56,6 +56,8 @@ fetch workflows.googleapis.com/Workflow
 | backup\_retention\_time | The number of days backups should be kept | `number` | `30` | no |
 | backup\_schedule | The cron schedule to execute the internal backup | `string` | `"45 2 * * *"` | no |
 | compress\_export | Whether or not to compress the export when storing in the bucket; Only valid for MySQL and PostgreSQL | `bool` | `true` | no |
+| connector\_params\_timeout | The end-to-end duration the connector call is allowed to run for before throwing a timeout exception. The default value is 1800 and this should be the maximum for connector methods that are not long-running operations. Otherwise, for long-running operations, the maximum timeout for a connector call is 31536000 seconds (one year). | `number` | `1800` | no |
+| enable\_connector\_params | Whether to enable connector-specific parameters for Google Workflow SQL Export. | `bool` | `false` | no |
 | enable\_export\_backup | Weather to create exports to GCS Buckets with this module | `bool` | `true` | no |
 | enable\_internal\_backup | Wether to create internal backups with this module | `bool` | `true` | no |
 | export\_databases | The list of databases that should be exported - if is an empty set all databases will be exported | `set(string)` | `[]` | no |
@@ -67,7 +69,9 @@ fetch workflows.googleapis.com/Workflow
 | scheduler\_timezone | The Timezone in which the Scheduler Jobs are triggered | `string` | `"Etc/GMT"` | no |
 | service\_account | The service account to use for running the workflow and triggering the workflow by Cloud Scheduler - If empty or null a service account will be created. 
 If you have provided a service account you need to grant the Cloud SQL Admin and the Workflows Invoker role to that | `string` | `null` | no |
 | sql\_instance | The name of the SQL instance to backup | `string` | n/a | yes |
+| sql\_instance\_replica | The name of the SQL instance replica to export | `string` | `null` | no |
 | unique\_suffix | Unique suffix to add to scheduler jobs and workflows names. | `string` | `""` | no |
 
 ## Outputs
+| use\_sql\_instance\_replica\_in\_exporter | Whether or not to use replica instance on exporter workflow. | `bool` | `false` | no |
diff --git a/modules/backup/main.tf b/modules/backup/main.tf
index 61947646..c8ecf120 100644
--- a/modules/backup/main.tf
+++ b/modules/backup/main.tf
@@ -97,26 +97,28 @@ resource "google_cloud_scheduler_job" "sql_backup" {
 ################################
 resource "google_workflows_workflow" "sql_export" {
   count           = var.enable_export_backup ? 1 : 0
-  name            = "sql-export-${var.sql_instance}${var.unique_suffix}"
+  name            = var.use_sql_instance_replica_in_exporter ? "sql-export-${var.sql_instance_replica}${var.unique_suffix}" : "sql-export-${var.sql_instance}${var.unique_suffix}"
   region          = var.region
   description     = "Workflow for backing up the CloudSQL Instance"
   project         = var.project_id
   service_account = local.service_account
   source_contents = templatefile("${path.module}/templates/export.yaml.tftpl", {
-    project             = var.project_id
-    instanceName        = var.sql_instance
-    backupRetentionTime = var.backup_retention_time
-    databases           = jsonencode(var.export_databases)
-    gcsBucket           = var.export_uri
-    dbType              = split("_", data.google_sql_database_instance.backup_instance.database_version)[0]
-    compressExport      = var.compress_export
-    logDbName           = var.log_db_name_to_export
+    project                = var.project_id
+    instanceName           = var.use_sql_instance_replica_in_exporter ? var.sql_instance_replica : var.sql_instance
+    backupRetentionTime    = var.backup_retention_time
+    databases              = jsonencode(var.export_databases)
+    gcsBucket              = var.export_uri
+    dbType                 = split("_", data.google_sql_database_instance.backup_instance.database_version)[0]
+    compressExport         = var.compress_export
+    enableConnectorParams  = var.enable_connector_params
+    connectorParamsTimeout = var.connector_params_timeout
+    logDbName              = var.log_db_name_to_export
   })
 }
 
 resource "google_cloud_scheduler_job" "sql_export" {
   count       = var.enable_export_backup ? 1 : 0
-  name        = "sql-export-${var.sql_instance}${var.unique_suffix}"
+  name        = var.use_sql_instance_replica_in_exporter ? "sql-export-${var.sql_instance_replica}${var.unique_suffix}" : "sql-export-${var.sql_instance}${var.unique_suffix}"
   project     = var.project_id
   region      = var.region
   description = "Managed by Terraform - Triggers a SQL Export via Workflows"
diff --git a/modules/backup/templates/export.yaml.tftpl b/modules/backup/templates/export.yaml.tftpl
index d2339607..9a1ccb88 100644
--- a/modules/backup/templates/export.yaml.tftpl
+++ b/modules/backup/templates/export.yaml.tftpl
@@ -54,6 +54,10 @@ main:
         args:
           project: ${project}
           instance: ${instanceName}
+%{ if enableConnectorParams ~}
+          connector_params:
+            timeout: ${connectorParamsTimeout}
+%{ endif ~}
         body:
           exportContext:
             databases: [$${database}]
@@ -81,6 +85,10 @@ main:
         args:
           project: ${project}
           instance: ${instanceName}
+%{ if enableConnectorParams ~}
+          connector_params:
+            timeout: ${connectorParamsTimeout}
+%{ endif ~}
         body:
           exportContext:
             databases: [$${database}]
@@ -94,9 +102,12 @@ main:
         args:
           project: ${project}
           instance: ${instanceName}
+%{ if enableConnectorParams ~}
+          connector_params:
+            timeout: ${connectorParamsTimeout}
+%{ endif ~}
         body:
           exportContext:
             databases: $${databases}
             uri: $${"${gcsBucket}/${instanceName}-" + backupTime + %{ if compressExport == true }".sql.gz"%{ else }".sql"%{ endif }}
 %{ endif }
-
diff --git a/modules/backup/variables.tf b/modules/backup/variables.tf
index d8428e59..faa16a04 100644
--- a/modules/backup/variables.tf
+++ b/modules/backup/variables.tf
@@ -97,6 +97,18 @@ variable "compress_export" {
   default     = true
 }
 
+variable "enable_connector_params" {
+  description = "Whether to enable connector-specific parameters for Google Workflow SQL Export."
+  type        = bool
+  default     = false
+}
+
+variable "connector_params_timeout" {
+  description = "The end-to-end duration the connector call is allowed to run for before throwing a timeout exception. The default value is 1800 and this should be the maximum for connector methods that are not long-running operations. Otherwise, for long-running operations, the maximum timeout for a connector call is 31536000 seconds (one year)."
+  type        = number
+  default     = 1800
+}
+
 variable "unique_suffix" {
   description = "Unique suffix to add to scheduler jobs and workflows names."
   type        = string
@@ -108,3 +120,15 @@ variable "log_db_name_to_export" {
   type    = bool
   default = false
 }
+
+variable "use_sql_instance_replica_in_exporter" {
+  description = "Whether or not to use replica instance on exporter workflow."
+  type        = bool
+  default     = false
+}
+
+variable "sql_instance_replica" {
+  description = "The name of the SQL instance replica to export"
+  type        = string
+  default     = null
+}