Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DB Safety feature and GCP opta destroy + config upload #163

Merged
merged 3 commits into from
Mar 28, 2021
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions config/registry.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,7 @@ modules:
module_name: str
engine_version: optional
instance_class: optional
safety: optional
outputs:
db_user: str
db_password: str
Expand Down Expand Up @@ -362,6 +363,7 @@ modules:
module_name: str
instance_tier: optional
engine_version: optional
safety: optional
outputs:
db_user: str
db_password: str
Expand Down
3 changes: 2 additions & 1 deletion config/schema/modules/aws-postgres.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ env_name: str(required=False)
layer_name: str(required=False)
module_name: str(required=False)
engine_version: any(required=False)
instance_class: any(required=False)
instance_class: any(required=False)
safety: bool(required=False)
3 changes: 2 additions & 1 deletion config/schema/modules/gcp-postgres.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,5 @@ env_name: str(required=False)
layer_name: str(required=False)
module_name: str(required=False)
engine_version: any(required=False)
instance_tier: any(required=False)
instance_tier: any(required=False)
safety: bool(required=False)
1 change: 1 addition & 0 deletions config/tf_modules/aws-postgres/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ resource "aws_rds_cluster" "db_cluster" {
skip_final_snapshot = true
storage_encrypted = true
kms_key_id = data.aws_kms_key.main.arn
deletion_protection = var.safety
lifecycle {
ignore_changes = [storage_encrypted, kms_key_id]
}
Expand Down
5 changes: 5 additions & 0 deletions config/tf_modules/aws-postgres/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,8 @@ variable "instance_class" {
type = string
default = "db.t3.medium"
}

# Safety flag: wired to the RDS cluster's deletion_protection argument, so the
# database cannot be destroyed while this is true. Defaults to on; set to
# false in the opta config before attempting a destroy.
variable "safety" {
type = bool
default = true
}
1 change: 1 addition & 0 deletions config/tf_modules/aws-s3/bucket.tf
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ resource "aws_s3_bucket" "bucket" {
}
}
}
force_destroy = true
}

resource "aws_s3_bucket_public_access_block" "block" {
Expand Down
1 change: 1 addition & 0 deletions config/tf_modules/gcp-gcs/bucket.tf
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ resource "google_storage_bucket" "bucket" {
encryption {
default_kms_key_name = data.google_kms_crypto_key.kms.id
}
force_destroy = true
}

resource "google_storage_bucket_acl" "acl" {
Expand Down
1 change: 1 addition & 0 deletions config/tf_modules/gcp-postgres/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ resource "random_password" "root_auth" {
resource "google_sql_database_instance" "instance" {
name = "opta-${var.layer_name}-${var.module_name}"
database_version = "POSTGRES_${var.engine_version}"
deletion_protection = var.safety

settings {
disk_autoresize = true
Expand Down
5 changes: 5 additions & 0 deletions config/tf_modules/gcp-postgres/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,8 @@ variable "instance_tier" {
type = string
default = "db-f1-micro"
}

# Safety flag: wired to the Cloud SQL instance's deletion_protection argument,
# so the database cannot be destroyed while this is true. Defaults to on; set
# to false in the opta config before attempting a destroy.
variable "safety" {
type = bool
default = true
}
5 changes: 5 additions & 0 deletions opta/commands/apply.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from opta.amplitude import amplitude_client
from opta.constants import TF_PLAN_PATH
from opta.core.aws import AWS
from opta.core.gcp import GCP
from opta.core.generator import gen, gen_opta_resource_tags
from opta.core.kubernetes import configure_kubectl, tail_module_log, tail_namespace_events
from opta.core.terraform import Terraform
Expand Down Expand Up @@ -85,6 +86,10 @@ def _apply(
gen_opta_resource_tags(layer)
if layer.cloud == "aws":
AWS(layer).upload_opta_config(config)
elif layer.cloud == "google":
GCP(layer).upload_opta_config(config)
else:
raise Exception(f"Cannot handle upload config for cloud {layer.cloud}")

existing_modules: Set[str] = set()
first_loop = True
Expand Down
28 changes: 24 additions & 4 deletions opta/commands/destroy.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,10 @@
import boto3
import click
import yaml
from google.cloud import storage

from opta.amplitude import amplitude_client
from opta.core.gcp import GCP
from opta.core.generator import gen_all
from opta.core.terraform import Terraform
from opta.layer import Layer
Expand Down Expand Up @@ -39,9 +41,13 @@ def _fetch_children_layers(layer: "Layer") -> List["Layer"]:
return []

# Download all the opta config files in the bucket
s3_bucket_name = layer.state_storage()
opta_configs = _download_all_opta_configs(s3_bucket_name)

bucket_name = layer.state_storage()
if layer.cloud == "aws":
opta_configs = _aws_download_all_opta_configs(bucket_name)
elif layer.cloud == "google":
opta_configs = _gcp_download_all_opta_configs(bucket_name)
else:
raise Exception(f"Not handling deletion for cloud {layer.cloud}")
# Keep track of children layers as we find them.
children_layers = []
for config_path in opta_configs:
Expand All @@ -65,7 +71,7 @@ def _fetch_children_layers(layer: "Layer") -> List["Layer"]:

# Download all the opta config files from the specified bucket and return
# a list of temporary file paths to access them.
def _download_all_opta_configs(bucket_name: str) -> List[str]:
def _aws_download_all_opta_configs(bucket_name: str) -> List[str]:
# Opta configs for every layer are saved in the opta_config/ directory
# in the state bucket.
s3_config_dir = "opta_config/"
Expand All @@ -85,3 +91,17 @@ def _download_all_opta_configs(bucket_name: str) -> List[str]:
configs.append(local_config_path)

return configs


# Download all the opta config files from the specified GCS bucket and return
# a list of temporary file paths to access them, mirroring
# _aws_download_all_opta_configs (callers iterate the result as file paths).
def _gcp_download_all_opta_configs(bucket_name: str) -> List[str]:
    import os
    import tempfile

    # Opta configs for every layer are saved in the opta_config/ directory
    # in the state bucket.
    gcs_config_dir = "opta_config/"
    credentials, project_id = GCP.get_credentials()
    gcs_client = storage.Client(project=project_id, credentials=credentials)
    bucket_object = gcs_client.get_bucket(bucket_name)
    blobs: List[storage.Blob] = list(
        gcs_client.list_blobs(bucket_object, prefix=gcs_config_dir)
    )
    configs: List[str] = []
    for blob in blobs:
        # Persist the blob to a temp file so the caller can read it from disk,
        # exactly like the AWS helper does — previously this returned the raw
        # file *contents*, which broke callers expecting paths.
        fd, local_config_path = tempfile.mkstemp()
        with os.fdopen(fd, "wb") as f:
            f.write(blob.download_as_bytes())
        configs.append(local_config_path)
    return configs
23 changes: 22 additions & 1 deletion opta/core/gcp.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,27 @@
from typing import Optional, Tuple
from typing import TYPE_CHECKING, Optional, Tuple

import google.auth.transport.requests
from google.auth import default
from google.auth.credentials import Credentials
from google.auth.exceptions import DefaultCredentialsError, GoogleAuthError
from google.cloud import storage

from opta.exceptions import UserErrors
from opta.utils import logger

if TYPE_CHECKING:
from opta.layer import Layer


class GCP:
project_id: Optional[str] = None
credentials: Optional[Credentials] = None

def __init__(self, layer: "Layer"):
self.layer = layer
providers = layer.root().gen_providers(0)["provider"]
self.region = providers["google"]["region"]

@classmethod
def get_credentials(cls) -> Tuple[Credentials, str]:
if cls.project_id is None or cls.credentials is None:
Expand All @@ -27,3 +37,14 @@ def get_credentials(cls) -> Tuple[Credentials, str]:
# Refresh credentials to get new access token
cls.credentials.refresh(google.auth.transport.requests.Request())
return cls.credentials, cls.project_id # type: ignore

# Upload the current opta config to the state bucket, under opta_config/.
def upload_opta_config(self, config: str) -> None:
bucket = self.layer.state_storage()
config_path = f"opta_config/{self.layer.name}"
credentials, project_id = self.get_credentials()
gcs_client = storage.Client(project=project_id, credentials=credentials)
bucket_object = gcs_client.get_bucket(bucket)
blob = storage.Blob(config_path, bucket_object)
blob.upload_from_string(config)
juandiegopalomino marked this conversation as resolved.
Show resolved Hide resolved
logger.debug("Uploaded opta config to s3")
juandiegopalomino marked this conversation as resolved.
Show resolved Hide resolved