Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

airbyte-ci: introduce a SecretStore abstraction #38322

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion airbyte-ci/connectors/connector_ops/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ requests = "^2.31"
PyYAML = "^6.0"
GitPython = "^3.1.29"
pydantic = "^1.9"
PyGithub = "^1.58.0"
PyGithub = "^2"
rich = "^13.0.0"
pydash = "^6.0.2"
google-cloud-storage = "^2.8.0"
Expand Down
1 change: 1 addition & 0 deletions airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -748,6 +748,7 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
| 4.15.0 | [#38322](https://github.com/airbytehq/airbyte/pull/38322) | Introduce a SecretStore abstraction to fetch connector secrets from metadata files. |
| 4.14.1 | [#38582](https://github.com/airbytehq/airbyte/pull/38582) | Fixed bugs in `up_to_date` flags, `pull_request` version change logic. |
| 4.14.0 | [#38281](https://github.com/airbytehq/airbyte/pull/38281) | Conditionally run test suites according to `connectorTestSuitesOptions` in metadata files. |
| 4.13.3 | [#38221](https://github.com/airbytehq/airbyte/pull/38221) | Add dagster cloud dev deployment pipeline options |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,12 +54,11 @@ async def build(ctx: click.Context, use_host_gradle_dist_tar: bool, build_archit
git_repo_url=ctx.obj["git_repo_url"],
ci_report_bucket=ctx.obj["ci_report_bucket_name"],
report_output_prefix=ctx.obj["report_output_prefix"],
use_remote_secrets=ctx.obj["use_remote_secrets"],
gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
dagger_logs_url=ctx.obj.get("dagger_logs_url"),
pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
ci_context=ctx.obj.get("ci_context"),
ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
ci_gcp_credentials=ctx.obj["ci_gcp_credentials"],
use_local_cdk=ctx.obj.get("use_local_cdk"),
enable_report_auto_open=ctx.obj.get("enable_report_auto_open"),
use_host_gradle_dist_tar=use_host_gradle_dist_tar,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,12 +33,11 @@ async def bump_version(
git_repo_url=ctx.obj["git_repo_url"],
ci_report_bucket=ctx.obj["ci_report_bucket_name"],
report_output_prefix=ctx.obj["report_output_prefix"],
use_remote_secrets=ctx.obj["use_remote_secrets"],
gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
dagger_logs_url=ctx.obj.get("dagger_logs_url"),
pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
ci_context=ctx.obj.get("ci_context"),
ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
ci_gcp_credentials=ctx.obj["ci_gcp_credentials"],
ci_git_user=ctx.obj["ci_git_user"],
ci_github_access_token=ctx.obj["ci_github_access_token"],
enable_report_auto_open=False,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -153,8 +153,9 @@ def __init__(

async def get_repo_dir(self) -> Directory:
    """Return the repository directory, fetching it from the pipeline context on first use.

    The fetched directory is cached on ``self.repo_dir`` so the context is
    only queried once per step instance.
    """
    # The statements that previously followed the return were unreachable
    # dead code (an artifact of two merged revisions) and have been removed.
    if not self.repo_dir:
        self.repo_dir = await self.context.get_repo_dir()
    return self.repo_dir

async def _run(self) -> StepResult:
result = await self.update_metadata()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@

import os
from pathlib import Path
from typing import List, Optional, Set, Tuple
from typing import List, Set, Tuple

import asyncclick as click
from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo # type: ignore
from pipelines import main_logger
from pipelines.cli.click_decorators import click_append_to_context_object, click_ignore_unused_kwargs, click_merge_args_into_context_obj
from pipelines.cli.airbyte_ci import wrap_in_secret
from pipelines.cli.click_decorators import click_ignore_unused_kwargs, click_merge_args_into_context_obj
from pipelines.cli.lazy_group import LazyGroup
from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors
from pipelines.helpers.git import get_modified_files
Expand Down Expand Up @@ -108,37 +109,6 @@ def validate_environment(is_local: bool) -> None:
raise click.UsageError(f"When running in a CI context a {required_env_var} environment variable must be set.")


def should_use_remote_secrets(use_remote_secrets: Optional[bool]) -> bool:
    """Decide whether connector secrets come from Airbyte GSM or the local secrets directory.

    Args:
        use_remote_secrets (Optional[bool]): The user's explicit choice; None means
            "auto-detect from the environment".

    Raises:
        click.UsageError: If the --use-remote-secrets flag was provided but no
            GCP_GSM_CREDENTIALS environment variable was found.

    Returns:
        bool: True to use remote (GSM) connector secrets, False to use local ones.
    """
    gsm_credentials_available = bool(os.getenv("GCP_GSM_CREDENTIALS"))
    # Explicit opt-out: honor --use-local-secrets unconditionally.
    if use_remote_secrets is False:
        main_logger.info("Using local connector secrets as the --use-local-secrets flag was provided")
        return False
    # Remote secrets are usable, whether requested explicitly or auto-detected.
    if gsm_credentials_available:
        main_logger.info("GCP_GSM_CREDENTIALS environment variable found, using remote connector secrets.")
        return True
    # Remote secrets were explicitly requested but the credentials are missing.
    if use_remote_secrets:
        raise click.UsageError("The --use-remote-secrets flag was provided but no GCP_GSM_CREDENTIALS environment variable was found.")
    # Auto-detect fallback: no credentials found, default to local secrets.
    main_logger.info("No GCP_GSM_CREDENTIALS environment variable found, using local connector secrets.")
    return False


@click.group(
cls=LazyGroup,
help="Commands related to connectors and connector acceptance tests.",
Expand All @@ -157,12 +127,6 @@ def should_use_remote_secrets(use_remote_secrets: Optional[bool]) -> bool:
"pull_request": "pipelines.airbyte_ci.connectors.pull_request.commands.pull_request",
},
)
@click.option(
"--use-remote-secrets/--use-local-secrets",
help="Use Airbyte GSM connector secrets or local connector secrets.",
type=bool,
default=None,
)
@click.option(
"--name",
"names",
Expand Down Expand Up @@ -223,16 +187,17 @@ def should_use_remote_secrets(use_remote_secrets: Optional[bool]) -> bool:
type=click.STRING,
required=False,
envvar="DOCKER_HUB_USERNAME",
callback=wrap_in_secret,
)
@click.option(
"--docker-hub-password",
help="Your password to connect to DockerHub.",
type=click.STRING,
required=False,
envvar="DOCKER_HUB_PASSWORD",
callback=wrap_in_secret,
)
@click_merge_args_into_context_obj
@click_append_to_context_object("use_remote_secrets", lambda ctx: should_use_remote_secrets(ctx.obj["use_remote_secrets"]))
@click.pass_context
@click_ignore_unused_kwargs
async def connectors(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

import yaml # type: ignore
from asyncer import asyncify
from dagger import Directory, Platform, Secret
from dagger import Directory, Platform
from github import PullRequest
from pipelines.airbyte_ci.connectors.consts import CONNECTOR_TEST_STEP_ID
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
Expand All @@ -26,6 +26,7 @@
from pipelines.helpers.slack import send_message_to_webhook
from pipelines.helpers.utils import METADATA_FILE_NAME
from pipelines.models.contexts.pipeline_context import PipelineContext
from pipelines.models.secrets import LocalDirectorySecretStore, Secret, SecretStore

if TYPE_CHECKING:
from pathlib import Path as NativePath
Expand Down Expand Up @@ -54,11 +55,10 @@ def __init__(
diffed_branch: str,
git_repo_url: str,
report_output_prefix: str,
use_remote_secrets: bool = True,
ci_report_bucket: Optional[str] = None,
ci_gcs_credentials: Optional[str] = None,
ci_gcp_credentials: Optional[Secret] = None,
ci_git_user: Optional[str] = None,
ci_github_access_token: Optional[str] = None,
ci_github_access_token: Optional[Secret] = None,
connector_acceptance_test_image: str = DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE,
gha_workflow_run_url: Optional[str] = None,
dagger_logs_url: Optional[str] = None,
Expand All @@ -72,13 +72,14 @@ def __init__(
use_local_cdk: bool = False,
use_host_gradle_dist_tar: bool = False,
enable_report_auto_open: bool = True,
docker_hub_username: Optional[str] = None,
docker_hub_password: Optional[str] = None,
s3_build_cache_access_key_id: Optional[str] = None,
s3_build_cache_secret_key: Optional[str] = None,
docker_hub_username: Optional[Secret] = None,
docker_hub_password: Optional[Secret] = None,
s3_build_cache_access_key_id: Optional[Secret] = None,
s3_build_cache_secret_key: Optional[Secret] = None,
concurrent_cat: Optional[bool] = False,
run_step_options: RunStepOptions = RunStepOptions(),
targeted_platforms: Sequence[Platform] = BUILD_PLATFORMS,
secret_stores: Dict[str, SecretStore] | None = None,
) -> None:
"""Initialize a connector context.

Expand All @@ -90,7 +91,6 @@ def __init__(
diffed_branch: str: The branch to compare the current branch against.
git_repo_url: str: The URL of the git repository.
report_output_prefix (str): The S3 key to upload the test report to.
use_remote_secrets (bool, optional): Whether to download secrets for GSM or use the local secrets. Defaults to True.
connector_acceptance_test_image (Optional[str], optional): The image to use to run connector acceptance tests. Defaults to DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE.
gha_workflow_run_url (Optional[str], optional): URL to the github action workflow run. Only valid for CI run. Defaults to None.
dagger_logs_url (Optional[str], optional): URL to the dagger logs. Only valid for CI run. Defaults to None.
Expand All @@ -102,17 +102,16 @@ def __init__(
code_tests_only (bool, optional): Whether to ignore non-code tests like QA and metadata checks. Defaults to False.
use_host_gradle_dist_tar (bool, optional): Used when developing java connectors with gradle. Defaults to False.
enable_report_auto_open (bool, optional): Open HTML report in browser window. Defaults to True.
docker_hub_username (Optional[str], optional): Docker Hub username to use to read registries. Defaults to None.
docker_hub_password (Optional[str], optional): Docker Hub password to use to read registries. Defaults to None.
s3_build_cache_access_key_id (Optional[str], optional): Gradle S3 Build Cache credentials. Defaults to None.
s3_build_cache_secret_key (Optional[str], optional): Gradle S3 Build Cache credentials. Defaults to None.
docker_hub_username (Optional[Secret], optional): Docker Hub username to use to read registries. Defaults to None.
docker_hub_password (Optional[Secret], optional): Docker Hub password to use to read registries. Defaults to None.
s3_build_cache_access_key_id (Optional[Secret], optional): Gradle S3 Build Cache credentials. Defaults to None.
s3_build_cache_secret_key (Optional[Secret], optional): Gradle S3 Build Cache credentials. Defaults to None.
concurrent_cat (bool, optional): Whether to run the CAT tests in parallel. Defaults to False.
targeted_platforms (Optional[Iterable[Platform]], optional): The platforms to build the connector image for. Defaults to BUILD_PLATFORMS.
"""

self.pipeline_name = pipeline_name
self.connector = connector
self.use_remote_secrets = use_remote_secrets
self.connector_acceptance_test_image = connector_acceptance_test_image
self._secrets_dir: Optional[Directory] = None
self._updated_secrets_dir: Optional[Directory] = None
Expand All @@ -127,7 +126,6 @@ def __init__(
self.s3_build_cache_access_key_id = s3_build_cache_access_key_id
self.s3_build_cache_secret_key = s3_build_cache_secret_key
self.concurrent_cat = concurrent_cat
self._connector_secrets: Optional[Dict[str, Secret]] = None
self.targeted_platforms = targeted_platforms

super().__init__(
Expand All @@ -146,25 +144,14 @@ def __init__(
reporting_slack_channel=reporting_slack_channel,
pull_request=pull_request,
ci_report_bucket=ci_report_bucket,
ci_gcs_credentials=ci_gcs_credentials,
ci_gcp_credentials=ci_gcp_credentials,
ci_git_user=ci_git_user,
ci_github_access_token=ci_github_access_token,
run_step_options=self._skip_metadata_disabled_test_suites(run_step_options),
enable_report_auto_open=enable_report_auto_open,
secret_stores=secret_stores,
)

@property
def s3_build_cache_access_key_id_secret(self) -> Optional[Secret]:
    """Gradle S3 build-cache access key id wrapped as a Dagger secret, or None when unset."""
    if not self.s3_build_cache_access_key_id:
        return None
    return self.dagger_client.set_secret("s3_build_cache_access_key_id", self.s3_build_cache_access_key_id)

@property
def s3_build_cache_secret_key_secret(self) -> Optional[Secret]:
    """Gradle S3 build-cache secret key as a Dagger secret; None unless both cache credentials are set."""
    # The secret key is only meaningful when the access key id is also configured.
    both_credentials_set = bool(self.s3_build_cache_access_key_id) and bool(self.s3_build_cache_secret_key)
    if not both_credentials_set:
        return None
    return self.dagger_client.set_secret("s3_build_cache_secret_key", self.s3_build_cache_secret_key)

@property
def modified_files(self) -> FrozenSet[NativePath]:
return self.connector.modified_files
Expand Down Expand Up @@ -195,7 +182,7 @@ def live_tests_dir(self) -> Directory:

@property
def should_save_updated_secrets(self) -> bool:
    """Whether updated connector secrets should be pushed back to the remote secret store.

    True only when remote secrets are in use and the test pipeline produced a
    directory of updated secrets.
    """
    # The second return statement in the original was unreachable dead code
    # (an artifact of two merged revisions) and has been removed.
    return self.use_remote_secrets and self.updated_secrets_dir is not None

@property
def host_image_export_dir_path(self) -> str:
Expand All @@ -222,21 +209,15 @@ def docker_image(self) -> str:
return f"{self.docker_repository}:{self.docker_image_tag}"

@property
def docker_hub_username_secret(self) -> Optional[Secret]:
if self.docker_hub_username is None:
return None
return self.dagger_client.set_secret("docker_hub_username", self.docker_hub_username)
def local_secret_store_name(self) -> str:
return f"{self.connector.technical_name}-local"

@property
def docker_hub_password_secret(self) -> Optional[Secret]:
if self.docker_hub_password is None:
return None
return self.dagger_client.set_secret("docker_hub_password", self.docker_hub_password)

async def get_connector_secrets(self) -> Dict[str, Secret]:
if self._connector_secrets is None:
self._connector_secrets = await secrets.get_connector_secrets(self)
return self._connector_secrets
def local_secret_store(self) -> Optional[LocalDirectorySecretStore]:
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It feels a little random to stash the local secrets info here, when we're getting it from helper functions in python_connectors.py and java_connectors.py. Would it be possible to follow the same pattern you used for the other secret stores?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tied the local_secret_store to the connector context because:

  • We have one context instance per connector under test
  • We want a local secret store per connector (the content of the secrets folder)

connector_secrets_path = self.connector.code_directory / "secrets"
if connector_secrets_path.is_dir():
return LocalDirectorySecretStore(connector_secrets_path)
return None

async def get_connector_dir(self, exclude: Optional[List[str]] = None, include: Optional[List[str]] = None) -> Directory:
"""Get the connector under test source code directory.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,12 +56,11 @@ async def migrate_to_base_image(
git_repo_url=ctx.obj["git_repo_url"],
ci_report_bucket=ctx.obj["ci_report_bucket_name"],
report_output_prefix=ctx.obj["report_output_prefix"],
use_remote_secrets=ctx.obj["use_remote_secrets"],
gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
dagger_logs_url=ctx.obj.get("dagger_logs_url"),
pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
ci_context=ctx.obj.get("ci_context"),
ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
ci_gcp_credentials=ctx.obj["ci_gcp_credentials"],
ci_git_user=ctx.obj["ci_git_user"],
ci_github_access_token=ctx.obj["ci_github_access_token"],
enable_report_auto_open=False,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,11 @@ async def migrate_to_poetry(ctx: click.Context, changelog: bool, bump: str | Non
git_repo_url=ctx.obj["git_repo_url"],
ci_report_bucket=ctx.obj["ci_report_bucket_name"],
report_output_prefix=ctx.obj["report_output_prefix"],
use_remote_secrets=ctx.obj["use_remote_secrets"],
gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
dagger_logs_url=ctx.obj.get("dagger_logs_url"),
pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
ci_context=ctx.obj.get("ci_context"),
ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
ci_gcp_credentials=ctx.obj["ci_gcp_credentials"],
ci_git_user=ctx.obj["ci_git_user"],
ci_github_access_token=ctx.obj["ci_github_access_token"],
enable_report_auto_open=False,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,9 +98,7 @@ async def run_connectors_pipelines(
docker_hub_password = contexts[0].docker_hub_password

if docker_hub_username and docker_hub_password:
docker_hub_username_secret = dagger_client.set_secret("DOCKER_HUB_USERNAME", docker_hub_username)
docker_hub_password_secret = dagger_client.set_secret("DOCKER_HUB_PASSWORD", docker_hub_password)
dockerd_service = docker.with_global_dockerd_service(dagger_client, docker_hub_username_secret, docker_hub_password_secret)
dockerd_service = docker.with_global_dockerd_service(dagger_client, docker_hub_username, docker_hub_password)
else:
dockerd_service = docker.with_global_dockerd_service(dagger_client)

Expand Down
Loading
Loading