From 832f556e8a4b6cd6db3dccdcc2806847525295ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Safin?= Date: Thu, 14 Mar 2024 00:37:42 +0100 Subject: [PATCH 1/5] rewrite upload providers to use pydantic models at inititialization MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Rafał Safin --- backuper/core.py | 57 ++++++++------- backuper/main.py | 71 ++++++++++++------- backuper/models/upload_provider_models.py | 7 +- backuper/upload_providers/__init__.py | 12 ---- backuper/upload_providers/aws_s3.py | 29 +++----- backuper/upload_providers/azure.py | 18 ++--- backuper/upload_providers/base_provider.py | 8 +-- backuper/upload_providers/debug.py | 8 +-- .../upload_providers/google_cloud_storage.py | 26 +++---- 9 files changed, 108 insertions(+), 128 deletions(-) diff --git a/backuper/core.py b/backuper/core.py index f75c72eb..8ea8b27a 100644 --- a/backuper/core.py +++ b/backuper/core.py @@ -13,15 +13,7 @@ from pydantic import BaseModel from backuper import config -from backuper.models.backup_target_models import ( - DirectoryTargetModel, - MariaDBTargetModel, - MySQLTargetModel, - PostgreSQLTargetModel, - SingleFileTargetModel, - TargetModel, -) -from backuper.models.upload_provider_models import ProviderModel +from backuper.models import backup_target_models, upload_provider_models log = logging.getLogger(__name__) @@ -58,6 +50,25 @@ def run_subprocess(shell_args: str) -> str: return p.stdout +def get_target_map() -> dict[str, type[backup_target_models.TargetModel]]: + return { + config.BackupTargetEnum.FILE: backup_target_models.SingleFileTargetModel, + config.BackupTargetEnum.FOLDER: backup_target_models.DirectoryTargetModel, + config.BackupTargetEnum.MARIADB: backup_target_models.MariaDBTargetModel, + config.BackupTargetEnum.MYSQL: backup_target_models.MySQLTargetModel, + config.BackupTargetEnum.POSTGRESQL: backup_target_models.PostgreSQLTargetModel, + } + + +def get_provider_map() -> dict[str, 
type[upload_provider_models.ProviderModel]]: + return { + config.UploadProviderEnum.AZURE: upload_provider_models.AzureProviderModel, + config.UploadProviderEnum.LOCAL_FILES_DEBUG: upload_provider_models.DebugProviderModel, + config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE: upload_provider_models.GCSProviderModel, + config.UploadProviderEnum.AWS_S3: upload_provider_models.AWSProviderModel, + } + + def remove_path(path: Path) -> None: if path.exists(): if path.is_file() or path.is_symlink(): @@ -152,18 +163,10 @@ def _validate_model( return validated_target -def create_target_models() -> list[TargetModel]: - target_map: dict[str, type[TargetModel]] = { - config.BackupTargetEnum.FILE: SingleFileTargetModel, - config.BackupTargetEnum.FOLDER: DirectoryTargetModel, - config.BackupTargetEnum.MARIADB: MariaDBTargetModel, - config.BackupTargetEnum.MYSQL: MySQLTargetModel, - config.BackupTargetEnum.POSTGRESQL: PostgreSQLTargetModel, - } - - log.critical(target_map) +def create_target_models() -> list[backup_target_models.TargetModel]: + target_map = get_target_map() - targets: list[TargetModel] = [] + targets: list[backup_target_models.TargetModel] = [] for env_name, env_value in os.environ.items(): env_name_lowercase = env_name.lower() log.debug("processing env variable %s", env_name_lowercase) @@ -178,22 +181,18 @@ def create_target_models() -> list[TargetModel]: return targets -def create_provider_model() -> ProviderModel: - target_map: dict[config.UploadProviderEnum, type[ProviderModel]] = {} - for target_model in ProviderModel.__subclasses__(): - name = config.UploadProviderEnum( - target_model.__name__.lower().removesuffix("providermodel") - ) - target_map[name] = target_model +def create_provider_model() -> upload_provider_models.ProviderModel: + provider_map = get_provider_map() + log.info("start validating BACKUP_PROVIDER environment variable") base_provider = _validate_model( "backup_provider", config.options.BACKUP_PROVIDER, - ProviderModel, + 
upload_provider_models.ProviderModel, value_whitespace_split=True, ) - target_model_cls = target_map[base_provider.name] + target_model_cls = provider_map[base_provider.name] return _validate_model( "backup_provider", config.options.BACKUP_PROVIDER, target_model_cls ) diff --git a/backuper/main.py b/backuper/main.py index d11020d4..014cf997 100644 --- a/backuper/main.py +++ b/backuper/main.py @@ -10,17 +10,26 @@ from typing import NoReturn from backuper import config, core -from backuper.backup_targets.base_target import BaseBackupTarget -from backuper.backup_targets.file import File -from backuper.backup_targets.folder import Folder -from backuper.backup_targets.mariadb import MariaDB -from backuper.backup_targets.mysql import MySQL -from backuper.backup_targets.postgresql import PostgreSQL +from backuper.backup_targets import ( + base_target, + file, + folder, + mariadb, + mysql, + postgresql, +) +from backuper.upload_providers import ( + base_provider, + debug, + google_cloud_storage, + aws_s3, + azure, +) from backuper.notifications.notifications_context import ( PROGRAM_STEP, NotificationsContext, ) -from backuper.upload_providers import BaseUploadProvider + exit_event = threading.Event() log = logging.getLogger(__name__) @@ -31,18 +40,36 @@ def quit(sig: int, frame: FrameType | None) -> None: exit_event.set() +def _get_provider_cls_map() -> dict[str, type[base_provider.BaseUploadProvider]]: + return { + config.UploadProviderEnum.AWS_S3: aws_s3.UploadProviderAWS, + config.UploadProviderEnum.AZURE: azure.UploadProviderAzure, + config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE: google_cloud_storage.UploadProviderGCS, + config.UploadProviderEnum.LOCAL_FILES_DEBUG: debug.UploadProviderLocalDebug, + } + + +def _get_target_cls_map() -> dict[str, type[base_target.BaseBackupTarget]]: + return { + config.BackupTargetEnum.FILE: file.File, + config.BackupTargetEnum.FOLDER: folder.Folder, + config.BackupTargetEnum.MARIADB: mariadb.MariaDB, + 
config.BackupTargetEnum.POSTGRESQL: postgresql.PostgreSQL, + config.BackupTargetEnum.MYSQL: mysql.MySQL, + } + + @NotificationsContext(step_name=PROGRAM_STEP.SETUP_PROVIDER) -def backup_provider() -> BaseUploadProvider: - backup_provider_map: dict[config.UploadProviderEnum, type[BaseUploadProvider]] = {} - for backup_provider in BaseUploadProvider.__subclasses__(): - backup_provider_map[backup_provider.target_name] = backup_provider # type: ignore +def backup_provider() -> base_provider.BaseUploadProvider: + provider_cls_map = _get_provider_cls_map() provider_model = core.create_provider_model() log.info( "initializing provider: `%s`", provider_model.name, ) - provider_target_cls = backup_provider_map[provider_model.name] + + provider_target_cls = provider_cls_map[provider_model.name] log.debug("initializing %s with %s", provider_target_cls, provider_model) res_backup_provider = provider_target_cls(**provider_model.model_dump()) log.info( @@ -53,16 +80,10 @@ def backup_provider() -> BaseUploadProvider: @NotificationsContext(step_name=PROGRAM_STEP.SETUP_TARGETS) -def backup_targets() -> list[BaseBackupTarget]: - backup_targets_map: dict[str, type[BaseBackupTarget]] = { - config.BackupTargetEnum.FILE: File, - config.BackupTargetEnum.FOLDER: Folder, - config.BackupTargetEnum.MARIADB: MariaDB, - config.BackupTargetEnum.POSTGRESQL: PostgreSQL, - config.BackupTargetEnum.MYSQL: MySQL, - } +def backup_targets() -> list[base_target.BaseBackupTarget]: + backup_target_cls_map = _get_target_cls_map() - backup_targets: list[BaseBackupTarget] = [] + backup_targets: list[base_target.BaseBackupTarget] = [] target_models = core.create_target_models() if not target_models: raise RuntimeError("Found 0 backup targets, at least 1 is required.") @@ -74,7 +95,7 @@ def backup_targets() -> list[BaseBackupTarget]: "initializing target: `%s`", target_model.env_name, ) - backup_target_cls = backup_targets_map[target_model.name] + backup_target_cls = backup_target_cls_map[target_model.name] 
log.debug("initializing %s with %s", backup_target_cls, target_model) backup_targets.append(backup_target_cls(target_model=target_model)) log.info( @@ -128,7 +149,9 @@ def shutdown() -> NoReturn: # pragma: no cover sys.exit(1) -def run_backup(target: BaseBackupTarget, provider: BaseUploadProvider) -> None: +def run_backup( + target: base_target.BaseBackupTarget, provider: base_provider.BaseUploadProvider +) -> None: log.info("start making backup of target: `%s`", target.env_name) with NotificationsContext( step_name=PROGRAM_STEP.BACKUP_CREATE, env_name=target.env_name @@ -137,7 +160,7 @@ def run_backup(target: BaseBackupTarget, provider: BaseUploadProvider) -> None: log.info( "backup file created: %s, starting post save upload to provider %s", backup_file, - provider.target_name, + provider.__class__.__name__, ) with NotificationsContext( step_name=PROGRAM_STEP.UPLOAD, diff --git a/backuper/models/upload_provider_models.py b/backuper/models/upload_provider_models.py index d3221b63..528f6f55 100644 --- a/backuper/models/upload_provider_models.py +++ b/backuper/models/upload_provider_models.py @@ -6,14 +6,15 @@ class ProviderModel(BaseModel): - name: config.UploadProviderEnum + name: str class DebugProviderModel(ProviderModel): - pass + name: str = config.UploadProviderEnum.LOCAL_FILES_DEBUG class GCSProviderModel(ProviderModel): + name: str = config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE bucket_name: str bucket_upload_path: str service_account_base64: SecretStr @@ -29,6 +30,7 @@ def process_service_account_base64( class AWSProviderModel(ProviderModel): + name: str = config.UploadProviderEnum.AWS_S3 bucket_name: str bucket_upload_path: str key_id: str @@ -38,5 +40,6 @@ class AWSProviderModel(ProviderModel): class AzureProviderModel(ProviderModel): + name: str = config.UploadProviderEnum.AZURE container_name: str connect_string: SecretStr diff --git a/backuper/upload_providers/__init__.py b/backuper/upload_providers/__init__.py index fdd102b9..8b137891 100644 --- 
a/backuper/upload_providers/__init__.py +++ b/backuper/upload_providers/__init__.py @@ -1,13 +1 @@ -from .aws_s3 import UploadProviderAWS -from .azure import UploadProviderAzure -from .base_provider import BaseUploadProvider -from .debug import UploadProviderLocalDebug -from .google_cloud_storage import UploadProviderGCS -__all__ = [ - "BaseUploadProvider", - "UploadProviderAWS", - "UploadProviderGCS", - "UploadProviderLocalDebug", - "UploadProviderAzure", -] diff --git a/backuper/upload_providers/aws_s3.py b/backuper/upload_providers/aws_s3.py index cf1b1746..b671b9b3 100644 --- a/backuper/upload_providers/aws_s3.py +++ b/backuper/upload_providers/aws_s3.py @@ -7,6 +7,7 @@ from pydantic import SecretStr from backuper import config, core +from backuper.models.upload_provider_models import AWSProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider log = logging.getLogger(__name__) @@ -16,33 +17,21 @@ class DeleteItemDict(TypedDict): Key: str -class UploadProviderAWS( - BaseUploadProvider, - name=config.UploadProviderEnum.AWS_S3, -): +class UploadProviderAWS(BaseUploadProvider): """AWS S3 bucket for storing backups""" - def __init__( - self, - bucket_name: str, - bucket_upload_path: str, - key_id: str, - key_secret: SecretStr, - region: str, - max_bandwidth: int | None, - **kwargs: str, - ) -> None: - self.bucket_upload_path = bucket_upload_path - self.max_bandwidth = max_bandwidth + def __init__(self, target_provider: AWSProviderModel) -> None: + self.bucket_upload_path = target_provider.bucket_upload_path + self.max_bandwidth = target_provider.max_bandwidth s3: Any = boto3.resource( "s3", - region_name=region, - aws_access_key_id=key_id, - aws_secret_access_key=key_secret.get_secret_value(), + region_name=target_provider.region, + aws_access_key_id=target_provider.key_id, + aws_secret_access_key=target_provider.key_secret.get_secret_value(), ) - self.bucket = s3.Bucket(bucket_name) + self.bucket = s3.Bucket(target_provider.bucket_name) 
self.transfer_config = TransferConfig(max_bandwidth=self.max_bandwidth) def _post_save(self, backup_file: Path) -> str: diff --git a/backuper/upload_providers/azure.py b/backuper/upload_providers/azure.py index 975ac5c9..930b7451 100644 --- a/backuper/upload_providers/azure.py +++ b/backuper/upload_providers/azure.py @@ -5,28 +5,20 @@ from pydantic import SecretStr from backuper import config, core +from backuper.models.upload_provider_models import AzureProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider log = logging.getLogger(__name__) -class UploadProviderAzure( - BaseUploadProvider, - name=config.UploadProviderEnum.AZURE, -): +class UploadProviderAzure(BaseUploadProvider): """Azure blob storage for storing backups""" - def __init__( - self, - container_name: str, - connect_string: SecretStr, - **kwargs: str, - ) -> None: - self.container_name = container_name - self.connect_str = connect_string + def __init__(self, target_provider: AzureProviderModel) -> None: + self.container_name = target_provider.container_name blob_service_client = BlobServiceClient.from_connection_string( - connect_string.get_secret_value() + target_provider.connect_string.get_secret_value() ) self.container_client = blob_service_client.get_container_client( container=self.container_name diff --git a/backuper/upload_providers/base_provider.py b/backuper/upload_providers/base_provider.py index 16eb5197..37f0c2a2 100644 --- a/backuper/upload_providers/base_provider.py +++ b/backuper/upload_providers/base_provider.py @@ -2,18 +2,16 @@ from abc import ABC, abstractmethod from pathlib import Path from typing import final - +from backuper.models.upload_provider_models import ProviderModel from backuper import config log = logging.getLogger(__name__) class BaseUploadProvider(ABC): - target_name: config.UploadProviderEnum - def __init_subclass__(cls, name: config.UploadProviderEnum) -> None: - cls.target_name = name - super().__init_subclass__() + def 
__init__(self, target_provider: ProviderModel) -> None: + self.target_provider = target_provider @final def post_save(self, backup_file: Path) -> str: diff --git a/backuper/upload_providers/debug.py b/backuper/upload_providers/debug.py index ee84b8e0..de596217 100644 --- a/backuper/upload_providers/debug.py +++ b/backuper/upload_providers/debug.py @@ -2,21 +2,19 @@ from pathlib import Path from backuper import config, core +from backuper.models.upload_provider_models import DebugProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider log = logging.getLogger(__name__) -class UploadProviderLocalDebug( - BaseUploadProvider, - name=config.UploadProviderEnum.LOCAL_FILES_DEBUG, -): +class UploadProviderLocalDebug(BaseUploadProvider): """Represent local folder `data` for storing backups. If docker volume/persistant volume is lost, so are backups. """ - def __init__(self, **kwargs: str) -> None: + def __init__(self, target_provider: DebugProviderModel) -> None: pass def _post_save(self, backup_file: Path) -> str: diff --git a/backuper/upload_providers/google_cloud_storage.py b/backuper/upload_providers/google_cloud_storage.py index c9f4a356..a9ed5eeb 100644 --- a/backuper/upload_providers/google_cloud_storage.py +++ b/backuper/upload_providers/google_cloud_storage.py @@ -7,28 +7,18 @@ from pydantic import SecretStr from backuper import config, core +from backuper.models.upload_provider_models import GCSProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider log = logging.getLogger(__name__) -class UploadProviderGCS( - BaseUploadProvider, - name=config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE, -): +class UploadProviderGCS(BaseUploadProvider): """GCS bucket for storing backups""" - def __init__( - self, - bucket_name: str, - bucket_upload_path: str, - service_account_base64: SecretStr, - chunk_size_mb: int, - chunk_timeout_secs: int, - **kwargs: str, - ) -> None: + def __init__(self, target_provider: GCSProviderModel) 
-> None: service_account_bytes = base64.b64decode( - service_account_base64.get_secret_value() + target_provider.service_account_base64.get_secret_value() ) sa_path = config.CONST_CONFIG_FOLDER_PATH / "google_auth.json" with open(sa_path, "wb") as f: @@ -36,10 +26,10 @@ def __init__( os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = str(sa_path) self.storage_client = cloud_storage.Client() - self.bucket = self.storage_client.bucket(bucket_name) - self.bucket_upload_path = bucket_upload_path - self.chunk_size_bytes = chunk_size_mb * 1024 * 1024 - self.chunk_timeout_secs = chunk_timeout_secs + self.bucket = self.storage_client.bucket(target_provider.bucket_name) + self.bucket_upload_path = target_provider.bucket_upload_path + self.chunk_size_bytes = target_provider.chunk_size_mb * 1024 * 1024 + self.chunk_timeout_secs = target_provider.chunk_timeout_secs def _post_save(self, backup_file: Path) -> str: zip_backup_file = core.run_create_zip_archive(backup_file=backup_file) From 66cc7f9ee195741f396e0615ecb908f7491f822b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Safin?= Date: Thu, 14 Mar 2024 00:47:56 +0100 Subject: [PATCH 2/5] finish rewrite of upload providers, fix mypy issues MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Rafał Safin --- backuper/main.py | 13 +++++------ backuper/upload_providers/aws_s3.py | 3 +-- backuper/upload_providers/azure.py | 3 +-- backuper/upload_providers/base_provider.py | 3 +-- backuper/upload_providers/debug.py | 2 +- .../upload_providers/google_cloud_storage.py | 1 - tests/test_main.py | 6 +++-- tests/test_storage_provider_aws.py | 17 ++++++++------ tests/test_storage_provider_azure.py | 7 ++++-- tests/test_storage_provider_gcs.py | 15 +++++++----- tests/test_storage_provider_local.py | 23 ++++++++++--------- 11 files changed, 50 insertions(+), 43 deletions(-) diff --git a/backuper/main.py b/backuper/main.py index 014cf997..d13a4f68 100644 --- a/backuper/main.py +++ 
b/backuper/main.py @@ -18,19 +18,18 @@ mysql, postgresql, ) +from backuper.notifications.notifications_context import ( + PROGRAM_STEP, + NotificationsContext, +) from backuper.upload_providers import ( + aws_s3, + azure, base_provider, debug, google_cloud_storage, - aws_s3, - azure, -) -from backuper.notifications.notifications_context import ( - PROGRAM_STEP, - NotificationsContext, ) - exit_event = threading.Event() log = logging.getLogger(__name__) diff --git a/backuper/upload_providers/aws_s3.py b/backuper/upload_providers/aws_s3.py index b671b9b3..2dcd8ddb 100644 --- a/backuper/upload_providers/aws_s3.py +++ b/backuper/upload_providers/aws_s3.py @@ -4,9 +4,8 @@ import boto3 from boto3.s3.transfer import TransferConfig -from pydantic import SecretStr -from backuper import config, core +from backuper import core from backuper.models.upload_provider_models import AWSProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider diff --git a/backuper/upload_providers/azure.py b/backuper/upload_providers/azure.py index 930b7451..9961d27f 100644 --- a/backuper/upload_providers/azure.py +++ b/backuper/upload_providers/azure.py @@ -2,9 +2,8 @@ from pathlib import Path from azure.storage.blob import BlobServiceClient -from pydantic import SecretStr -from backuper import config, core +from backuper import core from backuper.models.upload_provider_models import AzureProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider diff --git a/backuper/upload_providers/base_provider.py b/backuper/upload_providers/base_provider.py index 37f0c2a2..496b16fe 100644 --- a/backuper/upload_providers/base_provider.py +++ b/backuper/upload_providers/base_provider.py @@ -2,14 +2,13 @@ from abc import ABC, abstractmethod from pathlib import Path from typing import final + from backuper.models.upload_provider_models import ProviderModel -from backuper import config log = logging.getLogger(__name__) class BaseUploadProvider(ABC): - def 
__init__(self, target_provider: ProviderModel) -> None: self.target_provider = target_provider diff --git a/backuper/upload_providers/debug.py b/backuper/upload_providers/debug.py index de596217..886bca20 100644 --- a/backuper/upload_providers/debug.py +++ b/backuper/upload_providers/debug.py @@ -1,7 +1,7 @@ import logging from pathlib import Path -from backuper import config, core +from backuper import core from backuper.models.upload_provider_models import DebugProviderModel from backuper.upload_providers.base_provider import BaseUploadProvider diff --git a/backuper/upload_providers/google_cloud_storage.py b/backuper/upload_providers/google_cloud_storage.py index a9ed5eeb..98d1761d 100644 --- a/backuper/upload_providers/google_cloud_storage.py +++ b/backuper/upload_providers/google_cloud_storage.py @@ -4,7 +4,6 @@ from pathlib import Path import google.cloud.storage as cloud_storage -from pydantic import SecretStr from backuper import config, core from backuper.models.upload_provider_models import GCSProviderModel diff --git a/tests/test_main.py b/tests/test_main.py index c1e6487b..91e68b98 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -7,8 +7,10 @@ import pytest from backuper import config, core, main +from backuper.models import upload_provider_models from backuper.notifications.notifications_context import NotificationsContext from backuper.upload_providers.debug import UploadProviderLocalDebug +from backuper.upload_providers.google_cloud_storage import UploadProviderGCS from .conftest import ( ALL_MARIADB_DBS_TARGETS, @@ -59,7 +61,7 @@ def test_backup_provider(monkeypatch: pytest.MonkeyPatch) -> None: "name=gcs bucket_name=name bucket_upload_path=test service_account_base64=Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo=", ) provider = main.backup_provider() - assert provider.target_name == "gcs" + assert provider.__class__.__name__ == UploadProviderGCS.__class__.__name__ def test_main_single(monkeypatch: pytest.MonkeyPatch) -> None: @@ -127,7 +129,7 @@ def 
test_run_backup_notifications_fail_message_is_fired_when_it_fails( backup_file = Path("/tmp/fake") backup_mock = Mock(return_value=backup_file, side_effect=make_backup_side_effect) monkeypatch.setattr(target, "_backup", backup_mock) - provider = UploadProviderLocalDebug() + provider = UploadProviderLocalDebug(upload_provider_models.DebugProviderModel()) monkeypatch.setattr(provider, "_post_save", Mock(side_effect=post_save_side_effect)) monkeypatch.setattr(provider, "_clean", Mock(side_effect=clean_side_effect)) diff --git a/tests/test_storage_provider_aws.py b/tests/test_storage_provider_aws.py index 1d8acfe2..408ea739 100644 --- a/tests/test_storage_provider_aws.py +++ b/tests/test_storage_provider_aws.py @@ -6,7 +6,8 @@ from freezegun import freeze_time from pydantic import SecretStr -from backuper.upload_providers import UploadProviderAWS +from backuper.models.upload_provider_models import AWSProviderModel +from backuper.upload_providers.aws_s3 import UploadProviderAWS @pytest.fixture(autouse=True) @@ -16,12 +17,14 @@ def mock_google_storage_client(monkeypatch: pytest.MonkeyPatch) -> None: def get_test_aws() -> UploadProviderAWS: return UploadProviderAWS( - bucket_name="name", - bucket_upload_path="test123", - key_id="id", - key_secret=SecretStr("secret"), - region="fake region", - max_bandwidth=None, + AWSProviderModel( + bucket_name="name", + bucket_upload_path="test123", + key_id="id", + key_secret=SecretStr("secret"), + region="fake region", + max_bandwidth=None, + ) ) diff --git a/tests/test_storage_provider_azure.py b/tests/test_storage_provider_azure.py index 6f048fa1..174da908 100644 --- a/tests/test_storage_provider_azure.py +++ b/tests/test_storage_provider_azure.py @@ -6,7 +6,8 @@ from freezegun import freeze_time from pydantic import SecretStr -from backuper.upload_providers import UploadProviderAzure +from backuper.models.upload_provider_models import AzureProviderModel +from backuper.upload_providers.azure import UploadProviderAzure 
@pytest.fixture(autouse=True) @@ -15,7 +16,9 @@ def mock_azure_service_client(monkeypatch: pytest.MonkeyPatch) -> None: def get_test_azure() -> UploadProviderAzure: - return UploadProviderAzure(container_name="test", connect_string=SecretStr("any")) + return UploadProviderAzure( + AzureProviderModel(container_name="test", connect_string=SecretStr("any")) + ) def test_azure_post_save_fails_on_fail_upload( diff --git a/tests/test_storage_provider_gcs.py b/tests/test_storage_provider_gcs.py index 62b1b5db..d349c79e 100644 --- a/tests/test_storage_provider_gcs.py +++ b/tests/test_storage_provider_gcs.py @@ -6,7 +6,8 @@ from freezegun import freeze_time from pydantic import SecretStr -from backuper.upload_providers import UploadProviderGCS +from backuper.models.upload_provider_models import GCSProviderModel +from backuper.upload_providers.google_cloud_storage import UploadProviderGCS @pytest.fixture(autouse=True) @@ -16,11 +17,13 @@ def mock_google_storage_client(monkeypatch: pytest.MonkeyPatch) -> None: def get_test_gcs() -> UploadProviderGCS: return UploadProviderGCS( - bucket_name="name", - bucket_upload_path="test", - service_account_base64=SecretStr("Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo="), - chunk_size_mb=100, - chunk_timeout_secs=100, + GCSProviderModel( + bucket_name="name", + bucket_upload_path="test", + service_account_base64=SecretStr("Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo="), + chunk_size_mb=100, + chunk_timeout_secs=100, + ) ) diff --git a/tests/test_storage_provider_local.py b/tests/test_storage_provider_local.py index cd2f9888..cca99a0f 100644 --- a/tests/test_storage_provider_local.py +++ b/tests/test_storage_provider_local.py @@ -3,14 +3,17 @@ import pytest from freezegun import freeze_time -from backuper.upload_providers import UploadProviderLocalDebug +from backuper.models.upload_provider_models import DebugProviderModel +from backuper.upload_providers.debug import UploadProviderLocalDebug + + +def get_test_debug() -> UploadProviderLocalDebug: + return 
UploadProviderLocalDebug(DebugProviderModel()) @pytest.mark.parametrize("method_name", ["_clean", "clean"]) -def test_local_debug_clean_file( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch, method_name: str -) -> None: - local = UploadProviderLocalDebug() +def test_local_debug_clean_file(tmp_path: Path, method_name: str) -> None: + local = get_test_debug() fake_backup_dir_path = tmp_path / "fake_env_name" fake_backup_dir_path.mkdir() @@ -38,10 +41,8 @@ def test_local_debug_clean_file( @pytest.mark.parametrize("method_name", ["_clean", "clean"]) -def test_local_debug_clean_folder( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch, method_name: str -) -> None: - local = UploadProviderLocalDebug() +def test_local_debug_clean_folder(tmp_path: Path, method_name: str) -> None: + local = get_test_debug() fake_backup_dir_path = tmp_path / "fake_env_name" fake_backup_dir_path.mkdir() @@ -71,9 +72,9 @@ def test_local_debug_clean_folder( @freeze_time("2023-08-27") @pytest.mark.parametrize("method_name", ["_clean", "clean"]) def test_local_debug_respects_min_retention_days_param_and_not_delete_any_file( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch, method_name: str + tmp_path: Path, method_name: str ) -> None: - local = UploadProviderLocalDebug() + local = get_test_debug() fake_backup_dir_path = tmp_path / "fake_env_name" fake_backup_dir_path.mkdir() From a96fe4831371a017ff32ca3db7808628b9757f37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Safin?= Date: Thu, 14 Mar 2024 00:53:17 +0100 Subject: [PATCH 3/5] last test fixes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Rafał Safin --- backuper/main.py | 2 +- backuper/upload_providers/base_provider.py | 4 ++-- tests/test_main.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/backuper/main.py b/backuper/main.py index d13a4f68..8d355fd1 100644 --- a/backuper/main.py +++ b/backuper/main.py @@ -70,7 +70,7 @@ def backup_provider() -> 
base_provider.BaseUploadProvider: provider_target_cls = provider_cls_map[provider_model.name] log.debug("initializing %s with %s", provider_target_cls, provider_model) - res_backup_provider = provider_target_cls(**provider_model.model_dump()) + res_backup_provider = provider_target_cls(target_provider=provider_model) log.info( "success initializing provider: `%s`", provider_model.name, diff --git a/backuper/upload_providers/base_provider.py b/backuper/upload_providers/base_provider.py index 496b16fe..0db9084e 100644 --- a/backuper/upload_providers/base_provider.py +++ b/backuper/upload_providers/base_provider.py @@ -9,8 +9,8 @@ class BaseUploadProvider(ABC): - def __init__(self, target_provider: ProviderModel) -> None: - self.target_provider = target_provider + def __init__(self, target_provider: ProviderModel) -> None: # pragma: no cover + pass @final def post_save(self, backup_file: Path) -> str: diff --git a/tests/test_main.py b/tests/test_main.py index 91e68b98..1e8937a3 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -61,7 +61,7 @@ def test_backup_provider(monkeypatch: pytest.MonkeyPatch) -> None: "name=gcs bucket_name=name bucket_upload_path=test service_account_base64=Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo=", ) provider = main.backup_provider() - assert provider.__class__.__name__ == UploadProviderGCS.__class__.__name__ + assert provider.__class__.__name__ == UploadProviderGCS.__name__ def test_main_single(monkeypatch: pytest.MonkeyPatch) -> None: From fb6cf1477649bb8c030f5f7569633ed5aae6b600 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Safin?= Date: Thu, 14 Mar 2024 01:01:54 +0100 Subject: [PATCH 4/5] move maps to separate files MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Rafał Safin --- backuper/backup_targets/targets_mapping.py | 20 +++++++++++ backuper/core.py | 25 ++------------ backuper/main.py | 34 +++---------------- backuper/models/models_mapping.py | 21 ++++++++++++ 
.../upload_providers/providers_mapping.py | 18 ++++++++++ 5 files changed, 66 insertions(+), 52 deletions(-) create mode 100644 backuper/backup_targets/targets_mapping.py create mode 100644 backuper/models/models_mapping.py create mode 100644 backuper/upload_providers/providers_mapping.py diff --git a/backuper/backup_targets/targets_mapping.py b/backuper/backup_targets/targets_mapping.py new file mode 100644 index 00000000..7824cce7 --- /dev/null +++ b/backuper/backup_targets/targets_mapping.py @@ -0,0 +1,20 @@ +from backuper.backup_targets import ( + base_target, + file, + folder, + mariadb, + mysql, + postgresql, +) + +from backuper.config import BackupTargetEnum + + +def get_target_cls_map() -> dict[str, type[base_target.BaseBackupTarget]]: + return { + BackupTargetEnum.FILE: file.File, + BackupTargetEnum.FOLDER: folder.Folder, + BackupTargetEnum.MARIADB: mariadb.MariaDB, + BackupTargetEnum.POSTGRESQL: postgresql.PostgreSQL, + BackupTargetEnum.MYSQL: mysql.MySQL, + } diff --git a/backuper/core.py b/backuper/core.py index 8ea8b27a..751792b4 100644 --- a/backuper/core.py +++ b/backuper/core.py @@ -13,7 +13,7 @@ from pydantic import BaseModel from backuper import config -from backuper.models import backup_target_models, upload_provider_models +from backuper.models import backup_target_models, models_mapping, upload_provider_models log = logging.getLogger(__name__) @@ -50,25 +50,6 @@ def run_subprocess(shell_args: str) -> str: return p.stdout -def get_target_map() -> dict[str, type[backup_target_models.TargetModel]]: - return { - config.BackupTargetEnum.FILE: backup_target_models.SingleFileTargetModel, - config.BackupTargetEnum.FOLDER: backup_target_models.DirectoryTargetModel, - config.BackupTargetEnum.MARIADB: backup_target_models.MariaDBTargetModel, - config.BackupTargetEnum.MYSQL: backup_target_models.MySQLTargetModel, - config.BackupTargetEnum.POSTGRESQL: backup_target_models.PostgreSQLTargetModel, - } - - -def get_provider_map() -> dict[str, 
type[upload_provider_models.ProviderModel]]: - return { - config.UploadProviderEnum.AZURE: upload_provider_models.AzureProviderModel, - config.UploadProviderEnum.LOCAL_FILES_DEBUG: upload_provider_models.DebugProviderModel, - config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE: upload_provider_models.GCSProviderModel, - config.UploadProviderEnum.AWS_S3: upload_provider_models.AWSProviderModel, - } - - def remove_path(path: Path) -> None: if path.exists(): if path.is_file() or path.is_symlink(): @@ -164,7 +145,7 @@ def _validate_model( def create_target_models() -> list[backup_target_models.TargetModel]: - target_map = get_target_map() + target_map = models_mapping.get_target_map() targets: list[backup_target_models.TargetModel] = [] for env_name, env_value in os.environ.items(): @@ -182,7 +163,7 @@ def create_target_models() -> list[backup_target_models.TargetModel]: def create_provider_model() -> upload_provider_models.ProviderModel: - provider_map = get_provider_map() + provider_map = models_mapping.get_provider_map() log.info("start validating BACKUP_PROVIDER environment variable") diff --git a/backuper/main.py b/backuper/main.py index 8d355fd1..6dbe1029 100644 --- a/backuper/main.py +++ b/backuper/main.py @@ -12,22 +12,15 @@ from backuper import config, core from backuper.backup_targets import ( base_target, - file, - folder, - mariadb, - mysql, - postgresql, + targets_mapping, ) from backuper.notifications.notifications_context import ( PROGRAM_STEP, NotificationsContext, ) from backuper.upload_providers import ( - aws_s3, - azure, base_provider, - debug, - google_cloud_storage, + providers_mapping, ) exit_event = threading.Event() @@ -39,28 +32,9 @@ def quit(sig: int, frame: FrameType | None) -> None: exit_event.set() -def _get_provider_cls_map() -> dict[str, type[base_provider.BaseUploadProvider]]: - return { - config.UploadProviderEnum.AWS_S3: aws_s3.UploadProviderAWS, - config.UploadProviderEnum.AZURE: azure.UploadProviderAzure, - 
config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE: google_cloud_storage.UploadProviderGCS, - config.UploadProviderEnum.LOCAL_FILES_DEBUG: debug.UploadProviderLocalDebug, - } - - -def _get_target_cls_map() -> dict[str, type[base_target.BaseBackupTarget]]: - return { - config.BackupTargetEnum.FILE: file.File, - config.BackupTargetEnum.FOLDER: folder.Folder, - config.BackupTargetEnum.MARIADB: mariadb.MariaDB, - config.BackupTargetEnum.POSTGRESQL: postgresql.PostgreSQL, - config.BackupTargetEnum.MYSQL: mysql.MySQL, - } - - @NotificationsContext(step_name=PROGRAM_STEP.SETUP_PROVIDER) def backup_provider() -> base_provider.BaseUploadProvider: - provider_cls_map = _get_provider_cls_map() + provider_cls_map = providers_mapping.get_provider_cls_map() provider_model = core.create_provider_model() log.info( @@ -80,7 +54,7 @@ def backup_provider() -> base_provider.BaseUploadProvider: @NotificationsContext(step_name=PROGRAM_STEP.SETUP_TARGETS) def backup_targets() -> list[base_target.BaseBackupTarget]: - backup_target_cls_map = _get_target_cls_map() + backup_target_cls_map = targets_mapping.get_target_cls_map() backup_targets: list[base_target.BaseBackupTarget] = [] target_models = core.create_target_models() diff --git a/backuper/models/models_mapping.py b/backuper/models/models_mapping.py new file mode 100644 index 00000000..c6c14009 --- /dev/null +++ b/backuper/models/models_mapping.py @@ -0,0 +1,21 @@ +from backuper import config +from backuper.models import backup_target_models, upload_provider_models + + +def get_target_map() -> dict[str, type[backup_target_models.TargetModel]]: + return { + config.BackupTargetEnum.FILE: backup_target_models.SingleFileTargetModel, + config.BackupTargetEnum.FOLDER: backup_target_models.DirectoryTargetModel, + config.BackupTargetEnum.MARIADB: backup_target_models.MariaDBTargetModel, + config.BackupTargetEnum.MYSQL: backup_target_models.MySQLTargetModel, + config.BackupTargetEnum.POSTGRESQL: backup_target_models.PostgreSQLTargetModel, + } + + 
+def get_provider_map() -> dict[str, type[upload_provider_models.ProviderModel]]: + return { + config.UploadProviderEnum.AZURE: upload_provider_models.AzureProviderModel, + config.UploadProviderEnum.LOCAL_FILES_DEBUG: upload_provider_models.DebugProviderModel, + config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE: upload_provider_models.GCSProviderModel, + config.UploadProviderEnum.AWS_S3: upload_provider_models.AWSProviderModel, + } diff --git a/backuper/upload_providers/providers_mapping.py b/backuper/upload_providers/providers_mapping.py new file mode 100644 index 00000000..60f8d44f --- /dev/null +++ b/backuper/upload_providers/providers_mapping.py @@ -0,0 +1,18 @@ +from backuper.upload_providers import ( + aws_s3, + azure, + base_provider, + debug, + google_cloud_storage, +) + +from backuper.config import UploadProviderEnum + + +def get_provider_cls_map() -> dict[str, type[base_provider.BaseUploadProvider]]: + return { + UploadProviderEnum.AWS_S3: aws_s3.UploadProviderAWS, + UploadProviderEnum.AZURE: azure.UploadProviderAzure, + UploadProviderEnum.GOOGLE_CLOUD_STORAGE: google_cloud_storage.UploadProviderGCS, + UploadProviderEnum.LOCAL_FILES_DEBUG: debug.UploadProviderLocalDebug, + } From 4715d2e87f84df3ffda9a940139d69f8b623d70a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Safin?= Date: Thu, 14 Mar 2024 01:26:27 +0100 Subject: [PATCH 5/5] update lint ruff rules, fix errors, remove depr warnings and add new, split lines around to enforce 88 length MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Rafał Safin --- backuper/backup_targets/mariadb.py | 5 +- backuper/backup_targets/mysql.py | 5 +- backuper/backup_targets/postgresql.py | 10 +- backuper/backup_targets/targets_mapping.py | 1 - backuper/config.py | 5 +- backuper/main.py | 5 +- backuper/models/models_mapping.py | 20 +- backuper/models/upload_provider_models.py | 2 +- .../upload_providers/providers_mapping.py | 5 +- pyproject.toml | 10 +- 
tests/test_backup_target_mariadb.py | 5 +- tests/test_backup_target_mysql.py | 5 +- tests/test_backup_target_postgresql.py | 5 +- tests/test_core.py | 237 +++++++++--------- tests/test_main.py | 3 +- 15 files changed, 171 insertions(+), 152 deletions(-) diff --git a/backuper/backup_targets/mariadb.py b/backuper/backup_targets/mariadb.py index c512fc89..d162b4dc 100644 --- a/backuper/backup_targets/mariadb.py +++ b/backuper/backup_targets/mariadb.py @@ -79,7 +79,10 @@ def _mariadb_connection(self) -> str: version = match.group(0) break if version is None: # pragma: no cover - msg = f"mariadb_connection error processing sql result, version unknown: {result}" + msg = ( + f"mariadb_connection error processing sql result, " + f"version unknown: {result}" + ) log.error(msg) raise ValueError(msg) log.info("mariadb_connection calculated version: %s", version) diff --git a/backuper/backup_targets/mysql.py b/backuper/backup_targets/mysql.py index 7ae9d545..ecc8167d 100644 --- a/backuper/backup_targets/mysql.py +++ b/backuper/backup_targets/mysql.py @@ -79,7 +79,10 @@ def _mysql_connection(self) -> str: version = match.group(0) break if version is None: # pragma: no cover - msg = f"mysql_connection error processing sql result, version unknown: {result}" + msg = ( + f"mysql_connection error processing sql result, " + f"version unknown: {result}" + ) log.error(msg) raise ValueError(msg) log.info("mysql_connection calculated version: %s", version) diff --git a/backuper/backup_targets/postgresql.py b/backuper/backup_targets/postgresql.py index 1046f4f2..84006898 100644 --- a/backuper/backup_targets/postgresql.py +++ b/backuper/backup_targets/postgresql.py @@ -95,7 +95,10 @@ def _postgres_connection(self) -> str: version = match.group(0).strip().split(" ")[1] break if version is None: # pragma: no cover - msg = f"postgres_connection error processing sql result, version unknown: {result}" + msg = ( + f"postgres_connection error processing sql result, " + f"version unknown: {result}" 
+ ) log.error(msg) raise ValueError(msg) log.info("postgres_connection calculated version: %s", version) @@ -105,7 +108,10 @@ def _backup(self) -> Path: escaped_dbname = core.safe_text_version(self.target_model.db) name = f"{escaped_dbname}_{self.db_version}" out_file = core.get_new_backup_path(self.env_name, name, sql=True) - shell_pg_dump_db = f"pg_dump --clean --if-exists -v -O -d {self.escaped_conn_uri} -f {out_file}" + shell_pg_dump_db = ( + f"pg_dump --clean --if-exists -v -O -d " + f"{self.escaped_conn_uri} -f {out_file}" + ) log.debug("start pg_dump in subprocess: %s", shell_pg_dump_db) core.run_subprocess(shell_pg_dump_db) log.debug("finished pg_dump, output: %s", out_file) diff --git a/backuper/backup_targets/targets_mapping.py b/backuper/backup_targets/targets_mapping.py index 7824cce7..f1748b72 100644 --- a/backuper/backup_targets/targets_mapping.py +++ b/backuper/backup_targets/targets_mapping.py @@ -6,7 +6,6 @@ mysql, postgresql, ) - from backuper.config import BackupTargetEnum diff --git a/backuper/config.py b/backuper/config.py index a5bcfd3f..d62c2f59 100644 --- a/backuper/config.py +++ b/backuper/config.py @@ -27,7 +27,7 @@ class UploadProviderEnum(StrEnum): LOCAL_FILES_DEBUG = "debug" - GOOGLE_CLOUD_STORAGE = "gcs" + GCS = "gcs" AWS_S3 = "aws" AZURE = "azure" @@ -81,7 +81,8 @@ def check_smtp_setup(self) -> Self: smtp_settings = [self.SMTP_HOST, self.SMTP_FROM_ADDR, self.SMTP_TO_ADDRS] if any(smtp_settings) != all(smtp_settings): # pragma: no cover raise ValueError( - "parameters SMTP_HOST, SMTP_FROM_ADDR, SMTP_TO_ADDRS must be all either set or not." + "parameters SMTP_HOST, SMTP_FROM_ADDR, SMTP_TO_ADDRS " + "must be all either set or not." ) return self diff --git a/backuper/main.py b/backuper/main.py index 6dbe1029..12024532 100644 --- a/backuper/main.py +++ b/backuper/main.py @@ -115,8 +115,9 @@ def shutdown() -> NoReturn: # pragma: no cover sys.exit(0) else: log.warning( - "noooo, exiting! 
i am now killing myself with %d daemon threads force killed. " - "you can extend this time using environment SIGTERM_TIMEOUT_SECS.", + "noooo, exiting! i am now killing myself with %d daemon threads " + "force killed. you can extend this time using environment " + "SIGTERM_TIMEOUT_SECS.", threading.active_count() - 1, ) sys.exit(1) diff --git a/backuper/models/models_mapping.py b/backuper/models/models_mapping.py index c6c14009..82299ada 100644 --- a/backuper/models/models_mapping.py +++ b/backuper/models/models_mapping.py @@ -1,21 +1,21 @@ -from backuper import config +from backuper.config import BackupTargetEnum, UploadProviderEnum from backuper.models import backup_target_models, upload_provider_models def get_target_map() -> dict[str, type[backup_target_models.TargetModel]]: return { - config.BackupTargetEnum.FILE: backup_target_models.SingleFileTargetModel, - config.BackupTargetEnum.FOLDER: backup_target_models.DirectoryTargetModel, - config.BackupTargetEnum.MARIADB: backup_target_models.MariaDBTargetModel, - config.BackupTargetEnum.MYSQL: backup_target_models.MySQLTargetModel, - config.BackupTargetEnum.POSTGRESQL: backup_target_models.PostgreSQLTargetModel, + BackupTargetEnum.FILE: backup_target_models.SingleFileTargetModel, + BackupTargetEnum.FOLDER: backup_target_models.DirectoryTargetModel, + BackupTargetEnum.MARIADB: backup_target_models.MariaDBTargetModel, + BackupTargetEnum.MYSQL: backup_target_models.MySQLTargetModel, + BackupTargetEnum.POSTGRESQL: backup_target_models.PostgreSQLTargetModel, } def get_provider_map() -> dict[str, type[upload_provider_models.ProviderModel]]: return { - config.UploadProviderEnum.AZURE: upload_provider_models.AzureProviderModel, - config.UploadProviderEnum.LOCAL_FILES_DEBUG: upload_provider_models.DebugProviderModel, - config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE: upload_provider_models.GCSProviderModel, - config.UploadProviderEnum.AWS_S3: upload_provider_models.AWSProviderModel, + UploadProviderEnum.AZURE: 
upload_provider_models.AzureProviderModel, + UploadProviderEnum.LOCAL_FILES_DEBUG: upload_provider_models.DebugProviderModel, + UploadProviderEnum.GCS: upload_provider_models.GCSProviderModel, + UploadProviderEnum.AWS_S3: upload_provider_models.AWSProviderModel, } diff --git a/backuper/models/upload_provider_models.py b/backuper/models/upload_provider_models.py index 528f6f55..c1d2122b 100644 --- a/backuper/models/upload_provider_models.py +++ b/backuper/models/upload_provider_models.py @@ -14,7 +14,7 @@ class DebugProviderModel(ProviderModel): class GCSProviderModel(ProviderModel): - name: str = config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE + name: str = config.UploadProviderEnum.GCS bucket_name: str bucket_upload_path: str service_account_base64: SecretStr diff --git a/backuper/upload_providers/providers_mapping.py b/backuper/upload_providers/providers_mapping.py index 60f8d44f..771ac41c 100644 --- a/backuper/upload_providers/providers_mapping.py +++ b/backuper/upload_providers/providers_mapping.py @@ -1,3 +1,4 @@ +from backuper.config import UploadProviderEnum from backuper.upload_providers import ( aws_s3, azure, @@ -6,13 +7,11 @@ google_cloud_storage, ) -from backuper.config import UploadProviderEnum - def get_provider_cls_map() -> dict[str, type[base_provider.BaseUploadProvider]]: return { UploadProviderEnum.AWS_S3: aws_s3.UploadProviderAWS, UploadProviderEnum.AZURE: azure.UploadProviderAzure, - UploadProviderEnum.GOOGLE_CLOUD_STORAGE: google_cloud_storage.UploadProviderGCS, + UploadProviderEnum.GCS: google_cloud_storage.UploadProviderGCS, UploadProviderEnum.LOCAL_FILES_DEBUG: debug.UploadProviderLocalDebug, } diff --git a/pyproject.toml b/pyproject.toml index 4f6a87c2..4f299c75 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,8 +54,8 @@ env = [ "ZIP_ARCHIVE_PASSWORD=very_unpleasant:password-_-12!@#%^&*()/;><.,][`~'", ] filterwarnings = [ - "ignore:Deprecated call to `pkg_resources.declare_namespace\\('google:DeprecationWarning", - 
"ignore:pkg_resources is deprecated as an API:DeprecationWarning", + "ignore:Type google._upb._message.ScalarMapContainer uses PyType_Spec", + "ignore:Type google._upb._message.MessageMapContainer uses PyType_Spec", ] [tool.ruff] @@ -63,12 +63,8 @@ target-version = "py312" [tool.ruff.lint] # pycodestyle, pyflakes, isort, pylint, pyupgrade -ignore = ["E501"] select = ["E", "F", "I", "PL", "UP", "W"] -[tool.ruff.lint.pylint] -max-args = 12 - [tool.coverage.run] omit = ["backuper/tools/*"] source = ["backuper"] @@ -77,8 +73,6 @@ source = ["backuper"] ignore_missing_imports = true python_version = "3.12" strict = true -warn_return_any = true -warn_unused_configs = true [tool.poetry.scripts] backuper = "backuper.main:main" diff --git a/tests/test_backup_target_mariadb.py b/tests/test_backup_target_mariadb.py index 66494ba4..32621e0d 100644 --- a/tests/test_backup_target_mariadb.py +++ b/tests/test_backup_target_mariadb.py @@ -43,6 +43,9 @@ def test_run_mariadb_dump( db = MariaDB(target_model=mariadb_target) out_backup = db._backup() - out_file = f"{db.env_name}/{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql" + out_file = ( + f"{db.env_name}/" + f"{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql" + ) out_path = config.CONST_BACKUP_FOLDER_PATH / out_file assert out_backup == out_path diff --git a/tests/test_backup_target_mysql.py b/tests/test_backup_target_mysql.py index fab2dca1..5e89893c 100644 --- a/tests/test_backup_target_mysql.py +++ b/tests/test_backup_target_mysql.py @@ -39,6 +39,9 @@ def test_run_mysqldump( db = MySQL(target_model=mysql_target) out_backup = db._backup() - out_file = f"{db.env_name}/{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql" + out_file = ( + f"{db.env_name}/" + f"{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql" + ) out_path = config.CONST_BACKUP_FOLDER_PATH / out_file assert out_backup == out_path diff 
--git a/tests/test_backup_target_postgresql.py b/tests/test_backup_target_postgresql.py index d1640482..ec1a475e 100644 --- a/tests/test_backup_target_postgresql.py +++ b/tests/test_backup_target_postgresql.py @@ -45,6 +45,9 @@ def test_run_pg_dump( db = PostgreSQL(target_model=postgres_target) out_backup = db._backup() - out_file = f"{db.env_name}/{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql" + out_file = ( + f"{db.env_name}/" + f"{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql" + ) out_path = config.CONST_BACKUP_FOLDER_PATH / out_file assert out_backup == out_path diff --git a/tests/test_core.py b/tests/test_core.py index c0611f0f..373ad23d 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -73,123 +73,126 @@ def test_run_create_zip_archive( assert fake_backup_file_out.exists() -@pytest.mark.parametrize( - "env_lst,valid", - [ - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port=5432 password=secret cron_rule=* * * * *", - ), - ], - True, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port=5432 password=secret cron_rule=* * * * *", - ), - ( - "MYSQL_FIRST_DB", - "host=localhost port=3306 password=secret cron_rule=* * * * *", - ), - ], - True, - ), - ( - [ - ( - "MYSQL_SECOND_DB", - "host=10.0.0.1 port=3306 user=foo password=change_me! db=bar cron_rule=0 5 * * *", - ) - ], - True, - ), - ( - [ - ( - "MARIADB_THIRD_DB", - "host=192.168.1.5 port=3306 user=root password=change_me_please! 
db=project cron_rule=15 */3 * * * max_backups=20", - ) - ], - True, - ), - ( - [ - ( - "SINGLEFILE_THIRD", - f"abs_path={Path(__file__)} cron_rule=15 */3 * * * max_backups=20", - ) - ], - True, - ), - ( - [ - ( - "DIRECTORY_FIRST", - f"abs_path={Path(__file__).parent} cron_rule=15 */3 * * * max_backups=20", - ) - ], - True, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhostport=5432 password=secret cron_rule=* * * * *", - ), - ], - True, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port=axxx password=secret cron_rule=* * * * *", - ), - ], - False, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port=111 passwor=secret cron_rule=* * * * *", - ), - ], - False, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port=111 password=secret cron_rule=* ** * *", - ), - ], - False, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port=5432 password=secretcron_rule=* * * * *", - ), - ], - False, - ), - ( - [ - ( - "POSTGRESQL_FIRST_DB", - "host=localhost port5432 password=secret cron_rule=* * * * *", - ), - ], - True, - ), - ], -) +test_data = [ + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port=5432 password=secret cron_rule=* * * * *", + ), + ], + True, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port=5432 password=secret cron_rule=* * * * *", + ), + ( + "MYSQL_FIRST_DB", + "host=localhost port=3306 password=secret cron_rule=* * * * *", + ), + ], + True, + ), + ( + [ + ( + "MYSQL_SECOND_DB", + "host=10.0.0.1 port=3306 user=foo password=change_me!" + " db=bar cron_rule=0 5 * * *", + ) + ], + True, + ), + ( + [ + ( + "MARIADB_THIRD_DB", + "host=192.168.1.5 port=3306 user=root password=change_me_please! 
" + "db=project cron_rule=15 */3 * * * max_backups=20", + ) + ], + True, + ), + ( + [ + ( + "SINGLEFILE_THIRD", + f"abs_path={Path(__file__)} cron_rule=15 */3 * * * max_backups=20", + ) + ], + True, + ), + ( + [ + ( + "DIRECTORY_FIRST", + f"abs_path={Path(__file__).parent} cron_rule=15 */3 * * * " + "max_backups=20", + ) + ], + True, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhostport=5432 password=secret cron_rule=* * * * *", + ), + ], + True, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port=axxx password=secret cron_rule=* * * * *", + ), + ], + False, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port=111 passwor=secret cron_rule=* * * * *", + ), + ], + False, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port=111 password=secret cron_rule=* ** * *", + ), + ], + False, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port=5432 password=secretcron_rule=* * * * *", + ), + ], + False, + ), + ( + [ + ( + "POSTGRESQL_FIRST_DB", + "host=localhost port5432 password=secret cron_rule=* * * * *", + ), + ], + True, + ), +] + + +@pytest.mark.parametrize("env_lst,valid", test_data) def test_create_backup_targets( env_lst: list[tuple[str, str]], valid: bool, monkeypatch: pytest.MonkeyPatch ) -> None: diff --git a/tests/test_main.py b/tests/test_main.py index 1e8937a3..9dc9d68e 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -58,7 +58,8 @@ def test_backup_provider(monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setattr( config.options, "BACKUP_PROVIDER", - "name=gcs bucket_name=name bucket_upload_path=test service_account_base64=Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo=", + "name=gcs bucket_name=name bucket_upload_path=test " + "service_account_base64=Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo=", ) provider = main.backup_provider() assert provider.__class__.__name__ == UploadProviderGCS.__name__