diff --git a/backuper/backup_targets/mariadb.py b/backuper/backup_targets/mariadb.py
index c512fc8..d162b4d 100644
--- a/backuper/backup_targets/mariadb.py
+++ b/backuper/backup_targets/mariadb.py
@@ -79,7 +79,10 @@ def _mariadb_connection(self) -> str:
             version = match.group(0)
             break
         if version is None:  # pragma: no cover
-            msg = f"mariadb_connection error processing sql result, version unknown: {result}"
+            msg = (
+                f"mariadb_connection error processing sql result, "
+                f"version unknown: {result}"
+            )
             log.error(msg)
             raise ValueError(msg)
         log.info("mariadb_connection calculated version: %s", version)
diff --git a/backuper/backup_targets/mysql.py b/backuper/backup_targets/mysql.py
index 7ae9d54..ecc8167 100644
--- a/backuper/backup_targets/mysql.py
+++ b/backuper/backup_targets/mysql.py
@@ -79,7 +79,10 @@ def _mysql_connection(self) -> str:
             version = match.group(0)
             break
         if version is None:  # pragma: no cover
-            msg = f"mysql_connection error processing sql result, version unknown: {result}"
+            msg = (
+                f"mysql_connection error processing sql result, "
+                f"version unknown: {result}"
+            )
             log.error(msg)
             raise ValueError(msg)
         log.info("mysql_connection calculated version: %s", version)
diff --git a/backuper/backup_targets/postgresql.py b/backuper/backup_targets/postgresql.py
index 1046f4f..8400689 100644
--- a/backuper/backup_targets/postgresql.py
+++ b/backuper/backup_targets/postgresql.py
@@ -95,7 +95,10 @@ def _postgres_connection(self) -> str:
             version = match.group(0).strip().split(" ")[1]
             break
         if version is None:  # pragma: no cover
-            msg = f"postgres_connection error processing sql result, version unknown: {result}"
+            msg = (
+                f"postgres_connection error processing sql result, "
+                f"version unknown: {result}"
+            )
             log.error(msg)
             raise ValueError(msg)
         log.info("postgres_connection calculated version: %s", version)
@@ -105,7 +108,10 @@ def _backup(self) -> Path:
         escaped_dbname = core.safe_text_version(self.target_model.db)
         name = f"{escaped_dbname}_{self.db_version}"
         out_file = core.get_new_backup_path(self.env_name, name, sql=True)
-        shell_pg_dump_db = f"pg_dump --clean --if-exists -v -O -d {self.escaped_conn_uri} -f {out_file}"
+        shell_pg_dump_db = (
+            f"pg_dump --clean --if-exists -v -O -d "
+            f"{self.escaped_conn_uri} -f {out_file}"
+        )
         log.debug("start pg_dump in subprocess: %s", shell_pg_dump_db)
         core.run_subprocess(shell_pg_dump_db)
         log.debug("finished pg_dump, output: %s", out_file)
diff --git a/backuper/backup_targets/targets_mapping.py b/backuper/backup_targets/targets_mapping.py
new file mode 100644
index 0000000..f1748b7
--- /dev/null
+++ b/backuper/backup_targets/targets_mapping.py
@@ -0,0 +1,19 @@
+from backuper.backup_targets import (
+    base_target,
+    file,
+    folder,
+    mariadb,
+    mysql,
+    postgresql,
+)
+from backuper.config import BackupTargetEnum
+
+
+def get_target_cls_map() -> dict[str, type[base_target.BaseBackupTarget]]:
+    return {
+        BackupTargetEnum.FILE: file.File,
+        BackupTargetEnum.FOLDER: folder.Folder,
+        BackupTargetEnum.MARIADB: mariadb.MariaDB,
+        BackupTargetEnum.POSTGRESQL: postgresql.PostgreSQL,
+        BackupTargetEnum.MYSQL: mysql.MySQL,
+    }
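Note: get_target_cls_map() above replaces the dict literal previously built inline in main.backup_targets() (see the main.py hunk below). A minimal usage sketch, assuming a validated target model named target_model as returned by core.create_target_models():

    from backuper.backup_targets import targets_mapping

    # resolve the concrete backup target class from the validated model's name
    backup_target_cls_map = targets_mapping.get_target_cls_map()
    backup_target_cls = backup_target_cls_map[target_model.name]
    backup_target = backup_target_cls(target_model=target_model)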
diff --git a/backuper/config.py b/backuper/config.py
index a5bcfd3..d62c2f5 100644
--- a/backuper/config.py
+++ b/backuper/config.py
@@ -27,7 +27,7 @@ class UploadProviderEnum(StrEnum):
     LOCAL_FILES_DEBUG = "debug"
-    GOOGLE_CLOUD_STORAGE = "gcs"
+    GCS = "gcs"
     AWS_S3 = "aws"
     AZURE = "azure"
 
 
@@ -81,7 +81,8 @@ def check_smtp_setup(self) -> Self:
         smtp_settings = [self.SMTP_HOST, self.SMTP_FROM_ADDR, self.SMTP_TO_ADDRS]
         if any(smtp_settings) != all(smtp_settings):  # pragma: no cover
             raise ValueError(
-                "parameters SMTP_HOST, SMTP_FROM_ADDR, SMTP_TO_ADDRS must be all either set or not."
+                "parameters SMTP_HOST, SMTP_FROM_ADDR, SMTP_TO_ADDRS "
+                "must be all either set or not."
             )
         return self
 
diff --git a/backuper/core.py b/backuper/core.py
index f75c72e..751792b 100644
--- a/backuper/core.py
+++ b/backuper/core.py
@@ -13,15 +13,7 @@
 from pydantic import BaseModel
 
 from backuper import config
-from backuper.models.backup_target_models import (
-    DirectoryTargetModel,
-    MariaDBTargetModel,
-    MySQLTargetModel,
-    PostgreSQLTargetModel,
-    SingleFileTargetModel,
-    TargetModel,
-)
-from backuper.models.upload_provider_models import ProviderModel
+from backuper.models import backup_target_models, models_mapping, upload_provider_models
 
 log = logging.getLogger(__name__)
 
@@ -152,18 +144,10 @@ def _validate_model(
     return validated_target
 
 
-def create_target_models() -> list[TargetModel]:
-    target_map: dict[str, type[TargetModel]] = {
-        config.BackupTargetEnum.FILE: SingleFileTargetModel,
-        config.BackupTargetEnum.FOLDER: DirectoryTargetModel,
-        config.BackupTargetEnum.MARIADB: MariaDBTargetModel,
-        config.BackupTargetEnum.MYSQL: MySQLTargetModel,
-        config.BackupTargetEnum.POSTGRESQL: PostgreSQLTargetModel,
-    }
+def create_target_models() -> list[backup_target_models.TargetModel]:
+    target_map = models_mapping.get_target_map()
 
-    log.critical(target_map)
-
-    targets: list[TargetModel] = []
+    targets: list[backup_target_models.TargetModel] = []
     for env_name, env_value in os.environ.items():
         env_name_lowercase = env_name.lower()
         log.debug("processing env variable %s", env_name_lowercase)
@@ -178,22 +162,18 @@ def create_target_models() -> list[TargetModel]:
     return targets
 
 
-def create_provider_model() -> ProviderModel:
-    target_map: dict[config.UploadProviderEnum, type[ProviderModel]] = {}
-    for target_model in ProviderModel.__subclasses__():
-        name = config.UploadProviderEnum(
-            target_model.__name__.lower().removesuffix("providermodel")
-        )
-        target_map[name] = target_model
+def create_provider_model() -> upload_provider_models.ProviderModel:
+    provider_map = models_mapping.get_provider_map()
+
+    log.info("start validating BACKUP_PROVIDER environment variable")
     base_provider = _validate_model(
         "backup_provider",
         config.options.BACKUP_PROVIDER,
-        ProviderModel,
+        upload_provider_models.ProviderModel,
         value_whitespace_split=True,
     )
-    target_model_cls = target_map[base_provider.name]
+    target_model_cls = provider_map[base_provider.name]
     return _validate_model(
         "backup_provider", config.options.BACKUP_PROVIDER, target_model_cls
     )
diff --git a/backuper/main.py b/backuper/main.py
index d11020d..1202453 100644
--- a/backuper/main.py
+++ b/backuper/main.py
@@ -10,17 +10,18 @@
 from typing import NoReturn
 
 from backuper import config, core
-from backuper.backup_targets.base_target import BaseBackupTarget
-from backuper.backup_targets.file import File
-from backuper.backup_targets.folder import Folder
-from backuper.backup_targets.mariadb import MariaDB
-from backuper.backup_targets.mysql import MySQL
-from backuper.backup_targets.postgresql import PostgreSQL
+from backuper.backup_targets import (
+    base_target,
+    targets_mapping,
+)
 from backuper.notifications.notifications_context import (
     PROGRAM_STEP,
     NotificationsContext,
 )
-from backuper.upload_providers import BaseUploadProvider
+from backuper.upload_providers import (
+    base_provider,
+    providers_mapping,
+)
 
 exit_event = threading.Event()
 log = logging.getLogger(__name__)
@@ -32,19 +33,18 @@ def quit(sig: int, frame: FrameType | None) -> None:
 
 
 @NotificationsContext(step_name=PROGRAM_STEP.SETUP_PROVIDER)
-def backup_provider() -> BaseUploadProvider:
-    backup_provider_map: dict[config.UploadProviderEnum, type[BaseUploadProvider]] = {}
-    for backup_provider in BaseUploadProvider.__subclasses__():
-        backup_provider_map[backup_provider.target_name] = backup_provider  # type: ignore
+def backup_provider() -> base_provider.BaseUploadProvider:
+    provider_cls_map = providers_mapping.get_provider_cls_map()
 
     provider_model = core.create_provider_model()
     log.info(
         "initializing provider: `%s`",
         provider_model.name,
     )
-    provider_target_cls = backup_provider_map[provider_model.name]
+
+    provider_target_cls = provider_cls_map[provider_model.name]
     log.debug("initializing %s with %s", provider_target_cls, provider_model)
-    res_backup_provider = provider_target_cls(**provider_model.model_dump())
+    res_backup_provider = provider_target_cls(target_provider=provider_model)
     log.info(
         "success initializing provider: `%s`",
         provider_model.name,
@@ -53,16 +53,10 @@ def backup_provider() -> BaseUploadProvider:
 
 
 @NotificationsContext(step_name=PROGRAM_STEP.SETUP_TARGETS)
-def backup_targets() -> list[BaseBackupTarget]:
-    backup_targets_map: dict[str, type[BaseBackupTarget]] = {
-        config.BackupTargetEnum.FILE: File,
-        config.BackupTargetEnum.FOLDER: Folder,
-        config.BackupTargetEnum.MARIADB: MariaDB,
-        config.BackupTargetEnum.POSTGRESQL: PostgreSQL,
-        config.BackupTargetEnum.MYSQL: MySQL,
-    }
-
-    backup_targets: list[BaseBackupTarget] = []
+def backup_targets() -> list[base_target.BaseBackupTarget]:
+    backup_target_cls_map = targets_mapping.get_target_cls_map()
+
+    backup_targets: list[base_target.BaseBackupTarget] = []
     target_models = core.create_target_models()
     if not target_models:
         raise RuntimeError("Found 0 backup targets, at least 1 is required.")
@@ -74,7 +68,7 @@ def backup_targets() -> list[BaseBackupTarget]:
             "initializing target: `%s`",
             target_model.env_name,
         )
-        backup_target_cls = backup_targets_map[target_model.name]
+        backup_target_cls = backup_target_cls_map[target_model.name]
         log.debug("initializing %s with %s", backup_target_cls, target_model)
         backup_targets.append(backup_target_cls(target_model=target_model))
         log.info(
@@ -121,14 +115,17 @@ def shutdown() -> NoReturn:  # pragma: no cover
         sys.exit(0)
     else:
         log.warning(
-            "noooo, exiting! i am now killing myself with %d daemon threads force killed. "
-            "you can extend this time using environment SIGTERM_TIMEOUT_SECS.",
+            "noooo, exiting! i am now killing myself with %d daemon threads "
+            "force killed. you can extend this time using environment "
+            "SIGTERM_TIMEOUT_SECS.",
             threading.active_count() - 1,
         )
         sys.exit(1)
 
 
-def run_backup(target: BaseBackupTarget, provider: BaseUploadProvider) -> None:
+def run_backup(
+    target: base_target.BaseBackupTarget, provider: base_provider.BaseUploadProvider
+) -> None:
     log.info("start making backup of target: `%s`", target.env_name)
     with NotificationsContext(
         step_name=PROGRAM_STEP.BACKUP_CREATE, env_name=target.env_name
@@ -137,7 +134,7 @@ def run_backup(target: BaseBackupTarget, provider: BaseUploadProvider) -> None:
     log.info(
         "backup file created: %s, starting post save upload to provider %s",
         backup_file,
-        provider.target_name,
+        provider.__class__.__name__,
     )
     with NotificationsContext(
         step_name=PROGRAM_STEP.UPLOAD,
diff --git a/backuper/models/models_mapping.py b/backuper/models/models_mapping.py
new file mode 100644
index 0000000..82299ad
--- /dev/null
+++ b/backuper/models/models_mapping.py
@@ -0,0 +1,21 @@
+from backuper.config import BackupTargetEnum, UploadProviderEnum
+from backuper.models import backup_target_models, upload_provider_models
+
+
+def get_target_map() -> dict[str, type[backup_target_models.TargetModel]]:
+    return {
+        BackupTargetEnum.FILE: backup_target_models.SingleFileTargetModel,
+        BackupTargetEnum.FOLDER: backup_target_models.DirectoryTargetModel,
+        BackupTargetEnum.MARIADB: backup_target_models.MariaDBTargetModel,
+        BackupTargetEnum.MYSQL: backup_target_models.MySQLTargetModel,
+        BackupTargetEnum.POSTGRESQL: backup_target_models.PostgreSQLTargetModel,
+    }
+
+
+def get_provider_map() -> dict[str, type[upload_provider_models.ProviderModel]]:
+    return {
+        UploadProviderEnum.AZURE: upload_provider_models.AzureProviderModel,
+        UploadProviderEnum.LOCAL_FILES_DEBUG: upload_provider_models.DebugProviderModel,
+        UploadProviderEnum.GCS: upload_provider_models.GCSProviderModel,
+        UploadProviderEnum.AWS_S3: upload_provider_models.AWSProviderModel,
+    }
diff --git a/backuper/models/upload_provider_models.py b/backuper/models/upload_provider_models.py
index d3221b6..c1d2122 100644
--- a/backuper/models/upload_provider_models.py
+++ b/backuper/models/upload_provider_models.py
@@ -6,14 +6,15 @@
 
 
 class ProviderModel(BaseModel):
-    name: config.UploadProviderEnum
+    name: str
 
 
 class DebugProviderModel(ProviderModel):
-    pass
+    name: str = config.UploadProviderEnum.LOCAL_FILES_DEBUG
 
 
 class GCSProviderModel(ProviderModel):
+    name: str = config.UploadProviderEnum.GCS
     bucket_name: str
     bucket_upload_path: str
     service_account_base64: SecretStr
@@ -29,6 +30,7 @@ def process_service_account_base64(
 
 
 class AWSProviderModel(ProviderModel):
+    name: str = config.UploadProviderEnum.AWS_S3
     bucket_name: str
     bucket_upload_path: str
     key_id: str
@@ -38,5 +40,6 @@ class AWSProviderModel(ProviderModel):
 
 
 class AzureProviderModel(ProviderModel):
+    name: str = config.UploadProviderEnum.AZURE
     container_name: str
     connect_string: SecretStr
diff --git a/backuper/upload_providers/__init__.py b/backuper/upload_providers/__init__.py
index fdd102b..8b13789 100644
--- a/backuper/upload_providers/__init__.py
+++ b/backuper/upload_providers/__init__.py
@@ -1,13 +1 @@
-from .aws_s3 import UploadProviderAWS
-from .azure import UploadProviderAzure
-from .base_provider import BaseUploadProvider
-from .debug import UploadProviderLocalDebug
-from .google_cloud_storage import UploadProviderGCS
 
-__all__ = [
-    "BaseUploadProvider",
-    "UploadProviderAWS",
-    "UploadProviderGCS",
-    "UploadProviderLocalDebug",
-    "UploadProviderAzure",
-]
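With each concrete provider model now carrying a `name` default, provider selection no longer relies on ProviderModel.__subclasses__() naming conventions: core.create_provider_model() first validates the generic ProviderModel to read `name`, then re-validates against the concrete class from models_mapping.get_provider_map(). A rough sketch of that lookup (the literal "gcs" value is illustrative only):

    from backuper.models import models_mapping, upload_provider_models

    base = upload_provider_models.ProviderModel(name="gcs")
    provider_model_cls = models_mapping.get_provider_map()[base.name]
    # provider_model_cls is GCSProviderModel; the full BACKUP_PROVIDER value
    # is then validated against it via core._validate_model().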
diff --git a/backuper/upload_providers/aws_s3.py b/backuper/upload_providers/aws_s3.py
index cf1b174..2dcd8dd 100644
--- a/backuper/upload_providers/aws_s3.py
+++ b/backuper/upload_providers/aws_s3.py
@@ -4,9 +4,9 @@
 
 import boto3
 from boto3.s3.transfer import TransferConfig
-from pydantic import SecretStr
 
-from backuper import config, core
+from backuper import core
+from backuper.models.upload_provider_models import AWSProviderModel
 from backuper.upload_providers.base_provider import BaseUploadProvider
 
 log = logging.getLogger(__name__)
@@ -16,33 +16,21 @@ class DeleteItemDict(TypedDict):
     Key: str
 
 
-class UploadProviderAWS(
-    BaseUploadProvider,
-    name=config.UploadProviderEnum.AWS_S3,
-):
+class UploadProviderAWS(BaseUploadProvider):
     """AWS S3 bucket for storing backups"""
 
-    def __init__(
-        self,
-        bucket_name: str,
-        bucket_upload_path: str,
-        key_id: str,
-        key_secret: SecretStr,
-        region: str,
-        max_bandwidth: int | None,
-        **kwargs: str,
-    ) -> None:
-        self.bucket_upload_path = bucket_upload_path
-        self.max_bandwidth = max_bandwidth
+    def __init__(self, target_provider: AWSProviderModel) -> None:
+        self.bucket_upload_path = target_provider.bucket_upload_path
+        self.max_bandwidth = target_provider.max_bandwidth
 
         s3: Any = boto3.resource(
             "s3",
-            region_name=region,
-            aws_access_key_id=key_id,
-            aws_secret_access_key=key_secret.get_secret_value(),
+            region_name=target_provider.region,
+            aws_access_key_id=target_provider.key_id,
+            aws_secret_access_key=target_provider.key_secret.get_secret_value(),
         )
 
-        self.bucket = s3.Bucket(bucket_name)
+        self.bucket = s3.Bucket(target_provider.bucket_name)
         self.transfer_config = TransferConfig(max_bandwidth=self.max_bandwidth)
 
     def _post_save(self, backup_file: Path) -> str:
diff --git a/backuper/upload_providers/azure.py b/backuper/upload_providers/azure.py
index 975ac5c..9961d27 100644
--- a/backuper/upload_providers/azure.py
+++ b/backuper/upload_providers/azure.py
@@ -2,31 +2,22 @@
 from pathlib import Path
 
 from azure.storage.blob import BlobServiceClient
-from pydantic import SecretStr
 
-from backuper import config, core
+from backuper import core
+from backuper.models.upload_provider_models import AzureProviderModel
 from backuper.upload_providers.base_provider import BaseUploadProvider
 
 log = logging.getLogger(__name__)
 
 
-class UploadProviderAzure(
-    BaseUploadProvider,
-    name=config.UploadProviderEnum.AZURE,
-):
+class UploadProviderAzure(BaseUploadProvider):
     """Azure blob storage for storing backups"""
 
-    def __init__(
-        self,
-        container_name: str,
-        connect_string: SecretStr,
-        **kwargs: str,
-    ) -> None:
-        self.container_name = container_name
-        self.connect_str = connect_string
+    def __init__(self, target_provider: AzureProviderModel) -> None:
+        self.container_name = target_provider.container_name
 
         blob_service_client = BlobServiceClient.from_connection_string(
-            connect_string.get_secret_value()
+            target_provider.connect_string.get_secret_value()
         )
         self.container_client = blob_service_client.get_container_client(
             container=self.container_name
diff --git a/backuper/upload_providers/base_provider.py b/backuper/upload_providers/base_provider.py
index 16eb519..0db9084 100644
--- a/backuper/upload_providers/base_provider.py
+++ b/backuper/upload_providers/base_provider.py
@@ -3,17 +3,14 @@
 from pathlib import Path
 from typing import final
 
-from backuper import config
+from backuper.models.upload_provider_models import ProviderModel
 
 log = logging.getLogger(__name__)
 
 
 class BaseUploadProvider(ABC):
-    target_name: config.UploadProviderEnum
-
-    def __init_subclass__(cls, name: config.UploadProviderEnum) -> None:
-        cls.target_name = name
-        super().__init_subclass__()
+    def __init__(self, target_provider: ProviderModel) -> None:  # pragma: no cover
+        pass
 
     @final
     def post_save(self, backup_file: Path) -> str:
diff --git a/backuper/upload_providers/debug.py b/backuper/upload_providers/debug.py
index ee84b8e..886bca2 100644
--- a/backuper/upload_providers/debug.py
+++ b/backuper/upload_providers/debug.py
@@ -1,22 +1,20 @@
 import logging
 from pathlib import Path
 
-from backuper import config, core
+from backuper import core
+from backuper.models.upload_provider_models import DebugProviderModel
 from backuper.upload_providers.base_provider import BaseUploadProvider
 
 log = logging.getLogger(__name__)
 
 
-class UploadProviderLocalDebug(
-    BaseUploadProvider,
-    name=config.UploadProviderEnum.LOCAL_FILES_DEBUG,
-):
+class UploadProviderLocalDebug(BaseUploadProvider):
     """Represent local folder `data` for storing backups.
 
     If docker volume/persistant volume is lost, so are backups.
     """
 
-    def __init__(self, **kwargs: str) -> None:
+    def __init__(self, target_provider: DebugProviderModel) -> None:
         pass
 
     def _post_save(self, backup_file: Path) -> str:
diff --git a/backuper/upload_providers/google_cloud_storage.py b/backuper/upload_providers/google_cloud_storage.py
index c9f4a35..98d1761 100644
--- a/backuper/upload_providers/google_cloud_storage.py
+++ b/backuper/upload_providers/google_cloud_storage.py
@@ -4,31 +4,20 @@
 from pathlib import Path
 
 import google.cloud.storage as cloud_storage
-from pydantic import SecretStr
 
 from backuper import config, core
+from backuper.models.upload_provider_models import GCSProviderModel
 from backuper.upload_providers.base_provider import BaseUploadProvider
 
 log = logging.getLogger(__name__)
 
 
-class UploadProviderGCS(
-    BaseUploadProvider,
-    name=config.UploadProviderEnum.GOOGLE_CLOUD_STORAGE,
-):
+class UploadProviderGCS(BaseUploadProvider):
     """GCS bucket for storing backups"""
 
-    def __init__(
-        self,
-        bucket_name: str,
-        bucket_upload_path: str,
-        service_account_base64: SecretStr,
-        chunk_size_mb: int,
-        chunk_timeout_secs: int,
-        **kwargs: str,
-    ) -> None:
+    def __init__(self, target_provider: GCSProviderModel) -> None:
         service_account_bytes = base64.b64decode(
-            service_account_base64.get_secret_value()
+            target_provider.service_account_base64.get_secret_value()
        )
         sa_path = config.CONST_CONFIG_FOLDER_PATH / "google_auth.json"
         with open(sa_path, "wb") as f:
@@ -36,10 +25,10 @@ def __init__(
 
         os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = str(sa_path)
         self.storage_client = cloud_storage.Client()
-        self.bucket = self.storage_client.bucket(bucket_name)
-        self.bucket_upload_path = bucket_upload_path
-        self.chunk_size_bytes = chunk_size_mb * 1024 * 1024
-        self.chunk_timeout_secs = chunk_timeout_secs
+        self.bucket = self.storage_client.bucket(target_provider.bucket_name)
+        self.bucket_upload_path = target_provider.bucket_upload_path
+        self.chunk_size_bytes = target_provider.chunk_size_mb * 1024 * 1024
+        self.chunk_timeout_secs = target_provider.chunk_timeout_secs
 
     def _post_save(self, backup_file: Path) -> str:
         zip_backup_file = core.run_create_zip_archive(backup_file=backup_file)
diff --git a/backuper/upload_providers/providers_mapping.py b/backuper/upload_providers/providers_mapping.py
new file mode 100644
index 0000000..771ac41
--- /dev/null
+++ b/backuper/upload_providers/providers_mapping.py
@@ -0,0 +1,17 @@
+from backuper.config import UploadProviderEnum
+from backuper.upload_providers import (
+    aws_s3,
+    azure,
+    base_provider,
+    debug,
+    google_cloud_storage,
+)
+
+
+def get_provider_cls_map() -> dict[str, type[base_provider.BaseUploadProvider]]:
+    return {
+        UploadProviderEnum.AWS_S3: aws_s3.UploadProviderAWS,
+        UploadProviderEnum.AZURE: azure.UploadProviderAzure,
+        UploadProviderEnum.GCS: google_cloud_storage.UploadProviderGCS,
+        UploadProviderEnum.LOCAL_FILES_DEBUG: debug.UploadProviderLocalDebug,
+    }
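All upload providers now share one constructor signature taking the validated model, which is what lets main.backup_provider() drop the **provider_model.model_dump() call. A minimal end-to-end sketch using the debug provider (it needs no credentials):

    from backuper.models.upload_provider_models import DebugProviderModel
    from backuper.upload_providers import providers_mapping

    provider_model = DebugProviderModel()  # name defaults to "debug"
    provider_cls = providers_mapping.get_provider_cls_map()[provider_model.name]
    provider = provider_cls(target_provider=provider_model)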
diff --git a/pyproject.toml b/pyproject.toml
index 4f6a87c..4f299c7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -54,8 +54,8 @@ env = [
     "ZIP_ARCHIVE_PASSWORD=very_unpleasant:password-_-12!@#%^&*()/;><.,][`~'",
 ]
 filterwarnings = [
-    "ignore:Deprecated call to `pkg_resources.declare_namespace\\('google:DeprecationWarning",
-    "ignore:pkg_resources is deprecated as an API:DeprecationWarning",
+    "ignore:Type google._upb._message.ScalarMapContainer uses PyType_Spec",
+    "ignore:Type google._upb._message.MessageMapContainer uses PyType_Spec",
 ]
 
 [tool.ruff]
@@ -63,12 +63,8 @@ target-version = "py312"
 
 [tool.ruff.lint]
 # pycodestyle, pyflakes, isort, pylint, pyupgrade
-ignore = ["E501"]
 select = ["E", "F", "I", "PL", "UP", "W"]
 
-[tool.ruff.lint.pylint]
-max-args = 12
-
 [tool.coverage.run]
 omit = ["backuper/tools/*"]
 source = ["backuper"]
@@ -77,8 +73,6 @@ source = ["backuper"]
 ignore_missing_imports = true
 python_version = "3.12"
 strict = true
-warn_return_any = true
-warn_unused_configs = true
 
 [tool.poetry.scripts]
 backuper = "backuper.main:main"
diff --git a/tests/test_backup_target_mariadb.py b/tests/test_backup_target_mariadb.py
index 66494ba..32621e0 100644
--- a/tests/test_backup_target_mariadb.py
+++ b/tests/test_backup_target_mariadb.py
@@ -43,6 +43,9 @@ def test_run_mariadb_dump(
     db = MariaDB(target_model=mariadb_target)
     out_backup = db._backup()
 
-    out_file = f"{db.env_name}/{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql"
+    out_file = (
+        f"{db.env_name}/"
+        f"{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql"
+    )
     out_path = config.CONST_BACKUP_FOLDER_PATH / out_file
     assert out_backup == out_path
diff --git a/tests/test_backup_target_mysql.py b/tests/test_backup_target_mysql.py
index fab2dca..5e89893 100644
--- a/tests/test_backup_target_mysql.py
+++ b/tests/test_backup_target_mysql.py
@@ -39,6 +39,9 @@ def test_run_mysqldump(
     db = MySQL(target_model=mysql_target)
     out_backup = db._backup()
 
-    out_file = f"{db.env_name}/{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql"
+    out_file = (
+        f"{db.env_name}/"
+        f"{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql"
+    )
     out_path = config.CONST_BACKUP_FOLDER_PATH / out_file
     assert out_backup == out_path
diff --git a/tests/test_backup_target_postgresql.py b/tests/test_backup_target_postgresql.py
index d164048..ec1a475 100644
--- a/tests/test_backup_target_postgresql.py
+++ b/tests/test_backup_target_postgresql.py
@@ -45,6 +45,9 @@ def test_run_pg_dump(
     db = PostgreSQL(target_model=postgres_target)
     out_backup = db._backup()
 
-    out_file = f"{db.env_name}/{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql"
+    out_file = (
+        f"{db.env_name}/"
+        f"{db.env_name}_20221211_0000_fixed_dbname_{db.db_version}_{CONST_TOKEN_URLSAFE}.sql"
+    )
     out_path = config.CONST_BACKUP_FOLDER_PATH / out_file
     assert out_backup == out_path
diff --git a/tests/test_core.py b/tests/test_core.py
index c0611f0..373ad23 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -73,123 +73,126 @@ def test_run_create_zip_archive(
     assert fake_backup_file_out.exists()
 
 
-@pytest.mark.parametrize(
-    "env_lst,valid",
-    [
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port=5432 password=secret cron_rule=* * * * *",
-                ),
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port=5432 password=secret cron_rule=* * * * *",
-                ),
-                (
-                    "MYSQL_FIRST_DB",
-                    "host=localhost port=3306 password=secret cron_rule=* * * * *",
-                ),
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "MYSQL_SECOND_DB",
-                    "host=10.0.0.1 port=3306 user=foo password=change_me! db=bar cron_rule=0 5 * * *",
-                )
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "MARIADB_THIRD_DB",
-                    "host=192.168.1.5 port=3306 user=root password=change_me_please! db=project cron_rule=15 */3 * * * max_backups=20",
-                )
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "SINGLEFILE_THIRD",
-                    f"abs_path={Path(__file__)} cron_rule=15 */3 * * * max_backups=20",
-                )
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "DIRECTORY_FIRST",
-                    f"abs_path={Path(__file__).parent} cron_rule=15 */3 * * * max_backups=20",
-                )
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhostport=5432 password=secret cron_rule=* * * * *",
-                ),
-            ],
-            True,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port=axxx password=secret cron_rule=* * * * *",
-                ),
-            ],
-            False,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port=111 passwor=secret cron_rule=* * * * *",
-                ),
-            ],
-            False,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port=111 password=secret cron_rule=* ** * *",
-                ),
-            ],
-            False,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port=5432 password=secretcron_rule=* * * * *",
-                ),
-            ],
-            False,
-        ),
-        (
-            [
-                (
-                    "POSTGRESQL_FIRST_DB",
-                    "host=localhost port5432 password=secret cron_rule=* * * * *",
-                ),
-            ],
-            True,
-        ),
-    ],
-)
+test_data = [
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port=5432 password=secret cron_rule=* * * * *",
+            ),
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port=5432 password=secret cron_rule=* * * * *",
+            ),
+            (
+                "MYSQL_FIRST_DB",
+                "host=localhost port=3306 password=secret cron_rule=* * * * *",
+            ),
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "MYSQL_SECOND_DB",
+                "host=10.0.0.1 port=3306 user=foo password=change_me!"
+                " db=bar cron_rule=0 5 * * *",
+            )
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "MARIADB_THIRD_DB",
+                "host=192.168.1.5 port=3306 user=root password=change_me_please! "
+                "db=project cron_rule=15 */3 * * * max_backups=20",
+            )
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "SINGLEFILE_THIRD",
+                f"abs_path={Path(__file__)} cron_rule=15 */3 * * * max_backups=20",
+            )
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "DIRECTORY_FIRST",
+                f"abs_path={Path(__file__).parent} cron_rule=15 */3 * * * "
+                "max_backups=20",
+            )
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhostport=5432 password=secret cron_rule=* * * * *",
+            ),
+        ],
+        True,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port=axxx password=secret cron_rule=* * * * *",
+            ),
+        ],
+        False,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port=111 passwor=secret cron_rule=* * * * *",
+            ),
+        ],
+        False,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port=111 password=secret cron_rule=* ** * *",
+            ),
+        ],
+        False,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port=5432 password=secretcron_rule=* * * * *",
+            ),
+        ],
+        False,
+    ),
+    (
+        [
+            (
+                "POSTGRESQL_FIRST_DB",
+                "host=localhost port5432 password=secret cron_rule=* * * * *",
+            ),
+        ],
+        True,
+    ),
+]
+
+
+@pytest.mark.parametrize("env_lst,valid", test_data)
 def test_create_backup_targets(
     env_lst: list[tuple[str, str]], valid: bool, monkeypatch: pytest.MonkeyPatch
 ) -> None:
diff --git a/tests/test_main.py b/tests/test_main.py
index c1e6487..9dc9d68 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -7,8 +7,10 @@
 import pytest
 
 from backuper import config, core, main
+from backuper.models import upload_provider_models
 from backuper.notifications.notifications_context import NotificationsContext
 from backuper.upload_providers.debug import UploadProviderLocalDebug
+from backuper.upload_providers.google_cloud_storage import UploadProviderGCS
 
 from .conftest import (
     ALL_MARIADB_DBS_TARGETS,
@@ -56,10 +58,11 @@ def test_backup_provider(monkeypatch: pytest.MonkeyPatch) -> None:
     monkeypatch.setattr(
         config.options,
         "BACKUP_PROVIDER",
-        "name=gcs bucket_name=name bucket_upload_path=test service_account_base64=Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo=",
+        "name=gcs bucket_name=name bucket_upload_path=test "
+        "service_account_base64=Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo=",
     )
     provider = main.backup_provider()
-    assert provider.target_name == "gcs"
+    assert provider.__class__.__name__ == UploadProviderGCS.__name__
 
 
 def test_main_single(monkeypatch: pytest.MonkeyPatch) -> None:
@@ -127,7 +130,7 @@ def test_run_backup_notifications_fail_message_is_fired_when_it_fails(
     backup_file = Path("/tmp/fake")
     backup_mock = Mock(return_value=backup_file, side_effect=make_backup_side_effect)
     monkeypatch.setattr(target, "_backup", backup_mock)
-    provider = UploadProviderLocalDebug()
+    provider = UploadProviderLocalDebug(upload_provider_models.DebugProviderModel())
     monkeypatch.setattr(provider, "_post_save", Mock(side_effect=post_save_side_effect))
     monkeypatch.setattr(provider, "_clean", Mock(side_effect=clean_side_effect))
 
diff --git a/tests/test_storage_provider_aws.py b/tests/test_storage_provider_aws.py
index 1d8acfe..408ea73 100644
--- a/tests/test_storage_provider_aws.py
+++ b/tests/test_storage_provider_aws.py
@@ -6,7 +6,8 @@
 from freezegun import freeze_time
 from pydantic import SecretStr
 
-from backuper.upload_providers import UploadProviderAWS
+from backuper.models.upload_provider_models import AWSProviderModel
+from backuper.upload_providers.aws_s3 import UploadProviderAWS
 
 
 @pytest.fixture(autouse=True)
@@ -16,12 +17,14 @@ def mock_google_storage_client(monkeypatch: pytest.MonkeyPatch) -> None:
 
 def get_test_aws() -> UploadProviderAWS:
     return UploadProviderAWS(
-        bucket_name="name",
-        bucket_upload_path="test123",
-        key_id="id",
-        key_secret=SecretStr("secret"),
-        region="fake region",
-        max_bandwidth=None,
+        AWSProviderModel(
+            bucket_name="name",
+            bucket_upload_path="test123",
+            key_id="id",
+            key_secret=SecretStr("secret"),
+            region="fake region",
+            max_bandwidth=None,
+        )
     )
 
 
diff --git a/tests/test_storage_provider_azure.py b/tests/test_storage_provider_azure.py
index 6f048fa..174da90 100644
--- a/tests/test_storage_provider_azure.py
+++ b/tests/test_storage_provider_azure.py
@@ -6,7 +6,8 @@
 from freezegun import freeze_time
 from pydantic import SecretStr
 
-from backuper.upload_providers import UploadProviderAzure
+from backuper.models.upload_provider_models import AzureProviderModel
+from backuper.upload_providers.azure import UploadProviderAzure
 
 
 @pytest.fixture(autouse=True)
@@ -15,7 +16,9 @@ def mock_azure_service_client(monkeypatch: pytest.MonkeyPatch) -> None:
 
 
 def get_test_azure() -> UploadProviderAzure:
-    return UploadProviderAzure(container_name="test", connect_string=SecretStr("any"))
+    return UploadProviderAzure(
+        AzureProviderModel(container_name="test", connect_string=SecretStr("any"))
+    )
 
 
 def test_azure_post_save_fails_on_fail_upload(
diff --git a/tests/test_storage_provider_gcs.py b/tests/test_storage_provider_gcs.py
index 62b1b5d..d349c79 100644
--- a/tests/test_storage_provider_gcs.py
+++ b/tests/test_storage_provider_gcs.py
@@ -6,7 +6,8 @@
 from freezegun import freeze_time
 from pydantic import SecretStr
 
-from backuper.upload_providers import UploadProviderGCS
+from backuper.models.upload_provider_models import GCSProviderModel
+from backuper.upload_providers.google_cloud_storage import UploadProviderGCS
 
 
 @pytest.fixture(autouse=True)
@@ -16,11 +17,13 @@ def mock_google_storage_client(monkeypatch: pytest.MonkeyPatch) -> None:
 
 def get_test_gcs() -> UploadProviderGCS:
     return UploadProviderGCS(
-        bucket_name="name",
-        bucket_upload_path="test",
-        service_account_base64=SecretStr("Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo="),
-        chunk_size_mb=100,
-        chunk_timeout_secs=100,
+        GCSProviderModel(
+            bucket_name="name",
+            bucket_upload_path="test",
+            service_account_base64=SecretStr("Z29vZ2xlX3NlcnZpY2VfYWNjb3VudAo="),
+            chunk_size_mb=100,
+            chunk_timeout_secs=100,
+        )
     )
 
 
diff --git a/tests/test_storage_provider_local.py b/tests/test_storage_provider_local.py
index cd2f988..cca99a0 100644
--- a/tests/test_storage_provider_local.py
+++ b/tests/test_storage_provider_local.py
@@ -3,14 +3,17 @@
 import pytest
 from freezegun import freeze_time
 
-from backuper.upload_providers import UploadProviderLocalDebug
+from backuper.models.upload_provider_models import DebugProviderModel
+from backuper.upload_providers.debug import UploadProviderLocalDebug
+
+
+def get_test_debug() -> UploadProviderLocalDebug:
+    return UploadProviderLocalDebug(DebugProviderModel())
 
 
 @pytest.mark.parametrize("method_name", ["_clean", "clean"])
-def test_local_debug_clean_file(
-    tmp_path: Path, monkeypatch: pytest.MonkeyPatch, method_name: str
-) -> None:
-    local = UploadProviderLocalDebug()
+def test_local_debug_clean_file(tmp_path: Path, method_name: str) -> None:
+    local = get_test_debug()
 
     fake_backup_dir_path = tmp_path / "fake_env_name"
     fake_backup_dir_path.mkdir()
@@ -38,10 +41,8 @@ def test_local_debug_clean_file(
 
 
 @pytest.mark.parametrize("method_name", ["_clean", "clean"])
-def test_local_debug_clean_folder(
-    tmp_path: Path, monkeypatch: pytest.MonkeyPatch, method_name: str
-) -> None:
-    local = UploadProviderLocalDebug()
+def test_local_debug_clean_folder(tmp_path: Path, method_name: str) -> None:
+    local = get_test_debug()
 
     fake_backup_dir_path = tmp_path / "fake_env_name"
     fake_backup_dir_path.mkdir()
@@ -71,9 +72,9 @@ def test_local_debug_clean_folder(
 @freeze_time("2023-08-27")
 @pytest.mark.parametrize("method_name", ["_clean", "clean"])
 def test_local_debug_respects_min_retention_days_param_and_not_delete_any_file(
-    tmp_path: Path, monkeypatch: pytest.MonkeyPatch, method_name: str
+    tmp_path: Path, method_name: str
 ) -> None:
-    local = UploadProviderLocalDebug()
+    local = get_test_debug()
 
     fake_backup_dir_path = tmp_path / "fake_env_name"
     fake_backup_dir_path.mkdir()