diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ffaceaa..76c1660 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,34 +1,98 @@ -name: Build and Publish Python 🐍 distributions 📦 to PyPI and TestPyPI +name: Python package build and publish + on: release: types: [created] + jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + python-version: ["3.9", "3.10", "3.11", "3.12"] + steps: + - uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install twine + + - name: Build wheels on Ubuntu + if: matrix.os == 'ubuntu-latest' + uses: RalfG/python-wheels-manylinux-build@v0.5.0-manylinux2014_x86_64 + with: + python-versions: 'cp39-cp39 cp310-cp310 cp311-cp311 cp312-cp312' + build-requirements: 'cython numpy' + + - name: Build wheels on Windows + if: matrix.os == 'windows-latest' + run: | + pip install cibuildwheel + cibuildwheel --output-dir dist + # You may need to adjust the above command based on your package's specific requirements + + - name: Upload wheel artifacts + uses: actions/upload-artifact@v2 + with: + name: wheels-${{ matrix.os }}-py${{ matrix.python-version }} + path: dist/*.whl + deploy: + needs: build runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v1 - with: - python-version: 3.9 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install twine - - name: Build manylinux Python wheels - uses: RalfG/python-wheels-manylinux-build@v0.5.0-manylinux2014_x86_64 - with: - python-versions: 'cp39-cp39 cp310-cp310 cp311-cp311' - build-requirements: 'cython numpy' - - name: Publish wheels to Test PyPI - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.NAVCONFIG_TEST_PYPI_API_TOKEN }} - run: | - twine upload -r testpypi dist/*-manylinux*.whl - - name: Publish wheels to Production PyPI - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.NAVCONFIG_PYPI_API_TOKEN }} - run: | - twine upload dist/*-manylinux*.whl + - uses: actions/checkout@v2 + + - name: Download all artifacts + uses: actions/download-artifact@v2 + with: + path: dist + + - name: Move wheel files to 'dist' directory + run: | + find dist -name '*.whl' -exec mv {} dist \; + + - name: Check for wheel types + id: check_wheels + run: | + echo "Checking for wheel types..." + if ls dist/*-manylinux*.whl 1> /dev/null 2>&1; then + echo "Found manylinux wheels." + echo "HAS_MANYLINUX_WHEELS=true" >> $GITHUB_ENV + fi + if ls dist/*-win_*.whl 1> /dev/null 2>&1; then + echo "Found Windows wheels." 
+ echo "HAS_WINDOWS_WHEELS=true" >> $GITHUB_ENV + fi + + - name: List files in dist + run: ls -l dist + + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: '3.x' + + - name: Install twine + run: pip install twine + + - name: Publish manylinux wheels to Production PyPI + if: env.HAS_MANYLINUX_WHEELS == 'true' + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.NAVCONFIG_PYPI_API_TOKEN }} + run: twine upload dist/*-manylinux*.whl + + - name: Publish Windows wheels to Production PyPI + if: env.HAS_WINDOWS_WHEELS == 'true' + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.NAVCONFIG_PYPI_API_TOKEN }} + run: twine upload dist/*-win_*.whl diff --git a/docs/conf.py b/docs/conf.py index c7c1cd0..4f5fc33 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -7,7 +7,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information project = 'navconfig' -copyright = '2022, Jesus Lara G.' +copyright = '2021, Jesus Lara G.' author = 'Jesus Lara G.' release = 'Dual License BSD and Apache 2.0' diff --git a/examples/migrate_vault.py b/examples/migrate_vault.py new file mode 100644 index 0000000..4ce477a --- /dev/null +++ b/examples/migrate_vault.py @@ -0,0 +1,43 @@ +import sys +from pathlib import Path +from navconfig import config + + +def process_config_file(file_path): + # Check if the file exists + if not Path(file_path).is_file(): + print(f"File not found: {file_path}") + return + + with open(file_path, 'r') as file: + for line in file: + # Strip whitespace from the beginning and end of the line + line = line.strip() + + # Skip blank lines and lines that start with '[' + if not line or line.startswith('['): + continue + + # Split the line into key and value at the first '=' + parts = line.split('=', 1) + if len(parts) == 2: + key, value = parts + key = key.strip() + value = value.strip() + vault = config.source('vault') + if vault: + try: + vault.set(key, value) + print(f"Saved Key: {key}, Value: {value}") + except Exception as e: + print( + f"Error saving Key: {key}={value} on Vault: {e}" + ) + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Usage: python script.py ") + sys.exit(1) + + config_file_path = sys.argv[1] + process_config_file(config_file_path) diff --git a/navconfig/__init__.py b/navconfig/__init__.py index 3b6bda2..3876f2c 100644 --- a/navconfig/__init__.py +++ b/navconfig/__init__.py @@ -12,36 +12,35 @@ # PROJECT PATH IS DEFINED? SITE_ROOT = project_root(__file__) -BASE_DIR = os.getenv('BASE_DIR', None) +BASE_DIR = os.getenv("BASE_DIR", None) if not BASE_DIR: BASE_DIR = SITE_ROOT else: BASE_DIR = Path(BASE_DIR).resolve() -SETTINGS_DIR = BASE_DIR.joinpath('settings') # configuration of the environment type: # environment type can be a file (.env) an encrypted file (crypt) -ENV_TYPE = os.getenv('ENV_TYPE', 'file') +ENV_TYPE = os.getenv("ENV_TYPE", "file") # check if create is True (default: false), create the required directories: -CREATE = os.getenv('CONFIG_CREATE', None) +CREATE = os.getenv("CONFIG_CREATE", None) """ Loading main Configuration Object. """ -config = Kardex( - SITE_ROOT, - env_type=ENV_TYPE, - create=CREATE -) +config = Kardex(SITE_ROOT, env_type=ENV_TYPE, create=CREATE) + # ENV version (dev, prod, staging) ENV = config.ENV # DEBUG VERSION DEBUG = config.debug +# Settings Directory +SETTINGS_DIR = BASE_DIR.joinpath("settings") + # SECURITY WARNING: keep the secret keys used in production secret! 
-PRODUCTION = config.getboolean('PRODUCTION', fallback=bool(not DEBUG)) +PRODUCTION = config.getboolean("PRODUCTION", fallback=bool(not DEBUG)) # Add Path Navigator to Sys path sys.path.append(str(BASE_DIR)) diff --git a/navconfig/conf.py b/navconfig/conf.py index 0918b40..d4a5c2b 100644 --- a/navconfig/conf.py +++ b/navconfig/conf.py @@ -1,31 +1,31 @@ import os import logging -from navconfig import ( - BASE_DIR -) +from navconfig import BASE_DIR + os.chdir(str(BASE_DIR)) ### Global-Settings. try: - from settings.settings import * # pylint: disable=W0401,W0614 + from settings.settings import * # pylint: disable=W0401,W0614 except ImportError as err: try: from settings import settings - logging.error(f'Cannot loaded a settings Module {err}, module: {settings}') + + logging.error(f"Cannot loaded a settings Module {err}, module: {settings}") print( - 'Settings.py File is missing.' - 'Hint: Its recommended to use a settings/settings.py module to customize ' - ' NAV Configuration.' + "Settings.py File is missing." + "Hint: Its recommended to use a settings/settings.py module to customize " + " NAV Configuration." ) except ImportError as ex: logging.error("There is no *settings/* module in project.") print( - 'Settings.py module is missing.' - 'Hint: Its recommended to use a settings/settings.py module to customize ' - ' NAV Configuration.' + "Settings.py module is missing." + "Hint: Its recommended to use a settings/settings.py module to customize " + " NAV Configuration." ) ### User Local Settings try: - from settings.local_settings import * # pylint: disable=W0401,W0614 -except (ImportError) as err: + from settings.local_settings import * # pylint: disable=W0401,W0614 +except ImportError as err: pass diff --git a/navconfig/config.py b/navconfig/config.py index 91d5ac7..7a3c934 100644 --- a/navconfig/config.py +++ b/navconfig/config.py @@ -16,21 +16,19 @@ from .utils.functions import strtobool from .utils.types import Singleton from .loaders import import_loader, pyProjectLoader -from .exceptions import ( - ConfigError, - NavConfigError, - ReaderNotSet -) +from .exceptions import ConfigError, NavConfigError, ReaderNotSet ## memcache: try: from .readers.memcache import mcache + MEMCACHE_LOADER = mcache except ModuleNotFoundError: MEMCACHE_LOADER = None ## redis: try: from .readers.redis import mredis + REDIS_LOADER = mredis except ModuleNotFoundError: REDIS_LOADER = None @@ -38,6 +36,7 @@ ## Hashicorp Vault: try: from .readers.vault import VaultReader + HVAULT_LOADER = VaultReader except ModuleNotFoundError: HVAULT_LOADER = None @@ -48,7 +47,8 @@ class Kardex(metaclass=Singleton): Kardex. Universal container for Configuration Management. 
""" - _conffile: str = 'etc/config.ini' + + _conffile: str = "etc/config.ini" __initialized__ = False _readers: dict = {} _mapping_: dict = {} @@ -76,21 +76,24 @@ def __init__( # TODO: better discovery of Project Root self._site_path = Path(__file__).resolve().parent.parent else: - self._site_path = Path(site_root).resolve() + if isinstance(site_root, str): + self._site_path = Path(site_root).resolve() + else: + self._site_path = site_root # then: configure the instance: self.configure(env, **kwargs) def configure( - self, - env: str = None, - env_type: str = 'file', - override: bool = False + self, + env: str = None, + env_type: str = "file", + override: bool = False ): # Environment Configuration: if env is not None: self.ENV = env else: - environment = os.getenv('ENV', '') + environment = os.getenv("ENV", "") self.ENV = environment # getting type of enviroment consumer: try: @@ -100,49 +103,45 @@ def configure( ) except FileNotFoundError: logging.error( - 'NavConfig Error: Environment (.env) File is Missing.' + "NavConfig Error: Environment (.env) File is Missing." ) # Get External Readers: - self._use_redis: bool = os.environ.get('USE_REDIS', False) + self._use_redis: bool = strtobool(os.environ.get("USE_REDIS", False)) if self._use_redis: if REDIS_LOADER: try: - self._readers['redis'] = REDIS_LOADER() + self._readers["redis"] = REDIS_LOADER() except ReaderNotSet as err: - logging.error(f'{err}') + logging.error(f"{err}") except Exception as err: - logging.warning(f'Redis error: {err}') - raise ConfigError( - str(err) - ) from err - self._use_memcache: bool = os.environ.get('USE_MEMCACHED', False) + logging.warning(f"Redis error: {err}") + raise ConfigError(str(err)) from err + self._use_memcache: bool = strtobool(os.environ.get("USE_MEMCACHED", False)) if self._use_memcache: if MEMCACHE_LOADER: try: - self._readers['memcache'] = MEMCACHE_LOADER() + self._readers["memcache"] = MEMCACHE_LOADER() except ReaderNotSet as err: - logging.error(f'{err}') + logging.error(f"{err}") except Exception as err: - raise ConfigError( - str(err) - ) from err + raise ConfigError(str(err)) from err ## Hashicorp Vault: - self._use_vault: bool = os.environ.get('USE_VAULT', False) + self._use_vault: bool = strtobool(os.environ.get("USE_VAULT", False)) if self._use_vault: if HVAULT_LOADER: try: - self._readers['vault'] = HVAULT_LOADER() + self._readers["vault"] = HVAULT_LOADER( + env=self.ENV + ) except ReaderNotSet as err: - logging.error(f'{err}') + logging.error(f"{err}") except Exception as err: - logging.warning(f'Vault error: {err}') - raise ConfigError( - str(err) - ) from err + logging.warning(f"Vault error: {err}") + raise ConfigError(str(err)) from err # define debug - self._debug = bool(self.getboolean('DEBUG', fallback=False)) + self._debug = bool(self.getboolean("DEBUG", fallback=False)) # and get the config file declared in the environment file - config_file = self.get('CONFIG_FILE', fallback=self._conffile) + config_file = self.get("CONFIG_FILE", fallback=self._conffile) self._ini = ConfigParser() cf = Path(config_file).resolve() if not cf.exists(): @@ -183,9 +182,7 @@ def close(self): try: reader.close() except Exception as err: # pylint: disable=W0703 - logging.error( - f"NavConfig: Error on Reader close: {err}" - ) + logging.error(f"NavConfig: Error on Reader close: {err}") @property def debug(self): @@ -196,9 +193,9 @@ def load_pyproject(self): Load a pyproject.toml file and set the configuration """ try: - project_name = os.getenv('PROJECT_NAME', 'navconfig') - project_path = 
os.getenv('PROJECT_PATH', self.site_root) - project_file = os.getenv('PROJECT_FILE', 'pyproject.toml') + project_name = os.getenv("PROJECT_NAME", "navconfig") + project_path = os.getenv("PROJECT_PATH", self.site_root) + project_file = os.getenv("PROJECT_FILE", "pyproject.toml") if isinstance(project_path, str): project_path = Path(project_path).resolve() try: @@ -206,7 +203,7 @@ def load_pyproject(self): env_path=project_path, project_name=project_name, project_file=project_file, - create=self._create + create=self._create, ) data = self._pyproject.load_environment() self._mapping_ = {**self._mapping_, **data} @@ -214,59 +211,57 @@ def load_pyproject(self): logging.warning(err) except Exception as err: logging.exception(err) - raise ConfigError( - str(err) - ) from err + raise ConfigError(str(err)) from err - def save_environment(self, env_type: str = 'drive'): + def save_environment(self, env_type: str = "drive"): """ Saving a remote Environment into a local File. """ - env_path = self.site_root.joinpath('env', self.ENV, '.env') + env_path = self.site_root.joinpath("env", self.ENV, ".env") # pluggable types - print('ENV ', env_type) + print("ENV ", env_type) if self._env_loader.downloadable is True: self._env_loader.save_enviroment(env_path) - def load_enviroment(self, env_type: str = 'file', override: bool = False): + def load_enviroment(self, env_type: str = "file", override: bool = False): """load_environment. - Load an environment from a File or any pluggable Origin. + Load an environment from a File or any pluggable Origin. """ try: - env_path = self.site_root.joinpath('env', self.ENV) - logging.debug(f'Environment Path: {env_path!s}') + env_path = self.site_root.joinpath("env", self.ENV) + logging.debug( + f"Environment Path: {env_path!s}" + ) obj = import_loader(loader=env_type) self._env_loader = obj( env_path=env_path, - env_file='', + env_file="", override=override, create=self._create, - env=self.ENV + env=self.ENV, ) self._mapping_ = self._env_loader.load_environment() if self._mapping_ is None: self._mapping_ = {} # empty dict except (FileExistsError, FileNotFoundError) as ex: - logging.warning(ex) + logging.warning(str(ex)) raise except RuntimeError as ex: - raise RuntimeError( - ex - ) from ex + raise RuntimeError(str(ex)) from ex except Exception as ex: logging.exception(ex, stack_info=True) raise RuntimeError( f"Navconfig: Exception on Env loader: {ex}" ) from ex - def source(self, option: str = 'ini') -> object: + def source(self, option: str = "ini") -> object: """ source. Return a configuration source. """ - if option == 'ini': + if option == "ini": return self._ini - elif option == 'env': + elif option == "env": return self._env_loader elif option in self._readers: return self._readers[option] @@ -307,20 +302,14 @@ def addEnv(self, file, override: bool = False): """ if file.exists() and file.is_file(): try: - load_dotenv( - dotenv_path=file, - override=override - ) + load_dotenv(dotenv_path=file, override=override) except Exception as err: raise NavConfigError(str(err)) from err else: - raise NavConfigError( - f'Failed to load a new ENV file from {file}' - ) + raise NavConfigError(f"Failed to load a new ENV file from {file}") def _get_external(self, key: str) -> Any: - """Get value fron an External Reader. 
- """ + """Get value fron an External Reader.""" for _, reader in self._readers.items(): try: if reader.enabled is True and reader.exists(key) is True: @@ -412,7 +401,7 @@ def getlist(self, key: str, section: str = None, fallback: Any = None): if isinstance(val, (list, tuple)): return val if val: - return val.split(',') + return val.split(",") else: return [] @@ -450,8 +439,7 @@ def get(self, key: str, section: str = None, fallback: Any = None) -> Any: return val return fallback -# Config Magic Methods (dict like) - + # Config Magic Methods (dict like) def __setitem__(self, key: str, value: Any) -> None: if key in os.environ: # override an environment variable @@ -512,35 +500,34 @@ def __getattr__(self, key: str) -> Any: finally: return val # pylint: disable=W0150 else: - raise TypeError( - f"NavigatorConfig Error: has not attribute {key}" + raise AttributeError( + f"Config Error: has not attribute {key}" ) - def set(self, key: str, value: Any, vault: bool = False) -> None: + def set(self, key: str, value: Any) -> None: """ set. Set an enviroment variable on REDIS, based on Strategy TODO: add cloudpickle to serialize and unserialize data first. """ - if vault is True: + if self._use_vault is True: try: - return self._readers['vault'].set(key, value) + return self._readers["vault"].set(key, value) except KeyError: logging.warning( - f'Unable to Set key {key} in Vault' + f"Unable to Set key {key} in Vault" ) + except Exception: + raise if self._use_redis: try: - return self._readers['redis'].set(key, value) + return self._readers["redis"].set(key, value) except KeyError: - logging.warning(f'Unable to Set key {key} in Redis') + logging.warning(f"Unable to Set key {key} in Redis") return False def setext( - self, key: str, - value: Any, - timeout: int = None, - vault: bool = False + self, key: str, value: Any, timeout: int = None, vault: bool = False ) -> bool: """ set @@ -552,19 +539,19 @@ def setext( else: time = timeout try: - return self._readers['redis'].set(key, value, time) + return self._readers["redis"].set(key, value, time) except KeyError: - logging.warning(f'Unable to Set key {key} in Redis') + logging.warning(f"Unable to Set key {key} in Redis") elif vault is True: if not isinstance(timeout, int): time = 3600 else: time = timeout try: - return self._readers['vault'].set(key, value, timeout=timeout) + return self._readers["vault"].set(key, value, timeout=timeout) except (ValueError, AttributeError): logging.warning( - f'Unable to Set key {key} in Vault' + f"Unable to Set key {key} in Vault" ) else: return False diff --git a/navconfig/cyphers/__init__.py b/navconfig/cyphers/__init__.py index 8c9679f..eaeda42 100644 --- a/navconfig/cyphers/__init__.py +++ b/navconfig/cyphers/__init__.py @@ -1,3 +1,3 @@ from .fernet import FileCypher -__all__ = ('FileCypher', ) \ No newline at end of file +__all__ = ("FileCypher",) diff --git a/navconfig/cyphers/abstract.py b/navconfig/cyphers/abstract.py index ebb284c..7a88bca 100644 --- a/navconfig/cyphers/abstract.py +++ b/navconfig/cyphers/abstract.py @@ -11,38 +11,29 @@ def __init__(self, directory: PurePath): async def open_file(self, path: PurePath): content = None if not path.exists(): - raise FileNotFoundError( - f'Cypher: File {path} does not exist' - ) + raise FileNotFoundError(f"Cypher: File {path} does not exist") try: async with aiofiles.open(path) as f: content = await f.read() except IOError as ex: - raise Exception( - f'NavConfig: Error loading Environment File {path}' - ) from ex + raise Exception(f"NavConfig: Error loading 
Environment File {path}") from ex return content - async def save_file(self, path: PurePath, content, mode: str = 'wb'): + async def save_file(self, path: PurePath, content, mode: str = "wb"): async with aiofiles.open(path, mode) as file: await file.write(content) - async def read_file(self, filename) -> str: fpath = self.path.joinpath(filename) if not fpath.exists(): - raise FileNotFoundError( - f"Not Found: {fpath}" - ) + raise FileNotFoundError(f"Not Found: {fpath}") content = None try: async with aiofiles.open(fpath) as f: content = await f.read() return content except IOError as ex: - raise RuntimeError( - f'NavConfig: Error reading {fpath}: {ex}' - ) from ex + raise RuntimeError(f"NavConfig: Error reading {fpath}: {ex}") from ex async def strbuffer(self, content): s = StringIO() @@ -51,9 +42,9 @@ async def strbuffer(self, content): return s @abstractmethod - async def encrypt(self, name: str = '.env'): + async def encrypt(self, name: str = ".env"): pass @abstractmethod - async def decrypt(self, name: str = 'env.crypt'): + async def decrypt(self, name: str = "env.crypt"): pass diff --git a/navconfig/cyphers/fernet.py b/navconfig/cyphers/fernet.py index 5aca410..6bae6bc 100644 --- a/navconfig/cyphers/fernet.py +++ b/navconfig/cyphers/fernet.py @@ -1,36 +1,30 @@ from cryptography.fernet import Fernet from .abstract import AbstractCypher -class FileCypher(AbstractCypher): +class FileCypher(AbstractCypher): async def create_key(self): # generate the key key = Fernet.generate_key() - file = self.path.joinpath('unlock.key') + file = self.path.joinpath("unlock.key") # string the key into a file await self.save_file(file, key) return file async def get_key(self): try: - key = await self.read_file('unlock.key') + key = await self.read_file("unlock.key") if not key: - raise Exception( - 'Missing the Unlock Key' - ) + raise Exception("Missing the Unlock Key") # use the generated key f = Fernet(key) return f except FileNotFoundError as ex: - raise FileNotFoundError( - ex - ) from ex + raise FileNotFoundError(ex) from ex except RuntimeError as ex: - raise RuntimeError( - f'NavConfig: Error reading the unlock Key: {ex}' - ) from ex + raise RuntimeError(f"NavConfig: Error reading the unlock Key: {ex}") from ex - async def encrypt(self, name: str = '.env'): + async def encrypt(self, name: str = ".env"): # use the generated key f = await self.get_key() file = self.path.joinpath(name) @@ -39,11 +33,11 @@ async def encrypt(self, name: str = '.env'): # encrypt the file encrypted = f.encrypt(original.encode()) # at now, save it into the same directory - file = self.path.joinpath('env.crypt') + file = self.path.joinpath("env.crypt") await self.save_file(file, encrypted) return file - async def decrypt(self, name: str = 'env.crypt'): + async def decrypt(self, name: str = "env.crypt"): # use the generated key f = await self.get_key() # open the original file to encrypt diff --git a/navconfig/loaders/__init__.py b/navconfig/loaders/__init__.py index 2e1f5cd..411a4f1 100644 --- a/navconfig/loaders/__init__.py +++ b/navconfig/loaders/__init__.py @@ -2,7 +2,7 @@ from .pyproject import pyProjectLoader -def import_loader(loader: str = 'file'): +def import_loader(loader: str = "file"): classpath = f"navconfig.loaders.{loader}" cls = f"{loader}Loader" try: @@ -10,6 +10,4 @@ def import_loader(loader: str = 'file'): obj = getattr(module, cls) return obj except ImportError as err: - raise RuntimeError( - f"Navconfig Error: Cannot load {cls}: {err}" - ) from err + raise RuntimeError(f"Navconfig Error: Cannot load {cls}: 
{err}") from err diff --git a/navconfig/loaders/abstract.py b/navconfig/loaders/abstract.py index ccb196a..d2cfcd5 100644 --- a/navconfig/loaders/abstract.py +++ b/navconfig/loaders/abstract.py @@ -7,17 +7,16 @@ class BaseLoader(ABC): - def __init__( - self, - env_path: PurePath = None, - override: bool = False, - create: bool = True, - **kwargs + self, + env_path: PurePath = None, + override: bool = False, + create: bool = True, + **kwargs, ) -> None: self.override: bool = override self.env_path = env_path - self.env_file = '.env' + self.env_file = ".env" self._kwargs = kwargs self.downloadable: bool = False self._content: Any = None @@ -28,10 +27,10 @@ def __init__( env_path.mkdir(parents=True, exist_ok=True) except IOError as ex: raise RuntimeError( - f'{type(self).__name__}: Error creating directory {env_path}: {ex}' + f"{type(self).__name__}: Error creating directory {env_path}: {ex}" ) from ex raise FileExistsError( - f'{type(self).__name__}: No Directory Path: {env_path}' + f"{type(self).__name__}: No Directory Path: {env_path}" ) @abstractmethod @@ -43,16 +42,10 @@ def save_environment(self): pass def load_from_file(self, path): - load_dotenv( - dotenv_path=path, - override=self.override - ) + load_dotenv(dotenv_path=path, override=self.override) def load_from_stream(self, content: str): - load_dotenv( - stream=content, - override=self.override - ) + load_dotenv(stream=content, override=self.override) def load_from_string(self, content: Union[str, dict]): if isinstance(content, str): diff --git a/navconfig/loaders/crypt.py b/navconfig/loaders/crypt.py index 78f93e6..59883a7 100644 --- a/navconfig/loaders/crypt.py +++ b/navconfig/loaders/crypt.py @@ -1,6 +1,7 @@ import asyncio from pathlib import PurePath from ..cyphers import FileCypher + # TODO: load by configuration the Cypher. from .abstract import BaseLoader @@ -10,25 +11,18 @@ class cryptLoader(BaseLoader): Use to read configuration settings from Encrypted Files. """ + def __init__( - self, - env_path: PurePath, - override: bool = False, - create: bool = True, - **kwargs + self, env_path: PurePath, override: bool = False, create: bool = True, **kwargs ) -> None: super().__init__(env_path, override, create=create, **kwargs) - self.env_file = 'env.crypt' + self.env_file = "env.crypt" self._cypher = FileCypher(directory=env_path) def load_environment(self): try: - decrypted = asyncio.run( - self._cypher.decrypt(name=self.env_file) - ) - self.load_from_stream( - content=decrypted - ) + decrypted = asyncio.run(self._cypher.decrypt(name=self.env_file)) + self.load_from_stream(content=decrypted) except FileNotFoundError: raise except Exception as err: diff --git a/navconfig/loaders/drive.py b/navconfig/loaders/drive.py index ff567f1..69aa05f 100644 --- a/navconfig/loaders/drive.py +++ b/navconfig/loaders/drive.py @@ -4,25 +4,27 @@ from pydrive.drive import GoogleDrive from .abstract import BaseLoader -logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.CRITICAL) +logging.getLogger("googleapiclient.discovery_cache").setLevel(logging.CRITICAL) + class driveLoader(BaseLoader): """fileLoader. Use to read configuration settings from .env Files. 
""" + drive = None file_id = None def __init__(self, **kwargs): - self.file_id = os.getenv('NAVCONFIG_DRIVE_ID') - client = os.getenv('NAVCONFIG_DRIVE_CLIENT') + self.file_id = os.getenv("NAVCONFIG_DRIVE_ID") + client = os.getenv("NAVCONFIG_DRIVE_CLIENT") if self.file_id and client: gauth = GoogleAuth() gauth.LoadCredentialsFile(client) if gauth.credentials is None: # Authenticate if they're not there - gauth.LocalWebserverAuth() # Creates local webserver and auto handles authentication. + gauth.LocalWebserverAuth() # Creates local webserver and auto handles authentication. # Save the current credentials to a file elif gauth.access_token_expired: gauth.Refresh() @@ -32,22 +34,24 @@ def __init__(self, **kwargs): print("Google Auth Success") self.drive = GoogleDrive(gauth) else: - raise Exception('Config Google Drive Error: you need to provide **NAVCONFIG_DRIVE_CLIENT** for client configuration') + raise Exception( + "Config Google Drive Error: you need to provide **NAVCONFIG_DRIVE_CLIENT** for client configuration" + ) def load_enviroment(self): try: - env = self.drive.CreateFile({'id': self.file_id}) + env = self.drive.CreateFile({"id": self.file_id}) content = env.GetContentString() self.load_from_string(content) except Exception as err: - raise Exception('Error loading Environment: {}'.format(err)) + raise Exception("Error loading Environment: {}".format(err)) - def save_enviroment(self, path:str=None): + def save_enviroment(self, path: str = None): try: - env = self.drive.CreateFile({'id': self.file_id}) + env = self.drive.CreateFile({"id": self.file_id}) content = env.GetContentString() if content: - with open(path, 'w+') as f: + with open(path, "w+") as f: f.write(content) except Exception as err: - raise Exception('Error Saving Environment: {}'.format(err)) + raise Exception("Error Saving Environment: {}".format(err)) diff --git a/navconfig/loaders/file.py b/navconfig/loaders/file.py index 901ca6e..736a71f 100644 --- a/navconfig/loaders/file.py +++ b/navconfig/loaders/file.py @@ -6,19 +6,16 @@ class fileLoader(BaseLoader): Use to read configuration settings from .env Files. """ + def load_environment(self): file_path = self.env_path.joinpath(self.env_file) if file_path.exists(): if file_path.stat().st_size == 0: - raise FileExistsError( - f'Empty Environment File: {file_path}' - ) + raise FileExistsError(f"Empty Environment File: {file_path}") # load dotenv from file: self.load_from_file(file_path) else: - raise FileNotFoundError( - f'Environment file not found: {file_path}' - ) + raise FileNotFoundError(f"Environment file not found: {file_path}") def save_environment(self): raise NotImplementedError diff --git a/navconfig/loaders/pyproject.py b/navconfig/loaders/pyproject.py index 71789ea..b60de03 100644 --- a/navconfig/loaders/pyproject.py +++ b/navconfig/loaders/pyproject.py @@ -1,35 +1,48 @@ import asyncio from pathlib import PurePath +from concurrent.futures import ThreadPoolExecutor from .parsers.toml import TOMLParser from .abstract import BaseLoader + class pyProjectLoader(BaseLoader): """pyProjectLoader. Read Configuration from a pyproject.toml (TOML syntax) file. 
""" + def __init__( self, env_path: PurePath, override: bool = False, project_name: str = None, - project_file: str = 'pyproject.toml', - **kwargs + project_file: str = "pyproject.toml", + **kwargs, ) -> None: self.project_name = project_name super().__init__(env_path, override, **kwargs) self.env_file = self.env_path.joinpath(project_file) if not self.env_file.exists(): - raise FileNotFoundError( - f"Config File Not Found: {self.env_file}" - ) + raise FileNotFoundError(f"Config File Not Found: {self.env_file}") self._parser = TOMLParser() - def load_environment(self): - self._content = asyncio.run( - self._parser.parse(self.env_file) - ) + async def load_environment_async(self): + self._content = await self._parser.parse(self.env_file) return self.load().get(self.project_name, {}) + def load_environment(self): + async def run_coro(coro): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + result = await coro + loop.close() + return result + + coro = self.load_environment_async() + with ThreadPoolExecutor() as executor: + # Run the coroutine in a separate thread + future = executor.submit(asyncio.run, run_coro(coro)) + return future.result() + def save_environment(self): pass diff --git a/navconfig/loaders/toml.py b/navconfig/loaders/toml.py index 533a998..286b3eb 100644 --- a/navconfig/loaders/toml.py +++ b/navconfig/loaders/toml.py @@ -3,25 +3,20 @@ from .parsers.toml import TOMLParser from .abstract import BaseLoader + class tomlLoader(BaseLoader): """TomlLoader. Used to read configuration settings from TOML files. """ - def __init__( - self, - env_path: PurePath, - override: bool = False, - **kwargs - ) -> None: + + def __init__(self, env_path: PurePath, override: bool = False, **kwargs) -> None: super().__init__(env_path, override, **kwargs) - self.env_file = self.env_path.joinpath('env.toml') + self.env_file = self.env_path.joinpath("env.toml") self._parser = TOMLParser() def load_environment(self): - content = asyncio.run( - self._parser.parse(self.env_file) - ) + content = asyncio.run(self._parser.parse(self.env_file)) return self.load_from_string(content) def save_environment(self): diff --git a/navconfig/loaders/vault.py b/navconfig/loaders/vault.py deleted file mode 100644 index 34294f0..0000000 --- a/navconfig/loaders/vault.py +++ /dev/null @@ -1,41 +0,0 @@ -import os -import logging -from pathlib import PurePath -from .abstract import BaseLoader -from ..readers.vault import VaultReader - - -class vaultLoader(BaseLoader): - """vaultLoader. - - Use to read configuration settings from Hashicorp Vault. 
- """ - def __init__( - self, env_path: PurePath, override: bool = False, create: bool = True, **kwargs) -> None: - super().__init__(env_path, override, create=create, **kwargs) - try: - env = kwargs['env'] - except KeyError: - env = None - self._vault = VaultReader() - self.secret_path = os.getenv('VAULT_HVAC_SECRETS_PATH', 'env_vars') - if env is not None: - self.secret_path = f'{env}/{self.secret_path}' - - - def load_environment(self): - # Retrieve the entire secret at the specified path - secret_data = self._vault.get(f'{self.secret_path}/*') - - # Load the secret data as environment variables - for key, value in secret_data.items(): - if self.override or key not in os.environ: - try: - os.environ[key] = str(value) - except (AttributeError, KeyError): - logging.warning( - f'Vault: Could not set ENV variable {key} with value {value}' - ) - - def save_environment(self): - pass diff --git a/navconfig/loaders/yaml.py b/navconfig/loaders/yaml.py index b6999ea..05fd7f5 100644 --- a/navconfig/loaders/yaml.py +++ b/navconfig/loaders/yaml.py @@ -3,25 +3,20 @@ from .parsers.yaml import YAMLParser from .abstract import BaseLoader + class yamlLoader(BaseLoader): """YamlLoader. Used to read configuration settings from YAML files. """ - def __init__( - self, - env_path: PurePath, - override: bool = False, - **kwargs - ) -> None: + + def __init__(self, env_path: PurePath, override: bool = False, **kwargs) -> None: super().__init__(env_path, override, **kwargs) - self.env_file = self.env_path.joinpath('env.yaml') + self.env_file = self.env_path.joinpath("env.yaml") self._parser = YAMLParser() def load_environment(self): - content = asyncio.run( - self._parser.parse(self.env_file) - ) + content = asyncio.run(self._parser.parse(self.env_file)) return self.load_from_string(content) def save_environment(self): diff --git a/navconfig/logging/__init__.py b/navconfig/logging/__init__.py index ed86e6d..f4b286f 100644 --- a/navconfig/logging/__init__.py +++ b/navconfig/logging/__init__.py @@ -19,37 +19,35 @@ ### Logging loglevel = LOGLEVEL -APP_NAME = config.get('APP_NAME', fallback='navigator') +APP_NAME = config.get("APP_NAME", fallback="navigator") LOG_DIR = config.get( - 'logdir', section='logging', fallback=str(BASE_DIR.joinpath('logs')) + "logdir", section="logging", fallback=str(BASE_DIR.joinpath("logs")) ) -TMP_DIR = config.get('temp_path', section='temp', fallback='/tmp') +TMP_DIR = config.get("temp_path", section="temp", fallback="/tmp") """ Logging Information. 
""" logging_disable_other = config.getboolean( - 'logging_disable_other', section='logging', fallback=False + "logging_disable_other", section="logging", fallback=False ) ### Logging Echo (standard output) -logging_echo = config.getboolean( - 'logging_echo', section='logging', fallback=False -) +logging_echo = config.getboolean("logging_echo", section="logging", fallback=False) ## Mail Alerts: logging_enable_mailer = config.getboolean( - 'mailer_enabled', section='logging', fallback=False + "mailer_enabled", section="logging", fallback=False ) ## can disable the rotating file handler logging_enable_filehandler = config.getboolean( - 'filehandler_enabled', section='logging', fallback=False + "filehandler_enabled", section="logging", fallback=False ) ### External Loggers: logging_enable_logstash = config.getboolean( - 'logstash_enabled', section='logging', fallback=False + "logstash_enabled", section="logging", fallback=False ) # Path version of the log directory @@ -58,147 +56,107 @@ try: logdir.mkdir(parents=True, exist_ok=True) except OSError: - logging.exception( - 'Error Creating Logging Directory', exc_info=True - ) - -HANDLERS = config.get( - 'handlers', - section='logging', - fallback=['StreamHandler'] -) + logging.exception("Error Creating Logging Directory", exc_info=True) + +HANDLERS = config.get("handlers", section="logging", fallback=["StreamHandler"]) if isinstance(HANDLERS, str): - HANDLERS = HANDLERS.split(',') + HANDLERS = HANDLERS.split(",") logging_config = dict( version=1, disable_existing_loggers=logging_disable_other, formatters={ - "console": { - '()': ColoredFormatter, - 'datefmt': '%Y-%m-%d %H:%M:%S' - }, - 'default': { - 'format': '[%(levelname)s] %(asctime)s %(name)s|%(lineno)d :: \ - %(message)s' + "console": {"()": ColoredFormatter, "datefmt": "%Y-%m-%d %H:%M:%S"}, + "default": { + "format": "[%(levelname)s] %(asctime)s %(name)s|%(lineno)d :: \ + %(message)s" }, - 'error': { - 'format': '%(asctime)s-%(levelname)s-%(name)s-%(process)d::\ - %(module)s|%(lineno)s:: %(message)s' + "error": { + "format": "%(asctime)s-%(levelname)s-%(name)s-%(process)d::\ + %(module)s|%(lineno)s:: %(message)s" }, - 'file': { - 'format': "%(asctime)s: [%(levelname)s]: %(pathname)s: %(lineno)d:\ + "file": { + "format": "%(asctime)s: [%(levelname)s]: %(pathname)s: %(lineno)d:\ \n%(message)s\n" }, }, handlers={ - 'console': { + "console": { "class": "logging.StreamHandler", "formatter": "console", "stream": "ext://sys.stdout", - 'level': loglevel + "level": loglevel, }, - 'StreamHandler': { - 'class': 'logging.StreamHandler', - 'formatter': 'default', + "StreamHandler": { + "class": "logging.StreamHandler", + "formatter": "default", "stream": "ext://sys.stdout", - 'level': loglevel - } + "level": loglevel, + }, }, loggers={ - APP_NAME: { - 'handlers': HANDLERS, - 'level': loglevel, - 'propagate': False - }, + APP_NAME: {"handlers": HANDLERS, "level": loglevel, "propagate": False}, "__main__": { # if __name__ == "__main__" "handlers": ["StreamHandler"], - "level": 'INFO', - "propagate": False + "level": "INFO", + "propagate": False, }, - '': { - 'handlers': ['console'], - 'level': 'INFO', - 'propagate': False - } + "": {"handlers": ["console"], "level": "INFO", "propagate": False}, }, root={ - 'handlers': ['StreamHandler'], - 'level': loglevel, - 'propagate': True, - } + "handlers": ["StreamHandler"], + "level": loglevel, + "propagate": True, + }, ) logging_config[APP_NAME] = { - 'handlers': ['StreamHandler'], - 'level': loglevel, - 'propagate': True, + "handlers": ["StreamHandler"], + 
"level": loglevel, + "propagate": True, } if logging_echo is True: - logging_config['root']['handlers'].append('console') + logging_config["root"]["handlers"].append("console") if logging_enable_filehandler is True: from .handlers.file import FileHandler - lf = FileHandler( - config=config, - loglevel=loglevel, - application=APP_NAME - ) - logging_config['handlers']['RotatingFileHandler'] = lf.handler( - path=LOG_DIR - ) + + lf = FileHandler(config=config, loglevel=loglevel, application=APP_NAME) + logging_config["handlers"]["RotatingFileHandler"] = lf.handler(path=LOG_DIR) ## Also Error Handler: - logging_config['handlers']['ErrorFileHandler'] = lf.handler( + logging_config["handlers"]["ErrorFileHandler"] = lf.handler( path=LOG_DIR, loglevel=logging.ERROR ) - logging_config['root']['handlers'].append('RotatingFileHandler') - logging_config['root']['handlers'].append('ErrorFileHandler') + logging_config["root"]["handlers"].append("RotatingFileHandler") + logging_config["root"]["handlers"].append("ErrorFileHandler") if logging_enable_mailer is True: from .handlers.mail import MailerHandler - lm = MailerHandler( - config=config, - loglevel=logging.CRITICAL, - application=APP_NAME - ) - logging_config['handlers']['CriticalMailHandler'] = lm.handler() - logging_config['root']['handlers'].append( - 'CriticalMailHandler' - ) + + lm = MailerHandler(config=config, loglevel=logging.CRITICAL, application=APP_NAME) + logging_config["handlers"]["CriticalMailHandler"] = lm.handler() + logging_config["root"]["handlers"].append("CriticalMailHandler") if logging_enable_logstash is True: - logging.debug( - "Logstash configuration Enabled." - ) + logging.debug("Logstash configuration Enabled.") logstash_logging = config.get( - 'logstash_logging', - section="logging", - fallback='INFO' + "logstash_logging", section="logging", fallback="INFO" ) ### Importing Logstash Handler and returning Logging Config: from .handlers.logstash import LogstashHandler - lh = LogstashHandler( - config=config, - loglevel=logstash_logging, - application=APP_NAME - ) - logging_config['formatters']['logstash'] = lh.formatter( - path=BASE_DIR - ) - logging_config['handlers']['LogstashHandler'] = lh.handler( - enable_localdb=config.getboolean('LOGSTASH_ENABLE_DB', fallback=True), - logdir=LOG_DIR - ) - logging_config['root']['handlers'].append( - 'LogstashHandler' - ) - logging_config[APP_NAME]['handlers'].append( - 'LogstashHandler' + + lh = LogstashHandler(config=config, loglevel=logstash_logging, application=APP_NAME) + logging_config["formatters"]["logstash"] = lh.formatter(path=BASE_DIR) + logging_config["handlers"]["LogstashHandler"] = lh.handler( + enable_localdb=config.getboolean("LOGSTASH_ENABLE_DB", fallback=True), + logdir=LOG_DIR, ) + logging_config["root"]["handlers"].append("LogstashHandler") + logging_config[APP_NAME]["handlers"].append("LogstashHandler") ### Load Logging Configuration: @@ -207,14 +165,14 @@ ### configure basic logger for navconfig logger = Logger(name=__name__, config=logging_config) + # alias for debug printing class dprint: - instance: object = None def __new__(cls, *args, **kwargs): if not cls.instance: - cls.instance = Logger(name='DEBUG', config=logging_config) + cls.instance = Logger(name="DEBUG", config=logging_config) cls.instance.addConsole() cls.instance.debug(*args, **kwargs) diff --git a/navconfig/logging/formatter.py b/navconfig/logging/formatter.py index 126c83b..030496c 100644 --- a/navconfig/logging/formatter.py +++ b/navconfig/logging/formatter.py @@ -13,6 +13,7 @@ # Set the NOICE 
level name globaly: addLevelName(NOTICE_LEVEL_NUM, "NOTICE") + class VerboseLogger(getLoggerClass()): def __init__(self, name, level=NOTSET): super().__init__(name, level) @@ -25,12 +26,12 @@ def notice(self, msg, *args, **kwargs): if self.isEnabledFor(NOTICE_LEVEL_NUM): self._log(NOTICE_LEVEL_NUM, msg, args, **kwargs) -class ColoredFormatter(logging.Formatter): +class ColoredFormatter(logging.Formatter): lightgrey = "\033[37m" darkgrey = "\033[90m" - green = '\x1b[32m' - lightgreen = '\x1b[92m' + green = "\x1b[32m" + lightgreen = "\x1b[92m" blue = "\033[34m" lightblue = "\033[94m" purple = "\033[35m" @@ -42,7 +43,9 @@ class ColoredFormatter(logging.Formatter): lightred = "\033[91m" bold_red = "\x1b[31;1m" reset = "\x1b[0m" - _format = "[%(levelname)s] %(asctime)s %(name)s(%(filename)s:%(lineno)d) :: %(message)s" + _format = ( + "[%(levelname)s] %(asctime)s %(name)s(%(filename)s:%(lineno)d) :: %(message)s" + ) _notice = "[%(levelname)s] %(asctime)s :: %(message)s" FORMATS = { @@ -52,7 +55,7 @@ class ColoredFormatter(logging.Formatter): logging.INFO: lightgrey + _format + reset, logging.WARNING: yellow + _format + reset, logging.ERROR: lightyellow + _format + reset, - logging.CRITICAL: bold_red + _format + reset + logging.CRITICAL: bold_red + _format + reset, } def format(self, record): diff --git a/navconfig/logging/handlers/abstract.py b/navconfig/logging/handlers/abstract.py index 5215b71..3e98fd7 100644 --- a/navconfig/logging/handlers/abstract.py +++ b/navconfig/logging/handlers/abstract.py @@ -1,23 +1,17 @@ from abc import ABCMeta + class AbstractLog(metaclass=ABCMeta): """AbstractLog. Abstract class for Logger Handlers. """ + def __init__(self, config, loglevel, application: str) -> None: - self.env = config.ENV if config.ENV is not None else 'production' + self.env = config.ENV if config.ENV is not None else "production" self.loglevel = loglevel self.application = application - self.host = config.get( - 'logging_host', section='logging', fallback="localhost" - ) - self.port = config.getint( - 'logging_port', section='logging', fallback=9600 - ) + self.host = config.get("logging_host", section="logging", fallback="localhost") + self.port = config.getint("logging_port", section="logging", fallback=9600) # log name: - self._logname = config.get( - 'logname', - section='logging', - fallback=application - ) + self._logname = config.get("logname", section="logging", fallback=application) diff --git a/navconfig/logging/handlers/file.py b/navconfig/logging/handlers/file.py index e53b50c..0694a0e 100644 --- a/navconfig/logging/handlers/file.py +++ b/navconfig/logging/handlers/file.py @@ -1,25 +1,22 @@ import logging from .abstract import AbstractLog + class FileHandler(AbstractLog): def handler(self, path: str, loglevel=None, **kwargs): if loglevel is None: loglevel = self.loglevel if loglevel == logging.ERROR: - filename = { - 'filename': f'{path}/{self._logname}.error.log' - } + filename = {"filename": f"{path}/{self._logname}.error.log"} else: - filename = { - 'filename': f'{path}/{self._logname}.log' - } + filename = {"filename": f"{path}/{self._logname}.log"} hdlr = { - 'class': 'logging.handlers.RotatingFileHandler', - 'maxBytes': (1048576 * 5), - 'backupCount': 2, - 'encoding': 'utf-8', - 'formatter': 'file', - 'level': loglevel, - **filename + "class": "logging.handlers.RotatingFileHandler", + "maxBytes": (1048576 * 5), + "backupCount": 2, + "encoding": "utf-8", + "formatter": "file", + "level": loglevel, + **filename, } return hdlr diff --git a/navconfig/logging/handlers/logstash.py 
b/navconfig/logging/handlers/logstash.py index 8b62f0a..97e41fe 100644 --- a/navconfig/logging/handlers/logstash.py +++ b/navconfig/logging/handlers/logstash.py @@ -9,58 +9,51 @@ ) from ex from .abstract import AbstractLog + class LogstashHandler(AbstractLog): """LogstashHandler. Send Logs to Logstash using Logstash-async. """ + def __init__(self, config, loglevel, application: str) -> None: super(LogstashHandler, self).__init__(config, loglevel, application) self._flush_timeout = config.getint( - 'logstash_flush_timeout', section='logging', fallback=10 - ) - self.host = config.get( - 'LOGSTASH_HOST', fallback=self.host - ) - self.port = config.get( - 'LOGSTASH_PORT', fallback=self.port + "logstash_flush_timeout", section="logging", fallback=10 ) + self.host = config.get("LOGSTASH_HOST", fallback=self.host) + self.port = config.get("LOGSTASH_PORT", fallback=self.port) - def formatter( - self, - path: str, - fqdn: bool = False, - **kwargs - ): + def formatter(self, path: str, fqdn: bool = False, **kwargs): return { - '()': 'logstash_async.formatter.LogstashFormatter', - 'message_type': 'python-logstash', - 'fqdn': fqdn, - "extra_prefix": 'dev', - 'extra': { - 'application': f'{self.application}', - 'project_path': f'{path}', - 'environment': self.env, - **kwargs - } + "()": "logstash_async.formatter.LogstashFormatter", + "message_type": "python-logstash", + "fqdn": fqdn, + "extra_prefix": "dev", + "extra": { + "application": f"{self.application}", + "project_path": f"{path}", + "environment": self.env, + **kwargs, + }, } def handler(self, enable_localdb: bool = False, **kwargs): hdlr = { - 'class': 'logstash_async.handler.AsynchronousLogstashHandler', - 'formatter': 'logstash', - 'transport': 'logstash_async.transport.TcpTransport', - 'transport_type': 'tcp', - 'host': self.host, - 'port': int(self.port), - 'flush_timeout': self._flush_timeout, # set the flush_timeout - 'level': self.loglevel + "class": "logstash_async.handler.AsynchronousLogstashHandler", + "formatter": "logstash", + "transport": "logstash_async.transport.TcpTransport", + "transport_type": "tcp", + "host": self.host, + "port": int(self.port), + "flush_timeout": self._flush_timeout, # set the flush_timeout + "level": self.loglevel, } if enable_localdb is True: - log_dir = kwargs['logdir'] - hdlr['database_path'] = f'{log_dir}/logstash.db' + log_dir = kwargs["logdir"] + hdlr["database_path"] = f"{log_dir}/logstash.db" else: - hdlr['database_path'] = '' - if 'propagate' in kwargs: - hdlr['propagate'] = kwargs['propagate'] + hdlr["database_path"] = "" + if "propagate" in kwargs: + hdlr["propagate"] = kwargs["propagate"] return hdlr diff --git a/navconfig/logging/handlers/mail.py b/navconfig/logging/handlers/mail.py index 4824c28..e493bc0 100644 --- a/navconfig/logging/handlers/mail.py +++ b/navconfig/logging/handlers/mail.py @@ -1,32 +1,27 @@ from .abstract import AbstractLog + class MailerHandler(AbstractLog): def __init__(self, config, loglevel) -> None: super().__init__(config, loglevel) self._admin = config.get( - 'logging_admin', - section='logging', - fallback="dev@domain.com" + "logging_admin", section="logging", fallback="dev@domain.com" ) self._email = config.get( - 'logging_email', - section='logging', - fallback='no-reply@domain.com' + "logging_email", section="logging", fallback="no-reply@domain.com" ) self._mailhost = config.get( - 'logging_mail_host', - section='logging', - fallback="localhost" + "logging_mail_host", section="logging", fallback="localhost" ) def handler(self, **kwargs): hdlr = { - 'level': 
self.loglevel, - 'formatter': 'error', - 'class': 'logging.handlers.SMTPHandler', - 'mailhost': self._mailhost, - 'fromaddr': self._email, - 'toaddrs': [self._admin], - 'subject': f'Critical Error on {self.application}' + "level": self.loglevel, + "formatter": "error", + "class": "logging.handlers.SMTPHandler", + "mailhost": self._mailhost, + "fromaddr": self._email, + "toaddrs": [self._admin], + "subject": f"Critical Error on {self.application}", } return hdlr diff --git a/navconfig/logging/logger.pyi b/navconfig/logging/logger.pyi index 97041d4..956bf62 100644 --- a/navconfig/logging/logger.pyi +++ b/navconfig/logging/logger.pyi @@ -1,15 +1,16 @@ import logging from typing import Any - class Logger: - def disable(self, name: str, loglevel = logging.CRITICAL) -> None: ... + def disable(self, name: str, loglevel=logging.CRITICAL) -> None: ... def setName(self, name: str) -> None: ... - def info(self, message, *args, serialize = True, **kwargs) -> None: ... - def debug(self, message, *args, serialize = True, **kwargs) -> None: ... + def info(self, message, *args, serialize=True, **kwargs) -> None: ... + def debug(self, message, *args, serialize=True, **kwargs) -> None: ... def warning(self, message, *args, **kwargs) -> None: ... def setLevel(self, level) -> None: ... def addHandler(self, handler) -> None: ... - def error(self, message, *args, serialize = False, **kwargs) -> None: ... - def critical(self, message, *args, serialize = False, stacktrace = False, **kwargs) -> None: ... + def error(self, message, *args, serialize=False, **kwargs) -> None: ... + def critical( + self, message, *args, serialize=False, stacktrace=False, **kwargs + ) -> None: ... def logger(self) -> Any: ... diff --git a/navconfig/readers/abstract.py b/navconfig/readers/abstract.py index 66d03b5..bfa6f0a 100644 --- a/navconfig/readers/abstract.py +++ b/navconfig/readers/abstract.py @@ -7,6 +7,7 @@ class AbstractReader(ABC): Description: Abstract class for External Readers. """ + enabled: bool = True @abstractmethod diff --git a/navconfig/readers/memcache.py b/navconfig/readers/memcache.py index 07f29a3..1dbeeeb 100644 --- a/navconfig/readers/memcache.py +++ b/navconfig/readers/memcache.py @@ -4,30 +4,28 @@ from ..exceptions import ReaderNotSet from .abstract import AbstractReader + class mcache(AbstractReader): """ Basic Connector for Memcached. Future-proof. 
""" + _args = {"tcp_nodelay": True, "ketama": True} def __init__(self) -> None: - host = os.getenv('MEMCACHE_HOST', 'localhost') - port = int(os.getenv('MEMCACHE_PORT', '11211')) + host = os.getenv("MEMCACHE_HOST", "localhost") + port = int(os.getenv("MEMCACHE_PORT", "11211")) try: self._server = [f"{host}:{port}"] self._memcached = pylibmc.Client( - self._server, - binary=True, - behaviors=self._args + self._server, binary=True, behaviors=self._args ) # Set a temporary value - self._memcached.set('ping', 'pong', time=1) + self._memcached.set("ping", "pong", time=1) except pylibmc.ConnectionError as err: self.enabled = False - raise ReaderNotSet( - f"Unable to Connect: {err} :: Memcached Disabled ::" - ) + raise ReaderNotSet(f"Unable to Connect: {err} :: Memcached Disabled ::") except Exception as err: # pylint: disable=W0703 self.enabled = False logging.exception(err, stack_info=True) @@ -42,9 +40,7 @@ def get(self, key, default=None): else: return None except Exception as err: - raise Exception( - f"Memcache Get Error: {err!s}" - ) from err + raise Exception(f"Memcache Get Error: {err!s}") from err def exists(self, key: str) -> bool: if self.enabled is False: @@ -67,31 +63,21 @@ def set(self, key, value, timeout: int = None): bytes(key, "utf-8"), bytes(value, "utf-8"), time=timeout ) else: - return self._memcached.set( - bytes(key, "utf-8"), bytes(value, "utf-8") - ) + return self._memcached.set(bytes(key, "utf-8"), bytes(value, "utf-8")) except Exception as err: - raise Exception( - f"Memcache Set Error: {err!s}" - ) from err + raise Exception(f"Memcache Set Error: {err!s}") from err def multi_get(self, *keys): try: - return self._memcached.multi_get( - *[bytes(v, 'utf-8') for v in keys] - ) + return self._memcached.multi_get(*[bytes(v, "utf-8") for v in keys]) except Exception as err: - raise Exception( - f"Memcache Multi Error: {err!s}" - ) from err + raise Exception(f"Memcache Multi Error: {err!s}") from err def delete(self, key): try: self._memcached.delete(bytes(key, "utf-8")) except Exception as err: - raise Exception( - f"Memcache Delete Error: {err!s}" - ) from err + raise Exception(f"Memcache Delete Error: {err!s}") from err def close(self): try: diff --git a/navconfig/readers/redis.py b/navconfig/readers/redis.py index 78f0016..88a527c 100644 --- a/navconfig/readers/redis.py +++ b/navconfig/readers/redis.py @@ -2,34 +2,30 @@ import logging from collections.abc import Callable import redis -from redis.exceptions import ( - RedisError, - ResponseError, - ReadOnlyError -) +from redis.exceptions import RedisError, ResponseError, ReadOnlyError from ..exceptions import ReaderNotSet from .abstract import AbstractReader + class mredis(AbstractReader): """ Very Basic Connector for Redis. 
""" + params: dict = { - "encoding": 'utf-8', + "encoding": "utf-8", "decode_responses": True, - "max_connections": 10 + "max_connections": 10, } def __init__(self): - host = os.getenv('REDIS_HOST', 'localhost') - port = int(os.getenv('REDIS_PORT', '6379')) - db = int(os.getenv('REDIS_DB', '1')) + host = os.getenv("REDIS_HOST", "localhost") + port = int(os.getenv("REDIS_PORT", "6379")) + db = int(os.getenv("REDIS_DB", "1")) self.redis_url = f"redis://{host}:{port}/{db}" self._redis: Callable = None try: - self._redis = redis.from_url( - url=self.redis_url, **self.params - ) + self._redis = redis.from_url(url=self.redis_url, **self.params) response = self._redis.ping() if not response: self.enabled = False @@ -38,11 +34,9 @@ def __init__(self): raise ReaderNotSet( f"Unable to Connecto to Redis: {err} :: Redis Disabled ::" ) - except (TimeoutError) as err: + except TimeoutError as err: self.enabled = False - raise Exception( - f"Redis Config: Redis Timeout: {err}" - ) from err + raise Exception(f"Redis Config: Redis Timeout: {err}") from err except Exception as err: self.enabled = False logging.exception(err) @@ -53,14 +47,10 @@ def set(self, key, value): raise ReaderNotSet() try: return self._redis.set(key, value) - except (ReadOnlyError) as err: - raise Exception( - f"Redis is Read Only: {err}" - ) from err + except ReadOnlyError as err: + raise Exception(f"Redis is Read Only: {err}") from err except Exception as err: - raise Exception( - f"Redis Error: {err}" - ) from err + raise Exception(f"Redis Error: {err}") from err def delete(self, key: str) -> None: pass @@ -69,21 +59,13 @@ def exists(self, key, *keys): if self.enabled is False: raise ReaderNotSet() try: - return bool( - self._redis.exists(key, *keys) - ) + return bool(self._redis.exists(key, *keys)) except ResponseError as err: - raise Exception( - f"Bad Response: {err}" - ) from err - except (RedisError) as err: - raise Exception( - f"Redis Error: {err}" - ) from err + raise Exception(f"Bad Response: {err}") from err + except RedisError as err: + raise Exception(f"Redis Error: {err}") from err except Exception as err: - raise Exception( - f"Unknown Redis Error: {err}" - ) from err + raise Exception(f"Unknown Redis Error: {err}") from err def get(self, key): if self.enabled is False: @@ -91,17 +73,11 @@ def get(self, key): try: return self._redis.get(key) except ResponseError as err: - raise Exception( - f"Bad Response: {err}" - ) from err - except (RedisError) as err: - raise Exception( - f"Redis Error: {err}" - ) from err + raise Exception(f"Bad Response: {err}") from err + except RedisError as err: + raise Exception(f"Redis Error: {err}") from err except Exception as err: - raise Exception( - f"Unknown Redis Error: {err}" - ) from err + raise Exception(f"Unknown Redis Error: {err}") from err def setex(self, key, value, timeout): """ @@ -120,22 +96,14 @@ def setex(self, key, value, timeout): time = timeout try: self._redis.setex(key, time, value) - except (ReadOnlyError) as err: - raise Exception( - f"Redis is Read Only: {err}" - ) from err + except ReadOnlyError as err: + raise Exception(f"Redis is Read Only: {err}") from err except ResponseError as err: - raise Exception( - f"Bad Response: {err}" - ) from err - except (RedisError) as err: - raise Exception( - f"Redis Error: {err}" - ) from err + raise Exception(f"Bad Response: {err}") from err + except RedisError as err: + raise Exception(f"Redis Error: {err}") from err except Exception as err: - raise Exception( - f"Unknown Redis Error: {err}" - ) from err + raise 
Exception(f"Unknown Redis Error: {err}") from err def close(self): try: diff --git a/navconfig/readers/vault.py b/navconfig/readers/vault.py index a084a63..4495b9e 100644 --- a/navconfig/readers/vault.py +++ b/navconfig/readers/vault.py @@ -12,26 +12,29 @@ class VaultReader(AbstractReader): Description: Class for HashiCorp Vault Reader. """ - def __init__(self): - url = os.getenv('VAULT_HVAC_URL', 'http://localhost:8200') - token = os.getenv('VAULT_HVAC_TOKEN') - self._mount = os.getenv('VAULT_HVAC_MOUNT_POINT', 'secret') + def __init__(self, env: str = None) -> None: + url = os.getenv( + "VAULT_HVAC_URL", + "http://localhost:8200" + ) + token = os.getenv("VAULT_HVAC_TOKEN") + self.version = int(os.getenv("VAULT_HVAC_VERSION", 1)) + self._mount = os.getenv("VAULT_HVAC_MOUNT_POINT", "navigator") + self._env = env + if not self._env: + self._env = os.getenv("ENV", "") if not token: - raise ValueError( - 'VAULT_HVAC_TOKEN is not set' - ) + raise ValueError("VAULT_HVAC_TOKEN is not set") try: self.client = hvac.Client(url=url, token=token) self.open() except Exception as err: # pylint: disable=W0703 self.enabled = False - raise ReaderNotSet( - f"Vault Error: {err}" - ) + raise ReaderNotSet(f"Vault Error: {err}") def open(self) -> bool: if self.client.is_authenticated(): - logging.debug('Hashicorp Vault Connected') + logging.debug("Hashicorp Vault Connected") return True return False @@ -42,29 +45,39 @@ def get( self, key: str, default: Any = None, - version: int = None, - path: str = 'secrets', - sub_key: str = None + path: str = "secrets", + sub_key: str = None, ) -> Any: if self.enabled is False: raise ReaderNotSet() + data = {} try: secret_parts = key.split("/") secret_key = secret_parts.pop() secret_path = "/".join(secret_parts) + if not secret_path: + secret_path = self._env except ValueError: secret_path = path secret_key = key try: - response = self.client.secrets.kv.read_secret_version( - path=secret_path, version=version, mount_point=self._mount - ) + if self.version == 1: + response = self.client.secrets.kv.v1.read_secret( + path=secret_path, mount_point=self._mount + ) + data = response["data"] + elif self.version == 2: + response = self.client.secrets.kv.v2.read_secret_version( + path=secret_path, mount_point=self._mount + ) + data = response["data"]["data"] except hvac.exceptions.InvalidPath: return default + if secret_key == "*": - return response['data']['data'] + return data - secret_data = response['data']['data'].get(secret_key, default) + secret_data = data.get(secret_key, default) if sub_key is not None: return secret_data.get(sub_key, default) return secret_data @@ -72,74 +85,140 @@ def get( def exists( self, key: str, - path: str = 'secrets', - version: int = None ) -> bool: if self.enabled is False: raise ReaderNotSet() + data = {} try: secret_parts = key.split("/") secret_key = secret_parts.pop() secret_path = "/".join(secret_parts) + if not secret_path: + secret_path = self._env except ValueError: - secret_path = path + secret_path = self._env secret_key = key try: - response = self.client.secrets.kv.read_secret_version( - path=secret_path, version=version, mount_point=self._mount - ) - except hvac.exceptions.InvalidPath as exc: - # logging.error( - # f"Vault Error over key {key}: {exc}" - # ) + if self.version == 1: + response = self.client.secrets.kv.v1.read_secret( + path=secret_path, mount_point=self._mount + ) + data = response["data"] + elif self.version == 2: + response = self.client.secrets.kv.v2.read_secret_version( + path=secret_path, mount_point=self._mount 
+ ) + data = response["data"]["data"] + except hvac.exceptions.InvalidPath: return False if secret_key == "*": return True - return secret_key in response['data']['data'] + return secret_key in data def set( self, key: str, value: Any, - path: str = 'secrets', - timeout: int = None, - version: int = None + **kwargs ) -> None: if self.enabled is False: raise ReaderNotSet() try: secret_path, secret_key = key.split("/", 1) + if not secret_path: + secret_path = self._env except ValueError: - secret_path = path + secret_path = self._env secret_key = key - secret_data = {secret_key: value} - self.client.secrets.kv.v2.create_or_update_secret( - path=secret_path, - version=version, - secret=secret_data, - mount_point=self._mount - ) + try: + if self.version == 1: + # Read the existing secret data + existing_data = {} + try: + read_response = self.client.secrets.kv.v1.read_secret( + path=secret_path, mount_point=self._mount + ) + existing_data = read_response['data'] + except hvac.exceptions.InvalidPath: + # If the path doesn't exist yet, it's fine + pass + + # Update the existing data with the new key-value pair + existing_data[secret_key] = value + + # Write the updated data back to the path + self.client.secrets.kv.v1.create_or_update_secret( + path=secret_path, + secret=existing_data, + mount_point=self._mount, + ) + elif self.version == 2: + # For KV v2, you need to provide the full data for the path + # Fetch existing data if you want to preserve other keys + existing_data = {} + try: + read_response = self.client.secrets.kv.v2.read_secret_version( + path=secret_path, mount_point=self._mount + ) + existing_data = read_response['data']['data'] + except hvac.exceptions.InvalidPath: + # If the path doesn't exist yet, it's fine + pass + + # Update the existing data with the new key-value pair + existing_data[secret_key] = value + + # Write the updated data back to the path + self.client.secrets.kv.v2.create_or_update_secret( + path=secret_path, + secret=existing_data, + mount_point=self._mount + ) + except Exception as ex: + raise ValueError( + f"Error writing to Vault: {ex}" + ) + + def delete(self, key: str, secret_path: str = None) -> bool: + if self.enabled is False: + raise ReaderNotSet() # Or some appropriate exception - def delete( - self, - key: str, - path: str = 'secrets', - version: int = None - ) -> bool: try: secret_path, secret_key = key.split("/", 1) + if not secret_path: + secret_path = self._env except ValueError: - secret_path = path + secret_path = self._env secret_key = key - response = self.client.secrets.kv.read_secret_version( - path=secret_path, version=version, mount_point=self._mount - ) - if secret_key in response['data']['data']: - del response['data']['data'][secret_key] - self.client.secrets.kv.v2.create_or_update_secret( - path=secret_path, - secret=response['data']['data'], - mount_point=self._mount - ) + + try: + if self.version == 1: + current_secret = self.client.secrets.kv.v1.read_secret( + path=secret_path, + mount_point=self._mount + )['data'] + if secret_key in current_secret: + del current_secret[secret_key] + self.client.secrets.kv.v1.create_or_update_secret( + path=secret_path, + secret=current_secret, + mount_point=self._mount + ) + elif self.version == 2: + current_secret = self.client.secrets.kv.v2.read_secret_version( + path=secret_path, + mount_point=self._mount + )['data']['data'] + if secret_key in current_secret: + del current_secret[secret_key] + self.client.secrets.kv.v2.create_or_update_secret( + path=secret_path, + secret=current_secret, + 
mount_point=self._mount + ) return True - return False + except Exception as e: + logging.warning( + f"Error deleting key '{key}' from '{secret_path}': {e}" + ) + return False diff --git a/navconfig/utils/__init__.py b/navconfig/utils/__init__.py index e2482e3..f7c721e 100644 --- a/navconfig/utils/__init__.py +++ b/navconfig/utils/__init__.py @@ -1,26 +1,27 @@ import sys import os from pathlib import Path, PurePath -# from inspect import getfile, currentframe -def is_virtualenv(): - if os.getenv('PYENV_VIRTUAL_ENV') or os.getenv('PYENV_VIRTUAL_ENV'): +def is_virtualenv() -> bool: + if os.getenv("PYENV_VIRTUAL_ENV") or os.getenv("PYENV_VIRTUAL_ENV"): return True - return ( - hasattr(sys, 'real_prefix') or ( - hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix) + return hasattr(sys, "real_prefix") or ( + hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix ) -def project_root(base_file) -> PurePath: - path = os.getenv('SITE_ROOT', None) - # print(getfile(currentframe().f_back)) - if not path: - if is_virtualenv(): - path = Path(sys.prefix).resolve().parent - else: - path = Path(os.path.abspath(os.path.dirname(base_file))).resolve().parent.parent +def project_root(base_file: str) -> PurePath: + path = os.getenv("SITE_ROOT", None) + if path: + return Path(path) + + if is_virtualenv(): + return Path(sys.prefix).resolve().parent + + # Resolve the directory of the base_file + path = Path(base_file).resolve().parent.parent + if not path: path = Path(sys.prefix).resolve().parent if isinstance(path, str): diff --git a/navconfig/version.py b/navconfig/version.py index 54f3964..98e984d 100644 --- a/navconfig/version.py +++ b/navconfig/version.py @@ -1,9 +1,11 @@ """NavConfig Meta information.""" -__title__ = 'navconfig' -__description__ = ('Configuration tool for all Navigator Services ' - 'Tool for accessing Config info from different sources.') -__version__ = '1.5.4' -__author__ = 'Jesus Lara' -__author_email__ = 'jesuslarag@gmail.com' -__license__ = 'MIT' +__title__ = "navconfig" +__description__ = ( + "Configuration tool for all Navigator Services " + "Tool for accessing Config info from different sources." +) +__version__ = "1.6.0" +__author__ = "Jesus Lara" +__author_email__ = "jesuslarag@gmail.com" +__license__ = "MIT" diff --git a/pyproject.toml b/pyproject.toml index 73b7796..bf65dbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ classifiers = [ long_description = "file:README.md" long_description_content_type = "text/markdown" keywords = ["aiohttp", "settings", "configuration", "conf", "configuration-management"] -python_requires = ">=3.9.16" +python_requires = ">=3.9.13" [tool.pytest.ini_options] diff --git a/setup.py b/setup.py index 2b5d621..940f58a 100644 --- a/setup.py +++ b/setup.py @@ -99,7 +99,7 @@ def readme(): setup( name=__title__, version=__version__, - python_requires=">=3.9.16", + python_requires=">=3.9.13", url='https://github.com/phenobarbital/NavConfig', description=__description__, long_description=readme(), diff --git a/tests/test_config.py b/tests/test_config.py index a2f3153..6df9d09 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -31,7 +31,7 @@ async def test_conf(event_loop): async def test_environment(event_loop): from navconfig import config - config.configure(env='dev', override=True) # re-configure the environment + config.configure(env='dev', override=True) # re-configure the environment cnf = config.get('CONFIG_FILE') assert cnf == 'etc/navigator.ini'
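Usage note (illustrative sketch, not part of the changeset above): the VaultReader changes select between the KV v1 and v2 APIs via the VAULT_HVAC_VERSION environment variable and fall back to the ENV name when a key carries no explicit "path/" prefix. The snippet below assumes the class is importable as navconfig.readers.vault.VaultReader, that a Vault server is reachable at VAULT_HVAC_URL with a valid VAULT_HVAC_TOKEN, and that the configured mount point exists; the "myapp" path and "DB_PASSWORD" key are hypothetical placeholders.

import os
from navconfig.readers.vault import VaultReader

# Environment read by VaultReader.__init__ (see navconfig/readers/vault.py above);
# the token value here is a placeholder for illustration only.
os.environ.setdefault("VAULT_HVAC_URL", "http://localhost:8200")
os.environ.setdefault("VAULT_HVAC_TOKEN", "hvs.placeholder-token")
os.environ.setdefault("VAULT_HVAC_VERSION", "2")              # 1 = KV v1, 2 = KV v2
os.environ.setdefault("VAULT_HVAC_MOUNT_POINT", "navigator")
os.environ.setdefault("ENV", "dev")                           # default path for keys without a "path/" prefix

vault = VaultReader()                                         # env defaults to os.getenv("ENV")
vault.set("myapp/DB_PASSWORD", "s3cr3t")                      # stores {"DB_PASSWORD": "s3cr3t"} under path "myapp"
print(vault.exists("myapp/DB_PASSWORD"))                      # True once the write succeeds
print(vault.get("myapp/DB_PASSWORD"))                         # "s3cr3t"
print(vault.get("myapp/*"))                                   # the full secret dict stored at "myapp"

Because the v1/v2 decision is resolved once in __init__, callers use the same get/set/exists/delete surface regardless of which KV engine backs the mount.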