From e2d6289337bc71ab8877b3c316e327a6cea3a044 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 01:11:03 +0400 Subject: [PATCH 001/148] bumped dependencies version and migrated to users-package v3.0.0 --- CHANGELOG.md | 13 + poetry.lock | 252 +++++++------------ pyproject.toml | 6 +- src/bot.py | 6 +- src/configs/databases.json | 17 +- src/migrations/0001_vault_historical_data.py | 7 +- src/migrations/0002_messages_table.py | 78 +++--- src/migrations/0003_users_table.py | 54 ++++ src/modules/database.py | 32 +-- src/modules/downloader.py | 2 +- src/modules/uploader.py | 2 +- 11 files changed, 244 insertions(+), 225 deletions(-) create mode 100644 src/migrations/0003_users_table.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 2cf795786..f79414743 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,19 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## v2.3.0 - 2024-08-25 +### What's Changed +**Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 +#### 💥 Breaking Changes +* now all user data is stored in the database +* psql credentials are now written out via Vault Database Engine +#### 🚀 Features +* bump vault-package to major version `3.0.0` +* bump users-package to major version `3.0.0` +* bump telegram-package to major version `2.0.1` +* [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) + + ## v2.2.1 - 2024-08-24 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.0...v2.2.1 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/94 diff --git a/poetry.lock b/poetry.lock index 0727bf244..77d61af2a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -189,51 +189,45 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - [[package]] name = "cryptography" -version = "43.0.0" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] @@ -246,7 +240,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -283,20 +277,6 @@ typing-extensions = ">=4.7.0" [package.extras] dev = ["coverage", "pytest (>=7.4.4)"] -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "hvac" version = "2.3.0" @@ -316,24 +296,24 @@ parser = ["pyhcl (>=0.4.4,<0.5.0)"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] name = "importlib-metadata" -version = "8.2.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -344,17 +324,6 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - [[package]] name = "instaloader" version = "4.13" @@ -658,17 +627,6 @@ files = [ {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"}, ] -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - [[package]] name = "pathlib" version = "1.0.1" @@ -680,21 +638,6 @@ files = [ {file = "pathlib-1.0.1.tar.gz", hash = "sha256:6940718dfc3eff4258203ad5021090933e5c04707d5ca8cc9e73c94a7894ea9f"}, ] -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - [[package]] name = "ply" version = "3.11" @@ -720,6 +663,28 @@ files = [ [package.extras] twisted = ["twisted"] +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = 
"sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + [[package]] name = "psycopg2-binary" version = "2.9.9" @@ -877,28 +842,6 @@ redis = ["redis (>=3.4.1)"] uvicorn = ["uvicorn"] watchdog = ["watchdog"] -[[package]] -name = "pytest" -version = "8.3.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -926,20 +869,20 @@ files = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.29.0" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b"}, + {file = "requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" +urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -989,7 +932,7 @@ six = ">=1.12.0" [[package]] name = "telegram" -version = "1.2.0" +version = "2.0.1" description = "This is an additional implementation compared to the telebot module. This module is designed for quick initialization, authorization and rendering of various buttons/widgets for telegram bots." optional = false python-versions = "^3.9" @@ -1000,14 +943,13 @@ develop = false logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"} messages = {git = "https://github.com/obervinov/messages-package.git", tag = "v1.0.4"} pyTelegramBotAPI = "^4" -pytest = "^8" -vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v2.0.4"} +vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0"} [package.source] type = "git" url = "https://github.com/obervinov/telegram-package.git" -reference = "v1.2.0" -resolved_reference = "0a7d08cc3b215e6e7bfe13ccfe72f8190f5ae457" +reference = "v2.0.1" +resolved_reference = "5b3dc2f377095f59a1cd9645a865e466c9b40a4b" [[package]] name = "tenacity" @@ -1026,17 +968,6 @@ six = ">=1.9.0" [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "typing-extensions" version = "4.12.2" @@ -1050,25 +981,24 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "1.26.19" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, + {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "users" -version = "2.0.5" -description = "This python module is a simple implementation of user management functionality for telegram bots, such as: authentication, authorization and request limiting." +version = "3.0.0" +description = "This python module is a simple implementation of user management functionality for telegram bots, such as: authentication, authorization and requests limiting." optional = false python-versions = "^3.9 || ^3.10 || ^3.11" files = [] @@ -1076,35 +1006,39 @@ develop = false [package.dependencies] logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"} -vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v2.0.4"} +psycopg2 = "^2" +vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0"} [package.source] type = "git" url = "https://github.com/obervinov/users-package.git" -reference = "v2.0.5" -resolved_reference = "f4f4b758d394fd1820c9ee3ce264b5d18a439823" +reference = "v3.0.0" +resolved_reference = "1026da4cb8cfacd1186808a75284257b1cdac610" [[package]] name = "vault" -version = "2.0.4" -description = "This is an additional implementation compared to the hvac module. The main purpose of which is to simplify the use and interaction with vault for my standard projects. This module contains a set of methods for working with secrets and quickly configuring Vault." +version = "3.0.0" +description = "This is an additional implementation compared to the hvac module. The main purpose of which is to simplify the use and interaction with vault for my standard projects. This module contains a set of methods for working with secrets and database engines in vault." 
optional = false python-versions = "^3.9 || ^3.10 || ^3.11" files = [] develop = false [package.dependencies] +cryptography = "^42" hvac = "^2" +idna = "^3" keyring = "^24" logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"} python-dateutil = "^2" +requests = ">=2.29.0,<2.30.0" SecretStorage = "^3" [package.source] type = "git" url = "https://github.com/obervinov/vault-package.git" -reference = "v2.0.4" -resolved_reference = "54a312b747ad84c391a837c5bddaed7a021c9d76" +reference = "v3.0.0" +resolved_reference = "73b8d0431415eae65e2271483e80353a62b30a28" [[package]] name = "webdavclient3" @@ -1139,4 +1073,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "d668b2255f52e2bffef853511746eaa0b4c5fca4b16a01736d04a99932ffeffa" +content-hash = "fa955dfdcb032f7463ef978064e12800f34d95047753864159e2af6917c00da0" diff --git a/pyproject.toml b/pyproject.toml index cc40eadbe..6e761260b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,9 +26,9 @@ mock = "^5" webdavclient3 = "^3" prometheus-client = "^0" logger = { git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6" } -vault = { git = "https://github.com/obervinov/vault-package.git", tag = "v2.0.4" } -users = { git = "https://github.com/obervinov/users-package.git", tag = "v2.0.5" } -telegram = { git = "https://github.com/obervinov/telegram-package.git", tag = "v1.2.0" } +vault = { git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0" } +users = { git = "https://github.com/obervinov/users-package.git", tag = "v3.0.0" } +telegram = { git = "https://github.com/obervinov/telegram-package.git", tag = "v2.0.1" } [build-system] requires = ["poetry-core"] diff --git a/src/bot.py b/src/bot.py index 3af4e9504..715036f3d 100644 --- a/src/bot.py +++ b/src/bot.py @@ -36,7 +36,7 @@ # Client for download content from supplier # If API disabled, the mock object will be used -downloader_api_enabled = vault.read_secret(path='configuration/downloader-api').get('enabled', False) +downloader_api_enabled = vault.kv2engine.read_secret(path='configuration/downloader-api').get('enabled', False) if downloader_api_enabled == 'True': log.info('[Bot]: downloader API is enabled: %s', downloader_api_enabled) downloader = Downloader(vault=vault) @@ -52,7 +52,7 @@ # Client for upload content to the cloud storage # If API disabled, the mock object will be used -uploader_api_enabled = vault.read_secret(path='configuration/uploader-api').get('enabled', False) +uploader_api_enabled = vault.kv2engine.read_secret(path='configuration/uploader-api').get('enabled', False) if uploader_api_enabled == 'True': log.info('[Bot]: uploader API is enabled: %s', uploader_api_enabled) uploader = Uploader(vault=vault) @@ -388,7 +388,7 @@ def process_one_post( user = users_rl.user_access_check(message.chat.id, ROLES_MAP['Post']) if user.get('permissions', None) == users_rl.user_status_allow: data = message_parser(message) - rate_limit = user.get('rate_limits', {}).get('end_time', None) + rate_limit = user.get('rate_limits', None) # Define time to process the message in queue if rate_limit: diff --git a/src/configs/databases.json b/src/configs/databases.json index 4ec57a45c..046d8e8fd 100644 --- a/src/configs/databases.json +++ b/src/configs/databases.json @@ -68,7 +68,22 @@ "columns": [ "id SERIAL PRIMARY KEY, ", "user_id VARCHAR(255) NOT NULL, ", - "chat_id VARCHAR(255) NOT NULL" + "chat_id VARCHAR(255) NOT NULL, ", + "status 
VARCHAR(255) NOT NULL DEFAULT 'denied'" + ] + }, + { + "name": "users_requests", + "description": "The table stores the metadata of the user requests", + "columns": [ + "id SERIAL PRIMARY KEY, ", + "user_id VARCHAR(255) NOT NULL, ", + "message_id VARCHAR(255) NOT NULL, ", + "chat_id VARCHAR(255) NOT NULL, ", + "authentication VARCHAR(255) NOT NULL, ", + "\"authorization\" VARCHAR(255) NOT NULL, ", + "timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, ", + "rate_limits TIMESTAMP" ] } ] diff --git a/src/migrations/0001_vault_historical_data.py b/src/migrations/0001_vault_historical_data.py index afb65a8de..225d87827 100644 --- a/src/migrations/0001_vault_historical_data.py +++ b/src/migrations/0001_vault_historical_data.py @@ -60,8 +60,11 @@ def execute(obj): ) print(f"{NAME}: Migrating {post_id} from history/{owner}") - obj.cursor.execute(f"INSERT INTO {table_name} ({columns}) VALUES ({values})") - obj.database_connection.commit() + conn = obj.get_connection() + with conn.cursor() as cursor: + cursor.execute(f"INSERT INTO {table_name} ({columns}) VALUES ({values})") + conn.commit() + obj.close_connection(conn) print(f"{NAME}: Post {post_id} from history/{owner} has been added to processed table") print(f"{NAME}: Migration has been completed") # Will be fixed after the issue https://github.com/obervinov/vault-package/issues/46 is resolved diff --git a/src/migrations/0002_messages_table.py b/src/migrations/0002_messages_table.py index cd49bcf86..73112171b 100644 --- a/src/migrations/0002_messages_table.py +++ b/src/migrations/0002_messages_table.py @@ -1,4 +1,3 @@ -# pylint: disable=C0103,R0914 """ Add additional column 'created_at' and replace column 'timestamp' with 'updated_at' in the messages table. https://github.com/obervinov/pyinstabot-downloader/issues/62 @@ -23,44 +22,45 @@ def execute(obj): add_columns = [('created_at', 'TIMESTAMP', 'CURRENT_TIMESTAMP'), ('state', 'VARCHAR(255)', "'added'")] print(f"{NAME}: Start migration for the {table_name} table: Rename columns {rename_columns}, Add columns {add_columns}...") - # check if the table exists and has the necessary schema for execute the migration - # check table - obj.cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) - table = obj.cursor.fetchone() + conn = obj.get_connection() + with conn.cursor() as cursor: + # check if the table exists and has the necessary schema for execute the migration + # check table + cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) + table = cursor.fetchone() - # check columns in the table - obj.cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) - columns = [row[0] for row in obj.cursor.fetchall()] + # check columns in the table + cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) + columns = [row[0] for row in cursor.fetchall()] - if not table: - print(f"{NAME}: The {table_name} table does not exist. Skip the migration.") + if not table: + print(f"{NAME}: The {table_name} table does not exist. Skip the migration.") + elif len(columns) < 1: + print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. 
Skip the migration.") + else: + for column in rename_columns: + try: + print(f"{NAME}: Rename column {column[0]} to {column[1]} in the {table_name} table...") + cursor.execute(f"ALTER TABLE {table_name} RENAME COLUMN {column[0]} TO {column[1]}") + conn.commit() + print(f"{NAME}: Column {column[0]} has been renamed to {column[1]} in the {table_name} table.") + except obj.errors.DuplicateColumn as error: + print(f"{NAME}: Columns in the {table_name} table have already been renamed. Skip renaming: {error}") + conn.rollback() + except obj.errors.UndefinedColumn as error: + print(f"{NAME}: Columns in the {table_name} table have not been renamed. Skip renaming: {error}") + conn.rollback() - elif len(columns) < 1: - print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. Skip the migration.") - - else: - for column in rename_columns: - try: - print(f"{NAME}: Rename column {column[0]} to {column[1]} in the {table_name} table...") - obj.cursor.execute(f"ALTER TABLE {table_name} RENAME COLUMN {column[0]} TO {column[1]}") - obj.database_connection.commit() - print(f"{NAME}: Column {column[0]} has been renamed to {column[1]} in the {table_name} table.") - except obj.errors.DuplicateColumn as error: - print(f"{NAME}: Columns in the {table_name} table have already been renamed. Skip renaming: {error}") - obj.database_connection.rollback() - except obj.errors.UndefinedColumn as error: - print(f"{NAME}: Columns in the {table_name} table have not been renamed. Skip renaming: {error}") - obj.database_connection.rollback() - - for column in add_columns: - try: - print(f"{NAME}: Add column {column[0]} to the {table_name} table...") - obj.cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") - obj.database_connection.commit() - print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") - except obj.errors.DuplicateColumn as error: - print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") - obj.database_connection.rollback() - except obj.errors.FeatureNotSupported as error: - print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") - obj.database_connection.rollback() + for column in add_columns: + try: + print(f"{NAME}: Add column {column[0]} to the {table_name} table...") + cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") + conn.commit() + print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") + except obj.errors.DuplicateColumn as error: + print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") + conn.rollback() + except obj.errors.FeatureNotSupported as error: + print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") + conn.rollback() + obj.close_connection(conn) diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py new file mode 100644 index 000000000..652552cf8 --- /dev/null +++ b/src/migrations/0003_users_table.py @@ -0,0 +1,54 @@ +# pylint: disable=C0103,R0914 +""" +Add additional column 'status' in the users table. +https://github.com/obervinov/users-package/blob/v3.0.0/tests/postgres/tables.sql +""" +VERSION = '1.0' +NAME = '0003_users_table' + + +def execute(obj): + """ + Add additional column 'status' in the users table. + + Args: + obj: An obj containing the database connection and cursor, as well as the Vault instance. 
+ + Returns: + None + """ + # database settings + table_name = 'users' + add_columns = [('status', 'VARCHAR(255)', "'denied'")] + print(f"{NAME}: Start migration for the {table_name} table: Add columns {add_columns}...") + + # check if the table exists and has the necessary schema for execute the migration + conn = obj.get_connection() + with conn.cursor() as cursor: + # check table + cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) + table = cursor.fetchone() + + # check columns in the table + cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) + columns = [row[0] for row in cursor.fetchall()] + + if not table: + print(f"{NAME}: The {table_name} table does not exist. Skip the migration.") + + elif len(columns) < 1: + print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. Skip the migration.") + + else: + for column in add_columns: + try: + print(f"{NAME}: Add column {column[0]} to the {table_name} table...") + cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") + conn.commit() + print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") + except obj.errors.DuplicateColumn as error: + print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") + conn.rollback() + except obj.errors.FeatureNotSupported as error: + print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") + conn.rollback() diff --git a/src/modules/database.py b/src/modules/database.py index c22dfaf30..397e0117a 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -41,9 +41,9 @@ class DatabaseClient: errors (psycopg2.errors): A collection of error classes for exceptions raised by the psycopg2 module. Methods: - _create_connection_pool(): Create a connection pool for the PostgreSQL database. - _get_connection(): Get a connection from the connection pool. - _close_connection(connection): Close the connection and return it to the connection pool. + create_connection_pool(): Create a connection pool for the PostgreSQL database. + get_connection(): Get a connection from the connection pool. + close_connection(connection): Close the connection and return it to the connection pool. _prepare_db(): Prepare the database by creating and initializing the necessary tables. _migrations(): Execute database migrations to update the database schema or data. _is_migration_executed(migration_name): Check if a migration has already been executed. @@ -102,7 +102,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: Returns: pool.SimpleConnectionPool: A connection pool for the PostgreSQL database. """ - db_configuration = self.vault.read_secret(path='configuration/database') + db_configuration = self.vault.kv2engine.read_secret(path='configuration/database') log.info( '[Database]: Creating a connection pool for the %s:%s/%s', db_configuration['host'], db_configuration['port'], db_configuration['database'] @@ -117,7 +117,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: database=db_configuration['database'] ) - def _get_connection(self) -> psycopg2.extensions.connection: + def get_connection(self) -> psycopg2.extensions.connection: """ Get a connection from the connection pool. 
@@ -126,7 +126,7 @@ def _get_connection(self) -> psycopg2.extensions.connection: """ return self.database_connections.getconn() - def _close_connection(self, connection: psycopg2.extensions.connection) -> None: + def close_connection(self, connection: psycopg2.extensions.connection) -> None: """ Close the cursor and return it to the connection pool. @@ -230,11 +230,11 @@ def _create_table( To create a new table called 'users' with columns 'id' and 'name', you can call the method like this: >>> _create_table('users', 'id INTEGER PRIMARY KEY, name TEXT') """ - conn = self._get_connection() + conn = self.get_connection() with conn.cursor() as cursor: cursor.execute(f"CREATE TABLE IF NOT EXISTS {table_name} ({columns})") conn.commit() - self._close_connection(conn) + self.close_connection(conn) @reconnect_on_exception def _insert( @@ -260,11 +260,11 @@ def _insert( """ try: sql_query = f"INSERT INTO {table_name} ({', '.join(columns)}) VALUES ({', '.join(['%s'] * len(columns))})" - conn = self._get_connection() + conn = self.get_connection() with conn.cursor() as cursor: cursor.execute(sql_query, values) conn.commit() - self._close_connection(conn) + self.close_connection(conn) except (psycopg2.Error, IndexError) as error: log.error( '[Database]: An error occurred while inserting a row into the table %s: %s\nColumns: %s\nValues: %s\nQuery: %s', @@ -309,11 +309,11 @@ def _select( if kwargs.get('limit', None): sql_query += f" LIMIT {kwargs.get('limit')}" - conn = self._get_connection() + conn = self.get_connection() with conn.cursor() as cursor: cursor.execute(sql_query) response = cursor.fetchall() - self._close_connection(conn) + self.close_connection(conn) return response if response else None @reconnect_on_exception @@ -334,11 +334,11 @@ def _update( Examples: >>> _update('users', "username='new_username', password='new_password'", "id=1") """ - conn = self._get_connection() + conn = self.get_connection() with conn.cursor() as cursor: cursor.execute(f"UPDATE {table_name} SET {values} WHERE {condition}") conn.commit() - self._close_connection(conn) + self.close_connection(conn) @reconnect_on_exception def _delete( @@ -357,11 +357,11 @@ def _delete( To delete all rows from the 'users' table where the 'username' column is 'john': >>> db._delete('users', "username='john'") """ - conn = self._get_connection() + conn = self.get_connection() with conn.cursor() as cursor: cursor.execute(f"DELETE FROM {table_name} WHERE {condition}") conn.commit() - self._close_connection(conn) + self.close_connection(conn) def _reset_stale_records(self) -> None: """ diff --git a/src/modules/downloader.py b/src/modules/downloader.py index 717408b17..0333634cd 100644 --- a/src/modules/downloader.py +++ b/src/modules/downloader.py @@ -63,7 +63,7 @@ def __init__( if configuration: self.configuration = configuration elif not configuration: - self.configuration = vault.read_secret(path='configuration/downloader-api') + self.configuration = vault.kv2engine.read_secret(path='configuration/downloader-api') else: raise FailedCreateDownloaderInstance( "Failed to initialize the Downloader instance." 
diff --git a/src/modules/uploader.py b/src/modules/uploader.py index 011b76da5..329b4df8d 100644 --- a/src/modules/uploader.py +++ b/src/modules/uploader.py @@ -57,7 +57,7 @@ def __init__( if configuration: self.configuration = configuration elif not configuration: - self.configuration = vault.read_secret(path='configuration/uploader-api') + self.configuration = vault.kv2engine.read_secret(path='configuration/uploader-api') else: raise FailedInitUploaderInstance( "Failed to initialize the Uploader instance." From 05eda06f90fd9954403b56a2a2db79f6ce1afc25 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 01:44:00 +0400 Subject: [PATCH 002/148] fixed error --- CHANGELOG.md | 2 + pyproject.toml | 2 +- src/migrations/0002_messages_table.py | 1 + src/migrations/0003_users_table.py | 2 +- tests/conftest.py | 150 ++++++++++++++++++-------- tests/postgres/tables.sql | 73 +++++++++++++ 6 files changed, 186 insertions(+), 44 deletions(-) create mode 100644 tests/postgres/tables.sql diff --git a/CHANGELOG.md b/CHANGELOG.md index f79414743..61e1582bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p * bump users-package to major version `3.0.0` * bump telegram-package to major version `2.0.1` * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) +#### 🐛 Bug Fixes +* general bug fixes and improvements ## v2.2.1 - 2024-08-24 diff --git a/pyproject.toml b/pyproject.toml index 6e761260b..65cfce439 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pyinstabot-downloader" -version = "2.2.1" +version = "2.3.0" description = "This project is a Telegram bot that allows you to upload posts from your Instagram profile to clouds like Dropbox, Mega or any WebDav compatible cloud storage." authors = ["Bervinov Oleg "] maintainers = ["Bervinov Oleg "] diff --git a/src/migrations/0002_messages_table.py b/src/migrations/0002_messages_table.py index 73112171b..cb27d76a9 100644 --- a/src/migrations/0002_messages_table.py +++ b/src/migrations/0002_messages_table.py @@ -1,3 +1,4 @@ +# pylint: disable=C0103,R0914,R0801 """ Add additional column 'created_at' and replace column 'timestamp' with 'updated_at' in the messages table. https://github.com/obervinov/pyinstabot-downloader/issues/62 diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py index 652552cf8..7c96b729f 100644 --- a/src/migrations/0003_users_table.py +++ b/src/migrations/0003_users_table.py @@ -1,4 +1,4 @@ -# pylint: disable=C0103,R0914 +# pylint: disable=C0103,R0914,R0801 """ Add additional column 'status' in the users table. 
https://github.com/obervinov/users-package/blob/v3.0.0/tests/postgres/tables.sql diff --git a/tests/conftest.py b/tests/conftest.py index d2930de7a..901fd873e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,6 +7,7 @@ import time import requests import pytest +import hvac # pylint: disable=E0401 from vault import VaultClient @@ -77,9 +78,9 @@ def fixture_vault_url(prepare_dev_environment): return url -@pytest.fixture(name="name", scope='session') -def fixture_name(): - """Returns the project name""" +@pytest.fixture(name="namespace", scope='session') +def fixture_namespace(): + """Returns the project namespace""" return "pyinstabot-downloader" @@ -89,35 +90,115 @@ def fixture_policy_path(): return "tests/vault/policy.hcl" -@pytest.fixture(name="vault_approle", scope='session') -def fixture_vault_approle(vault_url, name, policy_path): - """Prepare a temporary Vault instance and return the Vault client""" - configurator = VaultClient( - url=vault_url, - name=name, - new=True - ) - namespace = configurator.create_namespace( - name=name - ) - policy = configurator.create_policy( - name=name, - path=policy_path +@pytest.fixture(name="psql_tables_path", scope='session') +def fixture_psql_tables_path(): + """Returns the path to the postgres sql file with tables""" + return "tests/postgres/tables.sql" + + +@pytest.fixture(name="postgres_url", scope='session') +def fixture_postgres_url(): + """Returns the postgres url""" + return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" + + +@pytest.fixture(name="prepare_vault", scope='session') +def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): + """Returns the vault client""" + client = hvac.Client(url=vault_url) + init_data = client.sys.initialize() + + # Unseal the vault + if client.sys.is_sealed(): + client.sys.submit_unseal_keys(keys=[init_data['keys'][0], init_data['keys'][1], init_data['keys'][2]]) + # Authenticate in the vault server using the root token + client = hvac.Client(url=vault_url, token=init_data['root_token']) + + # Create policy + with open(policy_path, 'rb') as policyfile: + _ = client.sys.create_or_update_policy( + name=namespace, + policy=policyfile.read().decode("utf-8"), ) - return configurator.create_approle( - name=name, + + # Create Namespace + _ = client.sys.enable_secrets_engine( + backend_type='kv', path=namespace, - policy=policy + options={'version': 2} ) + # Prepare AppRole for the namespace + client.sys.enable_auth_method( + method_type='approle', + path=namespace + ) + _ = client.auth.approle.create_or_update_approle( + role_name=namespace, + token_policies=[namespace], + token_type='service', + secret_id_num_uses=0, + token_num_uses=0, + token_ttl='15s', + bind_secret_id=True, + token_no_default_policy=True, + mount_point=namespace + ) + approle_adapter = hvac.api.auth_methods.AppRole(client.adapter) + + # Prepare database engine configuration + client.sys.enable_secrets_engine( + backend_type='database', + path='database' + ) + + # Configure database engine + configuration = client.secrets.database.configure( + name="postgresql", + plugin_name="postgresql-database-plugin", + verify_connection=False, + allowed_roles=["test-role"], + username="postgres", + password="postgres", + connection_url=postgres_url + ) + print(f"Configured database engine: {configuration}") + + # Create role for the database + statement = ( + "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; " + "GRANT ALL PRIVILEGES ON ALL TABLES IN 
SCHEMA public TO \"{{name}}\"; " + "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" + ) + role = client.secrets.database.create_role( + name="test-role", + db_name="postgresql", + creation_statements=statement, + default_ttl="1h", + max_ttl="24h" + ) + print(f"Created role: {role}") + + # Return the role_id and secret_id + return { + 'id': approle_adapter.read_role_id(role_name=namespace, mount_point=namespace)["data"]["role_id"], + 'secret-id': approle_adapter.generate_secret_id(role_name=namespace, mount_point=namespace)["data"]["secret_id"] + } + @pytest.fixture(name="vault_instance", scope='session') -def fixture_vault_instance(vault_url, vault_approle, name): - """Returns an initialized vault instance""" +def fixture_vault_instance(vault_url, namespace, prepare_vault): + """Returns client of the configurator""" return VaultClient( url=vault_url, - name=name, - approle=vault_approle + namespace=namespace, + auth={ + 'type': 'approle', + 'approle': { + 'id': prepare_vault['id'], + 'secret-id': prepare_vault['secret-id'] + } + } ) @@ -140,13 +221,13 @@ def fixture_vault_configuration_data(vault_instance): 'database': 'pyinstabot-downloader' } for key, value in database.items(): - _ = vault_instance.write_secret( + _ = vault_instance.kv2engine.write_secret( path='configuration/database', key=key, value=value ) - _ = vault_instance.write_secret( + _ = vault_instance.kv2engine.write_secret( path='configuration/telegram', key='token', value=os.getenv("TG_TOKEN") @@ -168,23 +249,8 @@ def fixture_vault_configuration_data(vault_instance): } user_id = os.getenv("TG_USERID") for key, value in user_attributes.items(): - _ = vault_instance.write_secret( + _ = vault_instance.kv2engine.write_secret( path=f'configuration/users/{user_id}', key=key, value=value ) - - test_owner = { - "eiD5aech8Oh": "downloaded", - "eiD5aech8Oa": "downloaded", - "eiD5aech8Oq": "downloaded", - "eiD5aech8Ol": "downloaded", - "eiD5aech8Op": "downloaded", - "eiD5aech8Oy": "downloaded" - } - for key, value in test_owner.items(): - _ = vault_instance.write_secret( - path='history/testOwner', - key=key, - value=value - ) diff --git a/tests/postgres/tables.sql b/tests/postgres/tables.sql new file mode 100644 index 000000000..d76463748 --- /dev/null +++ b/tests/postgres/tables.sql @@ -0,0 +1,73 @@ +-- Schema for the users table +CREATE TABLE users ( + id serial PRIMARY KEY, + user_id VARCHAR (50) UNIQUE NOT NULL, + chat_id VARCHAR (50) NOT NULL, + status VARCHAR (50) NOT NULL DEFAULT 'denied' +); + +-- Schema for the users_requests table +CREATE TABLE users_requests ( + id serial PRIMARY KEY, + user_id VARCHAR (50) NOT NULL, + message_id VARCHAR (50), + chat_id VARCHAR (50), + authentication VARCHAR (50) NOT NULL, + "authorization" VARCHAR (255) NOT NULL, + timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + rate_limits TIMESTAMP +); + +-- Schema for queue table +CREATE TABLE queue ( + id serial PRIMARY KEY, + user_id VARCHAR (50) NOT NULL, + post_id VARCHAR (50) NOT NULL, + post_url VARCHAR (255) NOT NULL, + post_owner VARCHAR (50) NOT NULL, + link_type VARCHAR (50) NOT NULL DEFAULT 'post', + message_id VARCHAR (50) NOT NULL, + chat_id VARCHAR (50) NOT NULL, + scheduled_time TIMESTAMP NOT NULL, + download_status VARCHAR (50) NOT NULL DEFAULT 'not started', + upload_status VARCHAR (50) NOT NULL DEFAULT 'not started', + timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + state VARCHAR (50) NOT NULL DEFAULT 'waiting' +); + +-- Schema for the processed table +CREATE TABLE processed ( + id serial 
PRIMARY KEY, + user_id VARCHAR (50) NOT NULL, + post_id VARCHAR (50) NOT NULL, + post_url VARCHAR (255) NOT NULL, + post_owner VARCHAR (50) NOT NULL, + link_type VARCHAR (50) NOT NULL DEFAULT 'post', + message_id VARCHAR (50) NOT NULL, + chat_id VARCHAR (50) NOT NULL, + download_status VARCHAR (50) NOT NULL, + upload_status VARCHAR (50) NOT NULL, + timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + state VARCHAR (50) NOT NULL DEFAULT 'processed' +); + +-- Schema for the migrations table +CREATE TABLE migrations ( + id serial PRIMARY KEY, + name VARCHAR (255) NOT NULL, + version VARCHAR (255) NOT NULL, + timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- Schema for the messages table +CREATE TABLE messages ( + id serial PRIMARY KEY, + message_id VARCHAR (50) NOT NULL, + chat_id VARCHAR (50) NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + message_type VARCHAR (50) NOT NULL, + producer VARCHAR (50) NOT NULL, + message_content_hash VARCHAR (64) NOT NULL, + state VARCHAR (50) NOT NULL DEFAULT 'added' +); From 49228759476986f9b48ceda38f3ad945970c91b3 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 01:57:47 +0400 Subject: [PATCH 003/148] modified: src/migrations/0001_vault_historical_data.py modified: tests/test_init.py --- src/migrations/0001_vault_historical_data.py | 2 +- tests/test_init.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/migrations/0001_vault_historical_data.py b/src/migrations/0001_vault_historical_data.py index 225d87827..389377119 100644 --- a/src/migrations/0001_vault_historical_data.py +++ b/src/migrations/0001_vault_historical_data.py @@ -1,4 +1,4 @@ -# pylint: disable=C0103,R0914 +# pylint: disable=C0103,R0914,R0801 """ Migrates historical data from the Vault to the processed table in the database. https://github.com/obervinov/pyinstabot-downloader/issues/30 diff --git a/tests/test_init.py b/tests/test_init.py index ab49ceeb7..49f3ddb6d 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -6,15 +6,15 @@ @pytest.mark.order(1) -def test_init_dev_environment(vault_configuration_data, vault_approle): +def test_init_dev_environment(vault_configuration_data, prepare_vault): """ Check the function for the user who is allow access to the bot """ _ = vault_configuration_data command = ( "export VAULT_ADDR=http://vault-server:8200 && " - f"export VAULT_APPROLE_ID={vault_approle['id']} && " - f"export VAULT_APPROLE_SECRETID={vault_approle['secret-id']} && " + f"export VAULT_APPROLE_ID={prepare_vault['id']} && " + f"export VAULT_APPROLE_SECRETID={prepare_vault['secret-id']} && " "docker compose -f docker-compose.yml up -d --force-recreate --build pyinstabot-downloader" ) with subprocess.Popen(command, shell=True): From 88ba8c67dfa0f12795ad93deaf1c12e7ae3612fa Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 15:36:04 +0400 Subject: [PATCH 004/148] fixed ordering in database migrations and writing tests: part1 --- CHANGELOG.md | 2 +- src/bot.py | 2 +- src/modules/database.py | 17 +++++++--- tests/conftest.py | 75 ++++++++++++++++++++++++----------------- tests/test_database.py | 74 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 133 insertions(+), 37 deletions(-) create mode 100644 tests/test_database.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 61e1582bb..218f546e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-08-25 +## v2.3.0 - 2024-08-26 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/src/bot.py b/src/bot.py index 715036f3d..949562659 100644 --- a/src/bot.py +++ b/src/bot.py @@ -62,7 +62,7 @@ uploader.run_transfers.return_value = 'completed' # Client for communication with the database -database = DatabaseClient(vault=vault) +database = DatabaseClient(vault=vault, db_role=TELEGRAM_BOT_NAME) # Metrics exporter metrics = Metrics(port=METRICS_PORT, interval=METRICS_INTERVAL, metrics_prefix=TELEGRAM_BOT_NAME, vault=vault, database=database) diff --git a/src/modules/database.py b/src/modules/database.py index 397e0117a..95218c268 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -38,6 +38,7 @@ class DatabaseClient: Attributes: database_connections (psycopg2.extensions.connection): A connection to the PostgreSQL database. vault (object): An object representing a HashiCorp Vault client for retrieving secrets. + db_role (str): The role to use for generating database credentials. errors (psycopg2.errors): A collection of error classes for exceptions raised by the psycopg2 module. Methods: @@ -72,13 +73,15 @@ class DatabaseClient: """ def __init__( self, - vault: object = None + vault: object = None, + db_role: str = None ) -> None: """ Initializes a new instance of the Database client. Args: vault (object): An object representing a HashiCorp Vault client for retrieving secrets with the database configuration. + db_role (str): The role to use for generating database credentials. Examples: To create a new instance of the Database class: @@ -88,6 +91,7 @@ def __init__( >>> db = Database(vault=vault) """ self.vault = vault + self.db_role = db_role self.errors = psycopg2.errors self.database_connections = self.create_connection_pool() @@ -103,6 +107,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: pool.SimpleConnectionPool: A connection pool for the PostgreSQL database. 
""" db_configuration = self.vault.kv2engine.read_secret(path='configuration/database') + db_credentials = self.vault.dbengine.generate_credentials(role=self.db_role) log.info( '[Database]: Creating a connection pool for the %s:%s/%s', db_configuration['host'], db_configuration['port'], db_configuration['database'] @@ -112,8 +117,8 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: maxconn=db_configuration['connections'], host=db_configuration['host'], port=db_configuration['port'], - user=db_configuration['user'], - password=db_configuration['password'], + user=db_credentials['username'], + password=db_credentials['password'], database=db_configuration['database'] ) @@ -172,8 +177,10 @@ def _migrations(self) -> None: # Migrations directory migrations_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../migrations')) sys.path.append(migrations_dir) + migration_files = [f for f in os.listdir(migrations_dir) if f.endswith('.py')] + migration_files.sort() - for migration_file in os.listdir(migrations_dir): + for migration_file in migration_files: if migration_file.endswith('.py'): migration_module_name = migration_file[:-3] @@ -185,6 +192,8 @@ def _migrations(self) -> None: self._mark_migration_as_executed(migration_name=migration_module_name, version=version) else: log.info('[Database] Migrations: the %s has already been executed and was skipped', migration_module_name) + else: + log.error('[Database]: Migrations: the %s is not a valid migration file', migration_file) def _is_migration_executed( self, diff --git a/tests/conftest.py b/tests/conftest.py index 901fd873e..5c4f0fdc9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,12 +2,11 @@ This module stores fixtures for performing tests. """ import os -import sys -import subprocess import time import requests import pytest import hvac +import psycopg2 # pylint: disable=E0401 from vault import VaultClient @@ -31,31 +30,6 @@ def test_example(): config.addinivalue_line("markers", "order: Set the execution order of tests") -@pytest.fixture(name="prepare_dev_environment", scope='session') -def fixture_prepare_dev_environment(): - """ - Prepare a local environment or ci environment and return the URL of the Vault server - """ - if not os.getenv("CI"): - if not os.getenv("TG_USERID"): - print("You need to set the TG_USER_ID environment variable to run the tests (telegram user-id)") - sys.exit(1) - if not os.getenv("TG_TOKEN"): - print("You need to set the TG_TOKEN environment variable to run the tests (telegram token)") - sys.exit(1) - command = ( - "vault=$(docker ps -a | grep vault | awk '{print $1}') && " - "bot=$(docker ps -a | grep pyinstabot-downloader | awk '{print $1}') && " - "[ -n '$vault' ] && docker container rm -f $vault && " - "[ -n '$bot' ] && docker container rm -f $bot && " - "docker compose -f docker-compose.dev.yml up -d" - ) - with subprocess.Popen(command, shell=True): - print("Running dev environment...") - return 'ready' - return None - - @pytest.fixture(name="vault_url", scope='session') def fixture_vault_url(prepare_dev_environment): """Prepare a local environment or ci environment and return the URL of the Vault server""" @@ -102,6 +76,25 @@ def fixture_postgres_url(): return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" +@pytest.fixture(name="postgres_instance", scope='session') +def fixture_postgres_instance(psql_tables_path): + """Prepare the postgres database, return the connection and cursor""" + # Prepare database for tests + psql_connection = 
psycopg2.connect( + host='0.0.0.0', + port=5432, + user='postgres', + password='postgres', + dbname='postgres' + ) + psql_cursor = psql_connection.cursor() + with open(psql_tables_path, 'r', encoding='utf-8') as sql_file: + sql_script = sql_file.read() + psql_cursor.execute(sql_script) + psql_connection.commit() + return psql_connection, psql_cursor + + @pytest.fixture(name="prepare_vault", scope='session') def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): """Returns the vault client""" @@ -179,10 +172,11 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): ) print(f"Created role: {role}") - # Return the role_id and secret_id + # Return the role_id, secret_id and db_role return { 'id': approle_adapter.read_role_id(role_name=namespace, mount_point=namespace)["data"]["role_id"], - 'secret-id': approle_adapter.generate_secret_id(role_name=namespace, mount_point=namespace)["data"]["secret_id"] + 'secret-id': approle_adapter.generate_secret_id(role_name=namespace, mount_point=namespace)["data"]["secret_id"], + 'db_role': 'test-role' } @@ -216,8 +210,6 @@ def fixture_vault_configuration_data(vault_instance): database = { 'host': 'postgres', 'port': '5432', - 'user': 'python', - 'password': 'python', 'database': 'pyinstabot-downloader' } for key, value in database.items(): @@ -254,3 +246,24 @@ def fixture_vault_configuration_data(vault_instance): key=key, value=value ) + bot_configurations = [ + { + 'path': 'configuration/downloader-api', + 'data': { + 'enabled': 'false', + } + }, + { + 'path': 'configuration/uploader-api', + 'data': { + 'enabled': 'false', + } + } + ] + for configuration in bot_configurations: + for key, value in configuration['data'].items(): + _ = vault_instance.kv2engine.write_secret( + path=configuration['path'], + key=key, + value=value + ) diff --git a/tests/test_database.py b/tests/test_database.py new file mode 100644 index 000000000..42636de65 --- /dev/null +++ b/tests/test_database.py @@ -0,0 +1,74 @@ +""" +This module contains tests for the database module. 
+""" + +import os +import sys +import json +import importlib +import pytest +import psycopg2 +from psycopg2 import pool +from src.modules.database import DatabaseClient + + +# pylint: disable=too-many-locals +@pytest.mark.order(2) +def test_init_database_client(prepare_vault, vault_instance, vault_configuration_data, postgres_instance): + """ + Checking an initialized database client + """ + _ = vault_configuration_data + _, cursor = postgres_instance + db_role = prepare_vault['db_role'] + database = DatabaseClient(vault=vault_instance, db_role=db_role) + + # Check general attributes + assert isinstance(database.vault, object) + assert isinstance(database.db_role, str) + assert isinstance(database.database_connections, pool.SimpleConnectionPool) + + # Check tables creation in the database + cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public'") + tables_list = cursor.fetchall() + tables_configuration_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src/configs/databases.json')) + with open(tables_configuration_path, encoding='UTF-8') as config_file: + database_init_configuration = json.load(config_file) + for table in database_init_configuration.get('Tables', None): + if table['name'] not in [table[2] for table in tables_list]: + assert False + + # Check migrations execution in the database + cursor.execute("SELECT * FROM migrations") + migrations_list = cursor.fetchall() + assert len(migrations_list) > 0 + + migrations_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'src/migrations')) + sys.path.append(migrations_dir) + migration_files = [f for f in os.listdir(migrations_dir) if f.endswith('.py')] + migration_files.sort() + for migration_file in migration_files: + if not migration_file.endswith('.py'): + assert False + else: + migration_module_name = migration_file[:-3] + migration_module = importlib.import_module(name=migration_module_name) + version = getattr(migration_module, 'VERSION', migration_module_name) + name = getattr(migration_module, 'NAME', migration_module_name) + if (version, name) not in migrations_list: + assert False + + +# @pytest.mark.order(4) +# def test_database_connection(prepare_vault, vault_instance, vault_configuration_data, postgres_instance): +# """ + +# """ +# _ = vault_configuration_data +# _ = postgres_instance +# db_role = prepare_vault['db_role'] +# database = DatabaseClient(vault=vault_instance, db_role=db_role) + +# connection = database.get_connection() +# assert isinstance(connection, psycopg2.extensions.connection) + From 0996a5cf50f84b09bb78085954e4ceb467b2b381 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 18:23:59 +0400 Subject: [PATCH 005/148] fixed typos in `test_init_database_client()` --- tests/conftest.py | 3 +-- tests/test_database.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5c4f0fdc9..2482337b5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -31,9 +31,8 @@ def test_example(): @pytest.fixture(name="vault_url", scope='session') -def fixture_vault_url(prepare_dev_environment): +def fixture_vault_url(): """Prepare a local environment or ci environment and return the URL of the Vault server""" - _ = prepare_dev_environment # prepare vault for local environment if not os.getenv("CI"): url = "http://0.0.0.0:8200" diff --git a/tests/test_database.py b/tests/test_database.py index 42636de65..942b58f12 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -62,7 
+62,7 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration # @pytest.mark.order(4) # def test_database_connection(prepare_vault, vault_instance, vault_configuration_data, postgres_instance): # """ - + # """ # _ = vault_configuration_data # _ = postgres_instance @@ -71,4 +71,3 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration # connection = database.get_connection() # assert isinstance(connection, psycopg2.extensions.connection) - From bc7cd879a9c6fec37726424e9307caf4bf880a36 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 18:31:29 +0400 Subject: [PATCH 006/148] fixed vault policy for support database engine --- tests/vault/policy.hcl | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/vault/policy.hcl b/tests/vault/policy.hcl index 2a97205a6..4066f0443 100644 --- a/tests/vault/policy.hcl +++ b/tests/vault/policy.hcl @@ -23,6 +23,14 @@ path "sys/mounts/pyinstabot-downloader" { capabilities = ["read", "create", "update"] } +# Operations for pytest +# Allow reading database credentials for a role +path "database/creds/test-role" { + capabilities = ["read"] +} + +############################################################### + # Operations for the module # Read and update namespace configuration path "pyinstabot-downloader/config" { @@ -55,3 +63,8 @@ path "pyinstabot-downloader/data/history/*" { path "pyinstabot-downloader/metadata/configuration/users" { capabilities = ["read", "list"] } + +# Allow reading database credentials for a role +path "database/creds/pyinstabot-downloader"{ + capabilities = ["read"] +} From d1f10980f2629317d15aeb4d9df18a97bc7cd037 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 18:35:33 +0400 Subject: [PATCH 007/148] fixed test data for pytests --- tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2482337b5..b0fd1ded6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -209,7 +209,8 @@ def fixture_vault_configuration_data(vault_instance): database = { 'host': 'postgres', 'port': '5432', - 'database': 'pyinstabot-downloader' + 'database': 'pyinstabot-downloader', + 'connections': '10' } for key, value in database.items(): _ = vault_instance.kv2engine.write_secret( From 97286ba32f54ae345ad57f9b66aa0631ac72a79a Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 18:40:10 +0400 Subject: [PATCH 008/148] fixed psql server address for tests --- tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index b0fd1ded6..fd4923902 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -72,7 +72,7 @@ def fixture_psql_tables_path(): @pytest.fixture(name="postgres_url", scope='session') def fixture_postgres_url(): """Returns the postgres url""" - return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" + return "postgresql://{{username}}:{{password}}@localhost:5432/postgres?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') @@ -80,7 +80,7 @@ def fixture_postgres_instance(psql_tables_path): """Prepare the postgres database, return the connection and cursor""" # Prepare database for tests psql_connection = psycopg2.connect( - host='0.0.0.0', + host='localhost', port=5432, user='postgres', password='postgres', @@ -207,7 +207,7 @@ def fixture_vault_configuration_data(vault_instance): None """ database = { - 'host': 'postgres', + 'host': 
'localhost', 'port': '5432', 'database': 'pyinstabot-downloader', 'connections': '10' From c158f8e32f2b32fa9adc1ed162320dfcc28561f2 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 26 Aug 2024 18:46:10 +0400 Subject: [PATCH 009/148] modified: tests/conftest.py --- tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index fd4923902..30c8abab6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -72,7 +72,7 @@ def fixture_psql_tables_path(): @pytest.fixture(name="postgres_url", scope='session') def fixture_postgres_url(): """Returns the postgres url""" - return "postgresql://{{username}}:{{password}}@localhost:5432/postgres?sslmode=disable" + return "postgresql://{{username}}:{{password}}@0.0.0.0:5432/postgres?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') @@ -80,7 +80,7 @@ def fixture_postgres_instance(psql_tables_path): """Prepare the postgres database, return the connection and cursor""" # Prepare database for tests psql_connection = psycopg2.connect( - host='localhost', + host='0.0.0.0', port=5432, user='postgres', password='postgres', @@ -207,7 +207,7 @@ def fixture_vault_configuration_data(vault_instance): None """ database = { - 'host': 'localhost', + 'host': '0.0.0.0', 'port': '5432', 'database': 'pyinstabot-downloader', 'connections': '10' From 7d56b25eb8d8ac9d8871e79d63c40cd1d06a2633 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 13:43:53 +0400 Subject: [PATCH 010/148] modified: CHANGELOG.md modified: tests/conftest.py --- CHANGELOG.md | 2 +- tests/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 218f546e4..942fd80b9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
-## v2.3.0 - 2024-08-26 +## v2.3.0 - 2024-08-27 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/tests/conftest.py b/tests/conftest.py index 30c8abab6..edf482324 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -72,7 +72,7 @@ def fixture_psql_tables_path(): @pytest.fixture(name="postgres_url", scope='session') def fixture_postgres_url(): """Returns the postgres url""" - return "postgresql://{{username}}:{{password}}@0.0.0.0:5432/postgres?sslmode=disable" + return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') From ba268fe9b84f8f9900fb0adbe762296dbbedfccc Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 14:40:12 +0400 Subject: [PATCH 011/148] fixed tests/conftest.py --- tests/conftest.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index edf482324..71f8af723 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -87,6 +87,17 @@ def fixture_postgres_instance(psql_tables_path): dbname='postgres' ) psql_cursor = psql_connection.cursor() + psql_cursor.execute('CREATE DATABASE pyinstabot_downloader;') + psql_connection.close() + + psql_connection = psycopg2.connect( + host='0.0.0.0', + port=5432, + user='postgres', + password='postgres', + dbname='pyinstabot_downloader' + ) + psql_cursor = psql_connection.cursor() with open(psql_tables_path, 'r', encoding='utf-8') as sql_file: sql_script = sql_file.read() psql_cursor.execute(sql_script) From 06b2375d032e3a82d5d3973a359094b164a3ac8a Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 14:52:16 +0400 Subject: [PATCH 012/148] modified: tests/conftest.py --- tests/conftest.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 71f8af723..2ed0f9ab2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -86,8 +86,12 @@ def fixture_postgres_instance(psql_tables_path): password='postgres', dbname='postgres' ) + psql_connection.set_session(autocommit=True) psql_cursor = psql_connection.cursor() - psql_cursor.execute('CREATE DATABASE pyinstabot_downloader;') + try: + psql_cursor.execute('CREATE DATABASE pyinstabot_downloader;') + except psycopg2.errors.DuplicateDatabase: + pass psql_connection.close() psql_connection = psycopg2.connect( From 7bceed3bbaffaed887212c47d7c0fe88c1da5cd1 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 14:56:19 +0400 Subject: [PATCH 013/148] modified: tests/conftest.py --- tests/conftest.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2ed0f9ab2..846b3b724 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -110,8 +110,12 @@ def fixture_postgres_instance(psql_tables_path): @pytest.fixture(name="prepare_vault", scope='session') -def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): +def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postgres_instance): """Returns the vault client""" + # Wait for the postgres instance to be ready + _ = postgres_instance + + # Initialize the vault client = hvac.Client(url=vault_url) init_data = client.sys.initialize() From f4dd50e8a93d73fb249416a6c5bfee544d21e5ed Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 15:12:13 +0400 
Subject: [PATCH 014/148] modified: tests/conftest.py --- tests/conftest.py | 22 ++-------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 846b3b724..00320f52a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -86,21 +86,6 @@ def fixture_postgres_instance(psql_tables_path): password='postgres', dbname='postgres' ) - psql_connection.set_session(autocommit=True) - psql_cursor = psql_connection.cursor() - try: - psql_cursor.execute('CREATE DATABASE pyinstabot_downloader;') - except psycopg2.errors.DuplicateDatabase: - pass - psql_connection.close() - - psql_connection = psycopg2.connect( - host='0.0.0.0', - port=5432, - user='postgres', - password='postgres', - dbname='pyinstabot_downloader' - ) psql_cursor = psql_connection.cursor() with open(psql_tables_path, 'r', encoding='utf-8') as sql_file: sql_script = sql_file.read() @@ -110,11 +95,8 @@ def fixture_postgres_instance(psql_tables_path): @pytest.fixture(name="prepare_vault", scope='session') -def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postgres_instance): +def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): """Returns the vault client""" - # Wait for the postgres instance to be ready - _ = postgres_instance - # Initialize the vault client = hvac.Client(url=vault_url) init_data = client.sys.initialize() @@ -228,7 +210,7 @@ def fixture_vault_configuration_data(vault_instance): database = { 'host': '0.0.0.0', 'port': '5432', - 'database': 'pyinstabot-downloader', + 'database': 'postgres', 'connections': '10' } for key, value in database.items(): From 2a052173be632e205ca9665a01734cc292d2a9ec Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 15:36:32 +0400 Subject: [PATCH 015/148] modified: tests/conftest.py --- tests/conftest.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 00320f52a..7c16cc38d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -160,7 +160,12 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): # Create role for the database statement = ( "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; " - "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; " + "ALTER TABLE public.users OWNER TO \"{{name}}\"; " + "ALTER TABLE public.users_requests OWNER TO \"{{name}}\"; " + "ALTER TABLE public.messages OWNER TO \"{{name}}\"; " + "ALTER TABLE public.queue OWNER TO \"{{name}}\"; " + "ALTER TABLE public.processed OWNER TO \"{{name}}\"; " + "ALTER TABLE public.migrations OWNER TO \"{{name}}\"; " "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) role = client.secrets.database.create_role( From d303fc1f72c760af3f1ce13c27a5ecacca4980fc Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 15:37:42 +0400 Subject: [PATCH 016/148] modified: tests/conftest.py --- tests/conftest.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7c16cc38d..8dddcecb2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -95,8 +95,11 @@ def fixture_postgres_instance(psql_tables_path): @pytest.fixture(name="prepare_vault", scope='session') -def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url): +def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postgres_instance): """Returns the vault client""" + # Wait for the 
postgres database to be ready + _ = postgres_instance + # Initialize the vault client = hvac.Client(url=vault_url) init_data = client.sys.initialize() From a855647471a7f43e3c3d32e5e64d6407dfa90d9e Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 15:40:30 +0400 Subject: [PATCH 017/148] modified: src/modules/database.py --- src/modules/database.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 95218c268..f93cbce4e 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -384,14 +384,15 @@ def _reset_stale_records(self) -> None: columns=("id", "state"), condition="message_type = 'status_message'", ) - for message in status_messages: - if message[1] != 'updated': - self._update( - table_name='messages', - values="state = 'updated'", - condition=f"id = '{message[0]}'" - ) - log.info('[Database]: Stale status messages have been reset') + if status_messages: + for message in status_messages: + if message[1] != 'updated': + self._update( + table_name='messages', + values="state = 'updated'", + condition=f"id = '{message[0]}'" + ) + log.info('[Database]: Stale status messages have been reset') def add_message_to_queue( self, From eefdac5519d1ce1b488c4e85d45c9d7ae99c99c7 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 15:47:07 +0400 Subject: [PATCH 018/148] modified: tests/test_database.py --- tests/test_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_database.py b/tests/test_database.py index 942b58f12..9bdab2ac3 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -31,7 +31,7 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration # Check tables creation in the database cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public'") tables_list = cursor.fetchall() - tables_configuration_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src/configs/databases.json')) + tables_configuration_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../src/configs/databases.json')) with open(tables_configuration_path, encoding='UTF-8') as config_file: database_init_configuration = json.load(config_file) for table in database_init_configuration.get('Tables', None): From 10f8c21b577e363365aa24c8c7f945c48a278dfd Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 15:54:21 +0400 Subject: [PATCH 019/148] modified: tests/test_database.py --- tests/test_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_database.py b/tests/test_database.py index 9bdab2ac3..5baa80e9f 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -43,7 +43,7 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration migrations_list = cursor.fetchall() assert len(migrations_list) > 0 - migrations_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'src/migrations')) + migrations_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../src/migrations')) sys.path.append(migrations_dir) migration_files = [f for f in os.listdir(migrations_dir) if f.endswith('.py')] migration_files.sort() From 1df9f741c07e10276c634f1771a67080a1d0df26 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 22:42:54 +0400 Subject: [PATCH 020/148] modified: src/migrations/0001_vault_historical_data.py modified: tests/test_database.py --- 
 src/migrations/0001_vault_historical_data.py | 8 ++++----
 tests/test_database.py | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/migrations/0001_vault_historical_data.py b/src/migrations/0001_vault_historical_data.py
index 389377119..145fe2c15 100644
--- a/src/migrations/0001_vault_historical_data.py
+++ b/src/migrations/0001_vault_historical_data.py
@@ -23,25 +23,25 @@ def execute(obj):
     # information about owners
     try:
-        owners = obj.vault.list_secrets(path='history/')
+        owners = obj.vault.kv2engine.list_secrets(path='history/')
         owners_counter = len(owners)
         print(f"Founded {owners_counter} owners in history")

         # reade history form Vault
         for owner in owners:
             # information about owner posts
-            posts = obj.vault.read_secret(path=f"history/{owner}")
+            posts = obj.vault.kv2engine.read_secret(path=f"history/{owner}")
             posts_counter = len(posts)
             print(f"{NAME}: Founded {posts_counter} posts in history/{owner}")

             for post in posts:
-                user_id = next(iter(obj.vault.read_secret(path='configuration/users').keys()))
+                user_id = next(iter(obj.vault.kv2engine.read_secret(path='configuration/users').keys()))
                 post_id = post
                 post_url = f"https://www.instagram.com/p/{post}"
                 post_owner = owner
                 link_type = 'post'
                 message_id = 'unknown'
-                chat_id = next(iter(obj.vault.read_secret(path='configuration/users').keys()))
+                chat_id = next(iter(obj.vault.kv2engine.read_secret(path='configuration/users').keys()))
                 download_status = 'completed'
                 upload_status = 'completed'
                 state = 'processed'
diff --git a/tests/test_database.py b/tests/test_database.py
index 5baa80e9f..1ce52f7b3 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -56,7 +56,7 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration
             version = getattr(migration_module, 'VERSION', migration_module_name)
             name = getattr(migration_module, 'NAME', migration_module_name)
             if (version, name) not in migrations_list:
-                assert False
+                print(f"Not found migration {version}:{name} in {migrations_list}")

From e81517bd200be30833eba13d3c3f40bc7232cc5c Mon Sep 17 00:00:00 2001
From: obervinov
Date: Tue, 27 Aug 2024 22:49:18 +0400
Subject: [PATCH 021/148] modified: tests/test_database.py

---
 tests/test_database.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/test_database.py b/tests/test_database.py
index 1ce52f7b3..1e30f9aa3 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -56,7 +56,8 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration
             version = getattr(migration_module, 'VERSION', migration_module_name)
             name = getattr(migration_module, 'NAME', migration_module_name)
             if (version, name) not in migrations_list:
-                print(f"Not found migration {version}:{name} in {migrations_list}")
+                print(f"Not found migration {version}:{name} in {migrations_list}")
+                assert False

 # @pytest.mark.order(4)

From d12581cf3d55b797de4328d0f15deb3b1dae9de6 Mon Sep 17 00:00:00 2001
From: obervinov
Date: Tue, 27 Aug 2024 22:58:00 +0400
Subject: [PATCH 022/148] modified: tests/test_database.py

---
 tests/test_database.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/test_database.py b/tests/test_database.py
index 1e30f9aa3..caa757801 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -39,7 +39,7 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration
             assert False

     # Check migrations execution in the database
-
cursor.execute("SELECT name, version FROM migrations") migrations_list = cursor.fetchall() assert len(migrations_list) > 0 @@ -55,8 +55,8 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration migration_module = importlib.import_module(name=migration_module_name) version = getattr(migration_module, 'VERSION', migration_module_name) name = getattr(migration_module, 'NAME', migration_module_name) - if (version, name) not in migrations_list: - print(f"Not found migration {version}:{name} in {migrations_list}") + if (name, version) not in migrations_list: + print(f"Not found migration {name}:{version} in {migrations_list}") assert False From 6a98bd4eb0ade071c27d9a23b0aaaf92895e15f6 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 27 Aug 2024 23:53:11 +0400 Subject: [PATCH 023/148] modified: src/modules/database.py modified: tests/conftest.py modified: tests/test_database.py --- src/modules/database.py | 2 +- tests/conftest.py | 75 +++++++++++++++++++++++++++++++------ tests/test_database.py | 82 +++++++++++++++++++++++++++++++++++------ 3 files changed, 135 insertions(+), 24 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index f93cbce4e..182a48a0b 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -428,7 +428,7 @@ def add_message_to_queue( ... 'link_type': 'profile', ... 'message_id': 'abcde', ... 'chat_id': 'xyz', - ... 'scheduled_time': '2022-01-01 12:00:00' + ... 'scheduled_time': '2022-01-01 12:00:00', ... 'download_status': 'not started', ... 'upload_status': 'not started' ... } diff --git a/tests/conftest.py b/tests/conftest.py index 8dddcecb2..cc5826299 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -32,7 +32,12 @@ def test_example(): @pytest.fixture(name="vault_url", scope='session') def fixture_vault_url(): - """Prepare a local environment or ci environment and return the URL of the Vault server""" + """ + Prepare a local environment or ci environment and return the URL of the Vault server + + Returns: + str: The URL of the Vault server. + """ # prepare vault for local environment if not os.getenv("CI"): url = "http://0.0.0.0:8200" @@ -53,31 +58,56 @@ def fixture_vault_url(): @pytest.fixture(name="namespace", scope='session') def fixture_namespace(): - """Returns the project namespace""" + """ + Returns the namespace for the tests + + Returns: + str: The namespace for the tests. + """ return "pyinstabot-downloader" @pytest.fixture(name="policy_path", scope='session') def fixture_policy_path(): - """Returns the policy path""" + """ + Returns the policy path for the tests + + Returns: + str: The policy path for the tests. + """ return "tests/vault/policy.hcl" @pytest.fixture(name="psql_tables_path", scope='session') def fixture_psql_tables_path(): - """Returns the path to the postgres sql file with tables""" + """ + Returns the path to the postgres sql file with tables + + Returns: + str: The path to the postgres sql file with tables. + """ return "tests/postgres/tables.sql" @pytest.fixture(name="postgres_url", scope='session') def fixture_postgres_url(): - """Returns the postgres url""" + """ + Returns the postgres url for the tests + + Returns: + str: The postgres url. 
+ """ return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') def fixture_postgres_instance(psql_tables_path): - """Prepare the postgres database, return the connection and cursor""" + """ + Prepare the postgres database, return the connection and cursor + + Returns: + tuple: The connection and cursor objects for the postgres database. + """ # Prepare database for tests psql_connection = psycopg2.connect( host='0.0.0.0', @@ -96,7 +126,12 @@ def fixture_postgres_instance(psql_tables_path): @pytest.fixture(name="prepare_vault", scope='session') def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postgres_instance): - """Returns the vault client""" + """ + Returns the vault client and prepares the vault for the tests + + Returns: + object: The vault client. + """ # Wait for the postgres database to be ready _ = postgres_instance @@ -190,7 +225,12 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg @pytest.fixture(name="vault_instance", scope='session') def fixture_vault_instance(vault_url, namespace, prepare_vault): - """Returns client of the configurator""" + """ + Returns client of the configurator vault + + Returns: + object: The vault client. + """ return VaultClient( url=vault_url, namespace=namespace, @@ -211,9 +251,6 @@ def fixture_vault_configuration_data(vault_instance): Args: vault_instance: An instance of the Vault class. - - Returns: - None """ database = { 'host': '0.0.0.0', @@ -276,3 +313,19 @@ def fixture_vault_configuration_data(vault_instance): key=key, value=value ) + + +@pytest.fixture(name="postgres_messages_test_data", scope='session') +def fixture_postgres_messages_test_data(postgres_instance): + """ + This function sets up test data in the messages table in the postgres database. + + Args: + postgres_instance: A tuple containing the connection and cursor objects for the postgres database. 
+ """ + conn, cursor = postgres_instance + cursor.execute( + "INSERT INTO messages (message_id, chat_id, created_at, updated_at, message_type, producer, message_content_hash, state) " + "VALUES ('123456', '123456', '2024-08-27 00:00:00', '2024-08-27 00:00:00', 'status_message', 'pytest', 'hash', 'updating')" + ) + conn.commit() diff --git a/tests/test_database.py b/tests/test_database.py index caa757801..01b37ebbc 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -60,15 +60,73 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration assert False -# @pytest.mark.order(4) -# def test_database_connection(prepare_vault, vault_instance, vault_configuration_data, postgres_instance): -# """ - -# """ -# _ = vault_configuration_data -# _ = postgres_instance -# db_role = prepare_vault['db_role'] -# database = DatabaseClient(vault=vault_instance, db_role=db_role) - -# connection = database.get_connection() -# assert isinstance(connection, psycopg2.extensions.connection) +@pytest.mark.order(4) +def test_reset_stale_messages(postgres_instance, postgres_messages_test_data): + """ + Checking the reset of stale messages when the database client is initialized + """ + _, cursor = postgres_instance + _ = postgres_messages_test_data + + # Check the reset of stale messages + cursor.execute("SELECT state FROM messages") + messages_list = cursor.fetchall() + assert len(messages_list) > 0 + for message in messages_list: + assert message[0] == 'updated' + + +@pytest.mark.order(5) +def test_database_connection(prepare_vault, vault_instance, postgres_instance): + """ + Checking the database connection and disconnection + """ + _ = postgres_instance + db_role = prepare_vault['db_role'] + database = DatabaseClient(vault=vault_instance, db_role=db_role) + + # Check the database connection + connection = database.get_connection() + assert isinstance(connection, psycopg2.extensions.connection) + assert not connection.closed + + # Check the database disconnection + database.close_connection(connection) + assert connection.closed + + +@pytest.mark.order(6) +def test_add_message_in_queue(prepare_vault, vault_instance, postgres_instance): + """ + Checking the addition of a message to the queue + """ + _, cursor = postgres_instance + data = { + 'user_id': '12345', + 'post_id': '67890', + 'post_url': 'https://www.instagram.com/p/67890/', + 'post_owner': 'johndoe', + 'link_type': 'profile', + 'message_id': 'abcde', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-01 12:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + } + db_role = prepare_vault['db_role'] + database = DatabaseClient(vault=vault_instance, db_role=db_role) + response = database.add_message_in_queue(data=data) + + # Check the addition of a message to the queue + cursor.execute( + "SELECT user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status " + "FROM queue WHERE message_id = 'abcde'" + ) + queue_item = cursor.fetchone() + assert len(queue_item) > 0 + assert response == f"{data['message_id']}: added to queue" + assert queue_item == ( + data['user_id'], data['post_id'], data['post_url'], + data['post_owner'], data['link_type'], data['message_id'], + data['chat_id'], data['scheduled_time'], data['download_status'], data['upload_status'] + ) From ff257c85bdb225755f128591c998185d0589c56f Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 28 Aug 2024 14:13:18 +0400 Subject: [PATCH 024/148] modified: CHANGELOG.md modified: 
src/configs/messages.json modified: tests/test_database.py --- CHANGELOG.md | 2 +- src/configs/messages.json | 2 +- tests/test_database.py | 8 +++++--- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 942fd80b9..44ec8f452 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-08-27 +## v2.3.0 - 2024-08-28 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/src/configs/messages.json b/src/configs/messages.json index 9ceb4324f..63d1b47a1 100644 --- a/src/configs/messages.json +++ b/src/configs/messages.json @@ -29,7 +29,7 @@ "args": [":information:"] }, "wrong_reschedule_queue": { - "text": "{0} Incorrect format for rescheduling messages in the queue. Please check this conditions:\n1. Post-id is a string and its length is equal to 11 characters.\n2. Date-time format is correct and the date is in the future.\n3. The message format is correct \nq1wRty12345: scheduled for 2021-12-31 23:59:59\n4. Each new message is on a new line.\n\nCurrent time: {1}", + "text": "{0} Incorrect format for rescheduling messages in the queue. Please check this conditions:\n1. Post-id is a string and its length is equal to 11 characters.\n2. Date-time format is correct and the date is in the future.\n3. The message format is correct: q1wRty12345: scheduled for 2021-12-31 23:59:59\n4. Each new message is on a new line.\n5. Timestamp is not in the past: current time {1}", "args": [":warning:", "current_time"] }, "unknown_command": { diff --git a/tests/test_database.py b/tests/test_database.py index 01b37ebbc..3beb0e583 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -61,12 +61,14 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration @pytest.mark.order(4) -def test_reset_stale_messages(postgres_instance, postgres_messages_test_data): +def test_reset_stale_messages(prepare_vault, vault_instance, postgres_instance, postgres_messages_test_data): """ Checking the reset of stale messages when the database client is initialized """ _, cursor = postgres_instance _ = postgres_messages_test_data + db_role = prepare_vault['db_role'] + _ = DatabaseClient(vault=vault_instance, db_role=db_role) # Check the reset of stale messages cursor.execute("SELECT state FROM messages") @@ -96,7 +98,7 @@ def test_database_connection(prepare_vault, vault_instance, postgres_instance): @pytest.mark.order(6) -def test_add_message_in_queue(prepare_vault, vault_instance, postgres_instance): +def test_add_message_to_queue(prepare_vault, vault_instance, postgres_instance): """ Checking the addition of a message to the queue """ @@ -115,7 +117,7 @@ def test_add_message_in_queue(prepare_vault, vault_instance, postgres_instance): } db_role = prepare_vault['db_role'] database = DatabaseClient(vault=vault_instance, db_role=db_role) - response = database.add_message_in_queue(data=data) + response = database.add_message_to_queue(data=data) # Check the addition of a message to the queue cursor.execute( From 1fd95df3c5942fb4dbfc0e1547aae162f3fa5d34 Mon Sep 17 00:00:00 2001 From: obervinov Date: Thu, 29 Aug 2024 22:55:37 +0400 Subject: [PATCH 025/148] modified: 
tests/conftest.py --- CHANGELOG.md | 2 +- tests/conftest.py | 8 ++------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 44ec8f452..48080f3bf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-08-28 +## v2.3.0 - 2024-08-29 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/tests/conftest.py b/tests/conftest.py index cc5826299..16952be6d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,12 +38,8 @@ def fixture_vault_url(): Returns: str: The URL of the Vault server. """ - # prepare vault for local environment - if not os.getenv("CI"): - url = "http://0.0.0.0:8200" - # prepare vault for ci environment - else: - url = "http://localhost:8200" + + url = "http://0.0.0.0:8200" # checking the availability of the vault server while True: try: From 8752f8f6cb89d818b260d00fa461cad64d63a431 Mon Sep 17 00:00:00 2001 From: obervinov Date: Thu, 29 Aug 2024 23:20:16 +0400 Subject: [PATCH 026/148] modified: tests/conftest.py modified: tests/test_database.py modified: tests/vault/policy.hcl --- tests/conftest.py | 9 ++- tests/test_database.py | 146 ++++++++++++++++++++--------------------- tests/vault/policy.hcl | 4 +- 3 files changed, 77 insertions(+), 82 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 16952be6d..d64b4a087 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -60,7 +60,7 @@ def fixture_namespace(): Returns: str: The namespace for the tests. 
""" - return "pyinstabot-downloader" + return "pytest" @pytest.fixture(name="policy_path", scope='session') @@ -184,7 +184,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg name="postgresql", plugin_name="postgresql-database-plugin", verify_connection=False, - allowed_roles=["test-role"], + allowed_roles=["pytest"], username="postgres", password="postgres", connection_url=postgres_url @@ -203,7 +203,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) role = client.secrets.database.create_role( - name="test-role", + name="pytest", db_name="postgresql", creation_statements=statement, default_ttl="1h", @@ -214,8 +214,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg # Return the role_id, secret_id and db_role return { 'id': approle_adapter.read_role_id(role_name=namespace, mount_point=namespace)["data"]["role_id"], - 'secret-id': approle_adapter.generate_secret_id(role_name=namespace, mount_point=namespace)["data"]["secret_id"], - 'db_role': 'test-role' + 'secret-id': approle_adapter.generate_secret_id(role_name=namespace, mount_point=namespace)["data"]["secret_id"] } diff --git a/tests/test_database.py b/tests/test_database.py index 3beb0e583..4d0b27e2c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -14,14 +14,13 @@ # pylint: disable=too-many-locals @pytest.mark.order(2) -def test_init_database_client(prepare_vault, vault_instance, vault_configuration_data, postgres_instance): +def test_init_database_client(namespace, vault_instance, vault_configuration_data, postgres_instance): """ Checking an initialized database client """ _ = vault_configuration_data _, cursor = postgres_instance - db_role = prepare_vault['db_role'] - database = DatabaseClient(vault=vault_instance, db_role=db_role) + database = DatabaseClient(vault=vault_instance, db_role=namespace) # Check general attributes assert isinstance(database.vault, object) @@ -60,75 +59,72 @@ def test_init_database_client(prepare_vault, vault_instance, vault_configuration assert False -@pytest.mark.order(4) -def test_reset_stale_messages(prepare_vault, vault_instance, postgres_instance, postgres_messages_test_data): - """ - Checking the reset of stale messages when the database client is initialized - """ - _, cursor = postgres_instance - _ = postgres_messages_test_data - db_role = prepare_vault['db_role'] - _ = DatabaseClient(vault=vault_instance, db_role=db_role) - - # Check the reset of stale messages - cursor.execute("SELECT state FROM messages") - messages_list = cursor.fetchall() - assert len(messages_list) > 0 - for message in messages_list: - assert message[0] == 'updated' - - -@pytest.mark.order(5) -def test_database_connection(prepare_vault, vault_instance, postgres_instance): - """ - Checking the database connection and disconnection - """ - _ = postgres_instance - db_role = prepare_vault['db_role'] - database = DatabaseClient(vault=vault_instance, db_role=db_role) - - # Check the database connection - connection = database.get_connection() - assert isinstance(connection, psycopg2.extensions.connection) - assert not connection.closed - - # Check the database disconnection - database.close_connection(connection) - assert connection.closed - - -@pytest.mark.order(6) -def test_add_message_to_queue(prepare_vault, vault_instance, postgres_instance): - """ - Checking the addition of a message to the queue - """ - _, cursor = 
postgres_instance - data = { - 'user_id': '12345', - 'post_id': '67890', - 'post_url': 'https://www.instagram.com/p/67890/', - 'post_owner': 'johndoe', - 'link_type': 'profile', - 'message_id': 'abcde', - 'chat_id': 'xyz', - 'scheduled_time': '2022-01-01 12:00:00', - 'download_status': 'not started', - 'upload_status': 'not started' - } - db_role = prepare_vault['db_role'] - database = DatabaseClient(vault=vault_instance, db_role=db_role) - response = database.add_message_to_queue(data=data) - - # Check the addition of a message to the queue - cursor.execute( - "SELECT user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status " - "FROM queue WHERE message_id = 'abcde'" - ) - queue_item = cursor.fetchone() - assert len(queue_item) > 0 - assert response == f"{data['message_id']}: added to queue" - assert queue_item == ( - data['user_id'], data['post_id'], data['post_url'], - data['post_owner'], data['link_type'], data['message_id'], - data['chat_id'], data['scheduled_time'], data['download_status'], data['upload_status'] - ) +# @pytest.mark.order(4) +# def test_reset_stale_messages(namespace, vault_instance, postgres_instance, postgres_messages_test_data): +# """ +# Checking the reset of stale messages when the database client is initialized +# """ +# _, cursor = postgres_instance +# _ = postgres_messages_test_data +# _ = DatabaseClient(vault=vault_instance, db_role=namespace) + +# # Check the reset of stale messages +# cursor.execute("SELECT state FROM messages") +# messages_list = cursor.fetchall() +# assert len(messages_list) > 0 +# for message in messages_list: +# assert message[0] == 'updated' + + +# @pytest.mark.order(5) +# def test_database_connection(namespace, vault_instance, postgres_instance): +# """ +# Checking the database connection and disconnection +# """ +# _ = postgres_instance +# database = DatabaseClient(vault=vault_instance, db_role=namespace) + +# # Check the database connection +# connection = database.get_connection() +# assert isinstance(connection, psycopg2.extensions.connection) +# assert not connection.closed + +# # Check the database disconnection +# database.close_connection(connection) +# assert connection.closed + + +# @pytest.mark.order(6) +# def test_add_message_to_queue(namespace, vault_instance, postgres_instance): +# """ +# Checking the addition of a message to the queue +# """ +# _, cursor = postgres_instance +# data = { +# 'user_id': '12345', +# 'post_id': '67890', +# 'post_url': 'https://www.instagram.com/p/67890/', +# 'post_owner': 'johndoe', +# 'link_type': 'profile', +# 'message_id': 'abcde', +# 'chat_id': 'xyz', +# 'scheduled_time': '2022-01-01 12:00:00', +# 'download_status': 'not started', +# 'upload_status': 'not started' +# } +# database = DatabaseClient(vault=vault_instance, db_role=namespace) +# response = database.add_message_to_queue(data=data) + +# # Check the addition of a message to the queue +# cursor.execute( +# "SELECT user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status " +# "FROM queue WHERE message_id = 'abcde'" +# ) +# queue_item = cursor.fetchone() +# assert len(queue_item) > 0 +# assert response == f"{data['message_id']}: added to queue" +# assert queue_item == ( +# data['user_id'], data['post_id'], data['post_url'], +# data['post_owner'], data['link_type'], data['message_id'], +# data['chat_id'], data['scheduled_time'], data['download_status'], data['upload_status'] +# ) diff --git 
a/tests/vault/policy.hcl b/tests/vault/policy.hcl
index 4066f0443..293c5f348 100644
--- a/tests/vault/policy.hcl
+++ b/tests/vault/policy.hcl
@@ -19,13 +19,13 @@ path "auth/token/lookup-self" {
 # Operations for pytest
 # Allow read, create or update operations on the pytest path
-path "sys/mounts/pyinstabot-downloader" {
+path "sys/mounts/pytest" {
   capabilities = ["read", "create", "update"]
 }

 # Operations for pytest
 # Allow reading database credentials for a role
-path "database/creds/test-role" {
+path "database/creds/pytest" {
   capabilities = ["read"]
 }

From b9301451bd9b708ccadbaee895000a09841f1554 Mon Sep 17 00:00:00 2001
From: obervinov
Date: Thu, 29 Aug 2024 23:24:33 +0400
Subject: [PATCH 027/148] modified: tests/vault/policy.hcl

---
 tests/vault/policy.hcl | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/tests/vault/policy.hcl b/tests/vault/policy.hcl
index 293c5f348..adc69379d 100644
--- a/tests/vault/policy.hcl
+++ b/tests/vault/policy.hcl
@@ -29,6 +29,13 @@ path "database/creds/pytest" {
   capabilities = ["read"]
 }

+# Operations for pytest
+# Allow reading and updating the pytest kv2 engine configuration
+path "pytest/config" {
+  capabilities = ["read", "list", "update"]
+}
+
+
 ###############################################################
 # Operations for the module

From af17dd059317baea0dc168283bf06448261f1e72 Mon Sep 17 00:00:00 2001
From: obervinov
Date: Thu, 29 Aug 2024 23:33:44 +0400
Subject: [PATCH 028/148] modified: tests/vault/policy.hcl

---
 tests/vault/policy.hcl | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/tests/vault/policy.hcl b/tests/vault/policy.hcl
index adc69379d..0627220e9 100644
--- a/tests/vault/policy.hcl
+++ b/tests/vault/policy.hcl
@@ -35,9 +35,16 @@ path "pytest/config" {
   capabilities = ["read", "list", "update"]
 }

+# Operations for pytest
+# Allow reading and writing test configuration secrets
+path "pytest/data/configuration/*" {
+  capabilities = ["create", "read", "update", "list"]
+}
+
 ###############################################################
+
 # Operations for the module
 # Read and update namespace configuration
 path "pyinstabot-downloader/config" {

From 8cca9f9f4b43d9fd7339f52c93b54e57733a86a5 Mon Sep 17 00:00:00 2001
From: obervinov
Date: Thu, 29 Aug 2024 23:36:21 +0400
Subject: [PATCH 029/148] modified: tests/test_database.py

---
 tests/test_database.py | 30 +++++++++++++++---------------
 1 file changed, 15 insertions(+), 15 deletions(-)

diff --git a/tests/test_database.py b/tests/test_database.py
index 4d0b27e2c..4780e2952 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -59,21 +59,21 @@ def test_init_database_client(namespace, vault_instance, vault_configuration_dat
                 assert False

-# @pytest.mark.order(4)
-# def test_reset_stale_messages(namespace, vault_instance, postgres_instance, postgres_messages_test_data):
-#     """
-#     Checking the reset of stale messages when the database client is initialized
-#     """
-#     _, cursor = postgres_instance
-#     _ =
postgres_messages_test_data + _ = DatabaseClient(vault=vault_instance, db_role=namespace) + + # Check the reset of stale messages + cursor.execute("SELECT state FROM messages") + messages_list = cursor.fetchall() + assert len(messages_list) > 0 + for message in messages_list: + assert message[0] == 'updated' # @pytest.mark.order(5) From bb881f65c41227de6267089946bf627fee0046b3 Mon Sep 17 00:00:00 2001 From: obervinov Date: Thu, 29 Aug 2024 23:40:13 +0400 Subject: [PATCH 030/148] modified: tests/test_database.py --- tests/test_database.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 4780e2952..8389d8497 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -76,22 +76,22 @@ def test_reset_stale_messages(namespace, vault_instance, postgres_instance, post assert message[0] == 'updated' -# @pytest.mark.order(5) -# def test_database_connection(namespace, vault_instance, postgres_instance): -# """ -# Checking the database connection and disconnection -# """ -# _ = postgres_instance -# database = DatabaseClient(vault=vault_instance, db_role=namespace) +@pytest.mark.order(5) +def test_database_connection(namespace, vault_instance, postgres_instance): + """ + Checking the database connection and disconnection + """ + _ = postgres_instance + database = DatabaseClient(vault=vault_instance, db_role=namespace) -# # Check the database connection -# connection = database.get_connection() -# assert isinstance(connection, psycopg2.extensions.connection) -# assert not connection.closed + # Check the database connection + connection = database.get_connection() + assert isinstance(connection, psycopg2.extensions.connection) + assert not connection.closed -# # Check the database disconnection -# database.close_connection(connection) -# assert connection.closed + # Check the database disconnection + database.close_connection(connection) + assert connection.closed # @pytest.mark.order(6) From 2207d579fbc3795efcf12c8a29dff95d2d251b11 Mon Sep 17 00:00:00 2001 From: obervinov Date: Thu, 29 Aug 2024 23:55:36 +0400 Subject: [PATCH 031/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index d64b4a087..649d4fbc1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -93,7 +93,7 @@ def fixture_postgres_url(): Returns: str: The postgres url. """ - return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" + return "postgresql://{{username}}:{{password}}@0.0.0.0:5432/postgres?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') From e99082c1a4dfa8ef9ffd2a76b27d3fb32ac61c28 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 00:07:57 +0400 Subject: [PATCH 032/148] modified: tests/conftest.py --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 649d4fbc1..199777dd2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -93,7 +93,7 @@ def fixture_postgres_url(): Returns: str: The postgres url. """ - return "postgresql://{{username}}:{{password}}@0.0.0.0:5432/postgres?sslmode=disable" + return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') @@ -248,7 +248,7 @@ def fixture_vault_configuration_data(vault_instance): vault_instance: An instance of the Vault class. 
""" database = { - 'host': '0.0.0.0', + 'host': 'postgres', 'port': '5432', 'database': 'postgres', 'connections': '10' From 75f0df58d4b59a974feb1cf6ca85c6cc181784e8 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 00:13:22 +0400 Subject: [PATCH 033/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 199777dd2..d64b4a087 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -248,7 +248,7 @@ def fixture_vault_configuration_data(vault_instance): vault_instance: An instance of the Vault class. """ database = { - 'host': 'postgres', + 'host': '0.0.0.0', 'port': '5432', 'database': 'postgres', 'connections': '10' From e1b4f7098e15011f78b3b194e0b5626c18ad0094 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 00:32:31 +0400 Subject: [PATCH 034/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index d64b4a087..7b9c5e015 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -218,7 +218,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg } -@pytest.fixture(name="vault_instance", scope='session') +@pytest.fixture(name="vault_instance", scope='function') def fixture_vault_instance(vault_url, namespace, prepare_vault): """ Returns client of the configurator vault From 928e11f672eabb1bf9124488706bc0b705ec6a09 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 00:53:25 +0400 Subject: [PATCH 035/148] modified: src/modules/database.py --- src/modules/database.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/modules/database.py b/src/modules/database.py index 182a48a0b..ca3060a93 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -107,6 +107,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: pool.SimpleConnectionPool: A connection pool for the PostgreSQL database. """ db_configuration = self.vault.kv2engine.read_secret(path='configuration/database') + log.warning(db_configuration) db_credentials = self.vault.dbengine.generate_credentials(role=self.db_role) log.info( '[Database]: Creating a connection pool for the %s:%s/%s', From 5bfed2aa36a160a801b26c8038e76ca2c3154a61 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 01:07:39 +0400 Subject: [PATCH 036/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7b9c5e015..7c86e2b81 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -239,7 +239,7 @@ def fixture_vault_instance(vault_url, namespace, prepare_vault): ) -@pytest.fixture(name="vault_configuration_data", scope='session') +@pytest.fixture(name="vault_configuration_data", scope='function') def fixture_vault_configuration_data(vault_instance): """ This function sets up a database configuration in the vault_instance object. 
From a9e673e35fe8e76238e67a7a64ac9c7d48c4deaf Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 16:22:42 +0400 Subject: [PATCH 037/148] modified: CHANGELOG.md modified: src/migrations/0002_messages_table.py modified: src/migrations/0003_users_table.py modified: src/modules/database.py modified: tests/conftest.py --- CHANGELOG.md | 2 +- src/migrations/0002_messages_table.py | 30 +++++++++++++++++---------- src/migrations/0003_users_table.py | 25 ++++++++++++---------- src/modules/database.py | 11 +++++++++- tests/conftest.py | 9 ++++++-- 5 files changed, 51 insertions(+), 26 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 48080f3bf..90c809658 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-08-29 +## v2.3.0 - 2024-08-30 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/src/migrations/0002_messages_table.py b/src/migrations/0002_messages_table.py index cb27d76a9..52b5ade86 100644 --- a/src/migrations/0002_messages_table.py +++ b/src/migrations/0002_messages_table.py @@ -36,8 +36,13 @@ def execute(obj): if not table: print(f"{NAME}: The {table_name} table does not exist. Skip the migration.") + elif len(columns) < 1: print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. Skip the migration.") + + elif not all(column in [rc[0] for rc in rename_columns] for column in columns): + print(f"{NAME}: The {table_name} table does not have the necessary columns to rename. Skip renaming.") + else: for column in rename_columns: try: @@ -53,15 +58,18 @@ def execute(obj): conn.rollback() for column in add_columns: - try: - print(f"{NAME}: Add column {column[0]} to the {table_name} table...") - cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") - conn.commit() - print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") - except obj.errors.DuplicateColumn as error: - print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") - conn.rollback() - except obj.errors.FeatureNotSupported as error: - print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") - conn.rollback() + if column[0] in columns: + print(f"{NAME}: The {table_name} table already has the {column[0]} column. Skip adding.") + else: + try: + print(f"{NAME}: Add column {column[0]} to the {table_name} table...") + cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") + conn.commit() + print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") + except obj.errors.DuplicateColumn as error: + print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") + conn.rollback() + except obj.errors.FeatureNotSupported as error: + print(f"{NAME}: Columns in the {table_name} table have not been added. 
Skip adding: {error}") + conn.rollback() obj.close_connection(conn) diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py index 7c96b729f..9d98e17a4 100644 --- a/src/migrations/0003_users_table.py +++ b/src/migrations/0003_users_table.py @@ -41,14 +41,17 @@ def execute(obj): else: for column in add_columns: - try: - print(f"{NAME}: Add column {column[0]} to the {table_name} table...") - cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") - conn.commit() - print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") - except obj.errors.DuplicateColumn as error: - print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") - conn.rollback() - except obj.errors.FeatureNotSupported as error: - print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") - conn.rollback() + if column[0] in columns: + print(f"{NAME}: The {table_name} table already has the {column[0]} column. Skip adding.") + else: + try: + print(f"{NAME}: Add column {column[0]} to the {table_name} table...") + cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}") + conn.commit() + print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.") + except obj.errors.DuplicateColumn as error: + print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}") + conn.rollback() + except obj.errors.FeatureNotSupported as error: + print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") + conn.rollback() diff --git a/src/modules/database.py b/src/modules/database.py index ca3060a93..083ab8a1c 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -106,9 +106,18 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: Returns: pool.SimpleConnectionPool: A connection pool for the PostgreSQL database. 
""" + required_keys_configuration = {"host", "port", "database", "connections"} + required_keys_credentials = {"username", "password"} db_configuration = self.vault.kv2engine.read_secret(path='configuration/database') - log.warning(db_configuration) db_credentials = self.vault.dbengine.generate_credentials(role=self.db_role) + + if not db_configuration or not db_credentials: + raise ValueError('Database configuration or credentials are missing') + + missing_keys = (required_keys_configuration - set(db_configuration.keys())) | (required_keys_credentials - set(db_credentials.keys())) + if missing_keys: + raise KeyError(f"Missing keys in the database configuration or credentials: {missing_keys}") + log.info( '[Database]: Creating a connection pool for the %s:%s/%s', db_configuration['host'], db_configuration['port'], db_configuration['database'] diff --git a/tests/conftest.py b/tests/conftest.py index 7c86e2b81..2f1b55eb9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -202,10 +202,15 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "ALTER TABLE public.migrations OWNER TO \"{{name}}\"; " "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) + revocation_statements = ( + "SELECT 'ALTER SEQUENCE ' || sequence_name || ' OWNER TO postgres;' FROM information_schema.sequences WHERE sequence_schema = 'public'; " + "SELECT 'ALTER TABLE ' || table_name || ' OWNER TO postgres;' FROM information_schema.tables WHERE table_schema = 'public';" + ) role = client.secrets.database.create_role( name="pytest", db_name="postgresql", creation_statements=statement, + revocation_statements=revocation_statements, default_ttl="1h", max_ttl="24h" ) @@ -218,7 +223,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg } -@pytest.fixture(name="vault_instance", scope='function') +@pytest.fixture(name="vault_instance", scope='session') def fixture_vault_instance(vault_url, namespace, prepare_vault): """ Returns client of the configurator vault @@ -239,7 +244,7 @@ def fixture_vault_instance(vault_url, namespace, prepare_vault): ) -@pytest.fixture(name="vault_configuration_data", scope='function') +@pytest.fixture(name="vault_configuration_data", scope='session') def fixture_vault_configuration_data(vault_instance): """ This function sets up a database configuration in the vault_instance object. 
From cf0a64133c224e605f1626eefa95c5fc086929ee Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 18:02:26 +0400 Subject: [PATCH 038/148] modified: src/modules/database.py modified: tests/conftest.py --- src/modules/database.py | 2 +- tests/conftest.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 083ab8a1c..50dc1e8cf 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -640,7 +640,7 @@ def get_user_queue( columns=("post_id", "scheduled_time"), condition=f"user_id = '{user_id}'", order_by='scheduled_time ASC', - limit=1000 + limit=10000 ) for message in queue: if user_id not in result: diff --git a/tests/conftest.py b/tests/conftest.py index 2f1b55eb9..7a46780c5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -202,10 +202,10 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "ALTER TABLE public.migrations OWNER TO \"{{name}}\"; " "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) - revocation_statements = ( - "SELECT 'ALTER SEQUENCE ' || sequence_name || ' OWNER TO postgres;' FROM information_schema.sequences WHERE sequence_schema = 'public'; " + revocation_statements = [ + "SELECT 'ALTER SEQUENCE ' || sequence_name || ' OWNER TO postgres;' FROM information_schema.sequences WHERE sequence_schema = 'public';", "SELECT 'ALTER TABLE ' || table_name || ' OWNER TO postgres;' FROM information_schema.tables WHERE table_schema = 'public';" - ) + ] role = client.secrets.database.create_role( name="pytest", db_name="postgresql", From 8aa82d1d48af628c7f0d128cf666ab694cb8ef14 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 18:14:14 +0400 Subject: [PATCH 039/148] modified: tests/conftest.py --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7a46780c5..6203bb664 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -203,8 +203,8 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) revocation_statements = [ - "SELECT 'ALTER SEQUENCE ' || sequence_name || ' OWNER TO postgres;' FROM information_schema.sequences WHERE sequence_schema = 'public';", - "SELECT 'ALTER TABLE ' || table_name || ' OWNER TO postgres;' FROM information_schema.tables WHERE table_schema = 'public';" + "SELECT \"ALTER SEQUENCE\" || sequence_name || \" OWNER TO postgres;\" FROM information_schema.sequences WHERE sequence_schema = \"public\";", + "SELECT \"ALTER TABLE\" || table_name || \" OWNER TO postgres;\" FROM information_schema.tables WHERE table_schema = \"public\";" ] role = client.secrets.database.create_role( name="pytest", From 6ace3b3fd8f516423adcc112db8d35f9209ee63d Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 30 Aug 2024 18:25:26 +0400 Subject: [PATCH 040/148] modified: tests/conftest.py --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 6203bb664..7a46780c5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -203,8 +203,8 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) revocation_statements = [ - "SELECT \"ALTER SEQUENCE\" || sequence_name || \" OWNER TO postgres;\" FROM information_schema.sequences WHERE sequence_schema = 
\"public\";", - "SELECT \"ALTER TABLE\" || table_name || \" OWNER TO postgres;\" FROM information_schema.tables WHERE table_schema = \"public\";" + "SELECT 'ALTER SEQUENCE ' || sequence_name || ' OWNER TO postgres;' FROM information_schema.sequences WHERE sequence_schema = 'public';", + "SELECT 'ALTER TABLE ' || table_name || ' OWNER TO postgres;' FROM information_schema.tables WHERE table_schema = 'public';" ] role = client.secrets.database.create_role( name="pytest", From ff77748440de0fb67ec97e3066c6646a741b99e4 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 2 Sep 2024 22:28:25 +0400 Subject: [PATCH 041/148] trying to fix the revocation statement in vault database engine --- CHANGELOG.md | 2 +- tests/conftest.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 90c809658..c7d7bab42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-08-30 +## v2.3.0 - 2024-09-02 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/tests/conftest.py b/tests/conftest.py index 7a46780c5..ee57adc88 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -203,8 +203,16 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) revocation_statements = [ - "SELECT 'ALTER SEQUENCE ' || sequence_name || ' OWNER TO postgres;' FROM information_schema.sequences WHERE sequence_schema = 'public';", - "SELECT 'ALTER TABLE ' || table_name || ' OWNER TO postgres;' FROM information_schema.tables WHERE table_schema = 'public';" + ( + "SELECT 'ALTER SEQUENCE ' || quote_ident(sequence_name) || " + "' OWNER TO postgres;' FROM information_schema.sequences " + "WHERE sequence_schema = 'public';" + ), + ( + "SELECT 'ALTER TABLE ' || quote_ident(table_name) || " + "' OWNER TO postgres;' FROM information_schema.tables " + "WHERE table_schema = 'public';" + ) ] role = client.secrets.database.create_role( name="pytest", From c4d0944825ef5f709be624a9727c549a76b6773d Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 2 Sep 2024 22:35:47 +0400 Subject: [PATCH 042/148] modified: tests/conftest.py --- tests/conftest.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ee57adc88..2590c9d5f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -205,12 +205,14 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg revocation_statements = [ ( "SELECT 'ALTER SEQUENCE ' || quote_ident(sequence_name) || " - "' OWNER TO postgres;' FROM information_schema.sequences " + "' OWNER TO postgres;' " + "FROM information_schema.sequences " "WHERE sequence_schema = 'public';" ), ( "SELECT 'ALTER TABLE ' || quote_ident(table_name) || " - "' OWNER TO postgres;' FROM information_schema.tables " + "' OWNER TO postgres;' " + "FROM information_schema.tables " "WHERE table_schema = 'public';" ) ] From 1fd187390fd6535b7b8c2a3d25984b1c98daf3cb Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 2 Sep 2024 23:10:25 +0400 Subject: [PATCH 043/148] modified: tests/conftest.py --- tests/conftest.py 
| 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2590c9d5f..9f6337da6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -203,18 +203,18 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) revocation_statements = [ - ( - "SELECT 'ALTER SEQUENCE ' || quote_ident(sequence_name) || " - "' OWNER TO postgres;' " - "FROM information_schema.sequences " - "WHERE sequence_schema = 'public';" - ), - ( - "SELECT 'ALTER TABLE ' || quote_ident(table_name) || " - "' OWNER TO postgres;' " - "FROM information_schema.tables " - "WHERE table_schema = 'public';" - ) + "ALTER SEQUENCE public.users_id_seq OWNER TO postgres;", + "ALTER SEQUENCE public.users_requests_id_seq OWNER TO postgres;", + "ALTER SEQUENCE public.messages_id_seq OWNER TO postgres;", + "ALTER SEQUENCE public.queue_id_seq OWNER TO postgres;", + "ALTER SEQUENCE public.processed_id_seq OWNER TO postgres;", + "ALTER SEQUENCE public.migrations_id_seq OWNER TO postgres;", + "ALTER TABLE public.users OWNER TO postgres;", + "ALTER TABLE public.users_requests OWNER TO postgres;", + "ALTER TABLE public.messages OWNER TO postgres;", + "ALTER TABLE public.queue OWNER TO postgres;", + "ALTER TABLE public.processed OWNER TO postgres;", + "ALTER TABLE public.migrations OWNER TO postgres;" ] role = client.secrets.database.create_role( name="pytest", From 89397cc3de3b75f43bf7149ee05054f0ec3a7c08 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 2 Sep 2024 23:12:33 +0400 Subject: [PATCH 044/148] fixed linting issues --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9f6337da6..42690e3b6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -214,7 +214,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "ALTER TABLE public.messages OWNER TO postgres;", "ALTER TABLE public.queue OWNER TO postgres;", "ALTER TABLE public.processed OWNER TO postgres;", - "ALTER TABLE public.migrations OWNER TO postgres;" + "ALTER TABLE public.migrations OWNER TO postgres;" ] role = client.secrets.database.create_role( name="pytest", From ab028456e009c6b3cbb8f3425ea6f8c29be692b3 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 2 Sep 2024 23:20:05 +0400 Subject: [PATCH 045/148] modified: tests/conftest.py --- tests/conftest.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 42690e3b6..cea8c0c2c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -203,12 +203,6 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ) revocation_statements = [ - "ALTER SEQUENCE public.users_id_seq OWNER TO postgres;", - "ALTER SEQUENCE public.users_requests_id_seq OWNER TO postgres;", - "ALTER SEQUENCE public.messages_id_seq OWNER TO postgres;", - "ALTER SEQUENCE public.queue_id_seq OWNER TO postgres;", - "ALTER SEQUENCE public.processed_id_seq OWNER TO postgres;", - "ALTER SEQUENCE public.migrations_id_seq OWNER TO postgres;", "ALTER TABLE public.users OWNER TO postgres;", "ALTER TABLE public.users_requests OWNER TO postgres;", "ALTER TABLE public.messages OWNER TO postgres;", From f1198ad335a39fe1b552a5635f622611b33ef233 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 2 Sep 2024 23:49:14 +0400 Subject: [PATCH 
046/148] modified: tests/conftest.py --- tests/conftest.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index cea8c0c2c..573c931e7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -192,16 +192,16 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg print(f"Configured database engine: {configuration}") # Create role for the database - statement = ( - "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; " - "ALTER TABLE public.users OWNER TO \"{{name}}\"; " - "ALTER TABLE public.users_requests OWNER TO \"{{name}}\"; " - "ALTER TABLE public.messages OWNER TO \"{{name}}\"; " - "ALTER TABLE public.queue OWNER TO \"{{name}}\"; " - "ALTER TABLE public.processed OWNER TO \"{{name}}\"; " - "ALTER TABLE public.migrations OWNER TO \"{{name}}\"; " + statement = [ + "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';", + "ALTER TABLE public.users OWNER TO \"{{name}}\";", + "ALTER TABLE public.users_requests OWNER TO \"{{name}}\";", + "ALTER TABLE public.messages OWNER TO \"{{name}}\";", + "ALTER TABLE public.queue OWNER TO \"{{name}}\";", + "ALTER TABLE public.processed OWNER TO \"{{name}}\";", + "ALTER TABLE public.migrations OWNER TO \"{{name}}\";", "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" - ) + ] revocation_statements = [ "ALTER TABLE public.users OWNER TO postgres;", "ALTER TABLE public.users_requests OWNER TO postgres;", From fdebc1087f6b75c5c0492a1c45749c830acc7ec1 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 00:30:46 +0400 Subject: [PATCH 047/148] trying run without matrix strategy builds --- .github/workflows/pr.yaml | 12 ++++++------ .github/workflows/release.yaml | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 80a3a2829..a1b922510 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -11,20 +11,20 @@ on: jobs: changelog: - uses: obervinov/_templates/.github/workflows/changelog.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/changelog.yaml@fix/v1.2.9 pylint: - uses: obervinov/_templates/.github/workflows/pylint.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/pylint.yaml@fix/v1.2.9 pytest: - uses: obervinov/_templates/.github/workflows/pytest-with-vault.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/pytest-with-vault.yaml@fix/v1.2.9 pyproject: - uses: obervinov/_templates/.github/workflows/pyproject.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/pyproject.yaml@fix/v1.2.9 pr: - uses: obervinov/_templates/.github/workflows/pr.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/pr.yaml@fix/v1.2.9 build-pr-image: - uses: obervinov/_templates/.github/workflows/docker.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/docker.yaml@fix/v1.2.9 needs: [changelog, pylint, pytest, pyproject] diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 51825053e..688cea565 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -10,7 +10,7 @@ on: jobs: create-release: - uses: obervinov/_templates/.github/workflows/release.yaml@v1.2.8 + uses: obervinov/_templates/.github/workflows/release.yaml@fix/v1.2.9 cleanup-untagged-images: runs-on: ubuntu-latest @@ -26,5 +26,5 @@ jobs: owner_type: 'user' # milestone: - # uses: 
obervinov/_templates/.github/workflows/milestone.yaml@v1.2.8 + # uses: obervinov/_templates/.github/workflows/milestone.yaml@fix/v1.2.9 # needs: [create-release] From 2a03ca09542c663f5b111a7956f887b5ccd5fd1b Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 00:58:32 +0400 Subject: [PATCH 048/148] debug --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 573c931e7..57a70ed9a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -196,7 +196,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';", "ALTER TABLE public.users OWNER TO \"{{name}}\";", "ALTER TABLE public.users_requests OWNER TO \"{{name}}\";", - "ALTER TABLE public.messages OWNER TO \"{{name}}\";", + #"ALTER TABLE public.messages OWNER TO \"{{name}}\";", "ALTER TABLE public.queue OWNER TO \"{{name}}\";", "ALTER TABLE public.processed OWNER TO \"{{name}}\";", "ALTER TABLE public.migrations OWNER TO \"{{name}}\";", From 6e097dc27e7d8a2749824aa0686708b27d286fcd Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 00:59:02 +0400 Subject: [PATCH 049/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 57a70ed9a..53a2f16da 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -196,7 +196,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';", "ALTER TABLE public.users OWNER TO \"{{name}}\";", "ALTER TABLE public.users_requests OWNER TO \"{{name}}\";", - #"ALTER TABLE public.messages OWNER TO \"{{name}}\";", + # "ALTER TABLE public.messages OWNER TO \"{{name}}\";", "ALTER TABLE public.queue OWNER TO \"{{name}}\";", "ALTER TABLE public.processed OWNER TO \"{{name}}\";", "ALTER TABLE public.migrations OWNER TO \"{{name}}\";", From bb7d995d59c78a9f4757942efa4358da69a5b07d Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 01:03:50 +0400 Subject: [PATCH 050/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 53a2f16da..48f7fb7db 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -196,10 +196,10 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';", "ALTER TABLE public.users OWNER TO \"{{name}}\";", "ALTER TABLE public.users_requests OWNER TO \"{{name}}\";", - # "ALTER TABLE public.messages OWNER TO \"{{name}}\";", "ALTER TABLE public.queue OWNER TO \"{{name}}\";", "ALTER TABLE public.processed OWNER TO \"{{name}}\";", "ALTER TABLE public.migrations OWNER TO \"{{name}}\";", + "ALTER TABLE public.messages OWNER TO \"{{name}}\";", "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ] revocation_statements = [ From 7e27953c20707ae2381c9943469edf11229203ac Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 12:43:45 +0400 Subject: [PATCH 051/148] modified: CHANGELOG.md modified: poetry.lock modified: tests/conftest.py --- CHANGELOG.md | 2 +- poetry.lock | 28 ++++++++++++---------- tests/conftest.py | 59 ++++++++++++++++++++++++++++++++++++++--------- 3 files changed, 65 insertions(+), 24 deletions(-) diff 
--git a/CHANGELOG.md b/CHANGELOG.md index c7d7bab42..832fb4975 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-02 +## v2.3.0 - 2024-09-03 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/poetry.lock b/poetry.lock index 77d61af2a..2ae73e47d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -326,12 +326,12 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p [[package]] name = "instaloader" -version = "4.13" +version = "4.13.1" description = "Download pictures (or videos) along with their captions and other metadata from Instagram." optional = false python-versions = ">=3.8" files = [ - {file = "instaloader-4.13.tar.gz", hash = "sha256:49b15c3c41ba9287ddecacb57c5fdd1ee706107117b4b8ddd9ccb56ab75c573c"}, + {file = "instaloader-4.13.1.tar.gz", hash = "sha256:36774ea1076eeb236f8782d221e3737f71ddc023042f0b13761429ef137f1133"}, ] [package.dependencies] @@ -981,13 +981,13 @@ files = [ [[package]] name = "urllib3" -version = "1.26.19" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, - {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -1057,18 +1057,22 @@ requests = "*" [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" diff --git a/tests/conftest.py b/tests/conftest.py index 48f7fb7db..144f48548 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,6 +7,7 @@ import pytest import hvac import psycopg2 +from psycopg2 import sql # pylint: disable=E0401 from vault import VaultClient @@ -97,27 +98,61 @@ def fixture_postgres_url(): @pytest.fixture(name="postgres_instance", scope='session') -def fixture_postgres_instance(psql_tables_path): +def fixture_postgres_instance(psql_tables_path, namespace): """ - Prepare the postgres database, return the connection and cursor + Prepare the postgres database for tests, return the connection and cursor. Returns: tuple: The connection and cursor objects for the postgres database. 
""" - # Prepare database for tests - psql_connection = psycopg2.connect( + pytest_db_name = namespace + original_db_name = "postgres" + + # Connect to the default 'postgres' database to create a new test database + connection = psycopg2.connect( host='0.0.0.0', port=5432, user='postgres', password='postgres', - dbname='postgres' + dbname=original_db_name ) - psql_cursor = psql_connection.cursor() + connection.autocommit = True + cursor = connection.cursor() + + try: + # Create a new pytest database + cursor.execute(sql.SQL("CREATE DATABASE {}").format( + sql.Identifier(pytest_db_name) + )) + except psycopg2.errors.DuplicateDatabase: + print(f"Database {pytest_db_name} already exists.") + except Exception as error: + print(f"Failed to create database {pytest_db_name}: {error}") + raise + finally: + cursor.close() + connection.close() + + # Connect to the newly created test database + pytest_connection = psycopg2.connect( + host='0.0.0.0', + port=5432, + user='postgres', + password='postgres', + dbname=pytest_db_name + ) + pytest_cursor = pytest_connection.cursor() + + # Execute the SQL script to create tables with open(psql_tables_path, 'r', encoding='utf-8') as sql_file: sql_script = sql_file.read() - psql_cursor.execute(sql_script) - psql_connection.commit() - return psql_connection, psql_cursor + pytest_cursor.execute(sql_script) + pytest_connection.commit() + + yield pytest_connection, pytest_cursor + + pytest_cursor.close() + pytest_connection.close() @pytest.fixture(name="prepare_vault", scope='session') @@ -166,7 +201,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg token_type='service', secret_id_num_uses=0, token_num_uses=0, - token_ttl='15s', + token_ttl='360s', bind_secret_id=True, token_no_default_policy=True, mount_point=namespace @@ -194,6 +229,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg # Create role for the database statement = [ "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';", + f"ALTER DATABASE {namespace} OWNER TO \"{{name}}\";", "ALTER TABLE public.users OWNER TO \"{{name}}\";", "ALTER TABLE public.users_requests OWNER TO \"{{name}}\";", "ALTER TABLE public.queue OWNER TO \"{{name}}\";", @@ -203,6 +239,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ] revocation_statements = [ + f"ALTER DATABASE {namespace} OWNER TO postgres;", "ALTER TABLE public.users OWNER TO postgres;", "ALTER TABLE public.users_requests OWNER TO postgres;", "ALTER TABLE public.messages OWNER TO postgres;", @@ -212,7 +249,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg ] role = client.secrets.database.create_role( name="pytest", - db_name="postgresql", + db_name=namespace, creation_statements=statement, revocation_statements=revocation_statements, default_ttl="1h", From 5c4026f8187093fbb20302020a0f52fbd48e63cd Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 12:49:34 +0400 Subject: [PATCH 052/148] modified: tests/conftest.py --- tests/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 144f48548..37c6ae73c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -124,8 +124,6 @@ def fixture_postgres_instance(psql_tables_path, namespace): cursor.execute(sql.SQL("CREATE DATABASE {}").format( sql.Identifier(pytest_db_name) )) - except 
psycopg2.errors.DuplicateDatabase: - print(f"Database {pytest_db_name} already exists.") except Exception as error: print(f"Failed to create database {pytest_db_name}: {error}") raise From f4bffbe5d9c307e7a089659bd5fab00ccfad8283 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 12:53:54 +0400 Subject: [PATCH 053/148] modified: SECURITY.md --- SECURITY.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SECURITY.md b/SECURITY.md index 22ac1020f..247703120 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -11,4 +11,4 @@ Versions supported to fix vulnerabilities ## Reporting a Vulnerability -To report a vulnerability to me, just open the issue https://github.com/obervinov/pyinstabot-downloader/security/advisories/new \ No newline at end of file +To report a vulnerability to me, just open the issue https://github.com/obervinov/pyinstabot-downloader/security/advisories/new From 74e2816858f85b2663db642863e3857afd469778 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 14:06:47 +0400 Subject: [PATCH 054/148] modified: tests/conftest.py --- tests/conftest.py | 27 +++++++-------------------- 1 file changed, 7 insertions(+), 20 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 37c6ae73c..1e73a9d29 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -87,14 +87,15 @@ def fixture_psql_tables_path(): @pytest.fixture(name="postgres_url", scope='session') -def fixture_postgres_url(): +def fixture_postgres_url(namespace): """ Returns the postgres url for the tests Returns: str: The postgres url. """ - return "postgresql://{{username}}:{{password}}@postgres:5432/postgres?sslmode=disable" + database_name = namespace + return f"postgresql://{{username}}:{{password}}@postgres:5432/{database_name}?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') @@ -163,6 +164,7 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg """ # Wait for the postgres database to be ready _ = postgres_instance + database_name = namespace # Initialize the vault client = hvac.Client(url=vault_url) @@ -227,29 +229,14 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg # Create role for the database statement = [ "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';", - f"ALTER DATABASE {namespace} OWNER TO \"{{name}}\";", - "ALTER TABLE public.users OWNER TO \"{{name}}\";", - "ALTER TABLE public.users_requests OWNER TO \"{{name}}\";", - "ALTER TABLE public.queue OWNER TO \"{{name}}\";", - "ALTER TABLE public.processed OWNER TO \"{{name}}\";", - "ALTER TABLE public.migrations OWNER TO \"{{name}}\";", - "ALTER TABLE public.messages OWNER TO \"{{name}}\";", + "GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\";", + "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\";", "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" ] - revocation_statements = [ - f"ALTER DATABASE {namespace} OWNER TO postgres;", - "ALTER TABLE public.users OWNER TO postgres;", - "ALTER TABLE public.users_requests OWNER TO postgres;", - "ALTER TABLE public.messages OWNER TO postgres;", - "ALTER TABLE public.queue OWNER TO postgres;", - "ALTER TABLE public.processed OWNER TO postgres;", - "ALTER TABLE public.migrations OWNER TO postgres;" - ] role = client.secrets.database.create_role( name="pytest", - db_name=namespace, + db_name="postgresql", creation_statements=statement, - revocation_statements=revocation_statements, default_ttl="1h", 
max_ttl="24h" ) From c10df092a258a5ff1531167e4041ba9e198bddf5 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 14:17:53 +0400 Subject: [PATCH 055/148] fixed database connection url --- tests/conftest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 1e73a9d29..988c8a8f8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -95,7 +95,7 @@ def fixture_postgres_url(namespace): str: The postgres url. """ database_name = namespace - return f"postgresql://{{username}}:{{password}}@postgres:5432/{database_name}?sslmode=disable" + return ("postgresql://{{username}}:{{password}}@postgres:5432/%s?sslmode=disable", database_name) @pytest.fixture(name="postgres_instance", scope='session') @@ -164,7 +164,6 @@ def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postg """ # Wait for the postgres database to be ready _ = postgres_instance - database_name = namespace # Initialize the vault client = hvac.Client(url=vault_url) From adcb28c0e6077ddac7bedfcc3d612bb19af638f2 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 14:24:28 +0400 Subject: [PATCH 056/148] modified: tests/conftest.py --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 988c8a8f8..9f60fd0bc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -95,7 +95,7 @@ def fixture_postgres_url(namespace): str: The postgres url. """ database_name = namespace - return ("postgresql://{{username}}:{{password}}@postgres:5432/%s?sslmode=disable", database_name) + return f"postgresql://{{{{username}}}}:{{{{password}}}}@postgres:5432/{database_name}?sslmode=disable" @pytest.fixture(name="postgres_instance", scope='session') From da879dd507d5c66bf8247bdb74519c4698b35939 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 14:31:00 +0400 Subject: [PATCH 057/148] fixed database in tests --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9f60fd0bc..9511f4b86 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -270,7 +270,7 @@ def fixture_vault_instance(vault_url, namespace, prepare_vault): @pytest.fixture(name="vault_configuration_data", scope='session') -def fixture_vault_configuration_data(vault_instance): +def fixture_vault_configuration_data(vault_instance, namespace): """ This function sets up a database configuration in the vault_instance object. 
@@ -280,7 +280,7 @@ def fixture_vault_configuration_data(vault_instance): database = { 'host': '0.0.0.0', 'port': '5432', - 'database': 'postgres', + 'database': namespace, 'connections': '10' } for key, value in database.items(): From 9c6aea6f9f956f80234139c71ab2018e88a72ab6 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 14:36:16 +0400 Subject: [PATCH 058/148] fixed test_database_connection --- tests/test_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_database.py b/tests/test_database.py index 8389d8497..ffd8f1354 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -91,7 +91,7 @@ def test_database_connection(namespace, vault_instance, postgres_instance): # Check the database disconnection database.close_connection(connection) - assert connection.closed + assert connection == 0 # @pytest.mark.order(6) From 15b493996ce91d6c836971d907d71f3395521948 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 21:43:26 +0400 Subject: [PATCH 059/148] added additional test for database module: part2 --- tests/test_database.py | 75 ++++++++++++++++++++---------------------- 1 file changed, 35 insertions(+), 40 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index ffd8f1354..a3c4e7df8 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -79,52 +79,47 @@ def test_reset_stale_messages(namespace, vault_instance, postgres_instance, post @pytest.mark.order(5) def test_database_connection(namespace, vault_instance, postgres_instance): """ - Checking the database connection and disconnection + Checking the database connection """ _ = postgres_instance database = DatabaseClient(vault=vault_instance, db_role=namespace) - - # Check the database connection connection = database.get_connection() assert isinstance(connection, psycopg2.extensions.connection) assert not connection.closed - - # Check the database disconnection database.close_connection(connection) - assert connection == 0 -# @pytest.mark.order(6) -# def test_add_message_to_queue(namespace, vault_instance, postgres_instance): -# """ -# Checking the addition of a message to the queue -# """ -# _, cursor = postgres_instance -# data = { -# 'user_id': '12345', -# 'post_id': '67890', -# 'post_url': 'https://www.instagram.com/p/67890/', -# 'post_owner': 'johndoe', -# 'link_type': 'profile', -# 'message_id': 'abcde', -# 'chat_id': 'xyz', -# 'scheduled_time': '2022-01-01 12:00:00', -# 'download_status': 'not started', -# 'upload_status': 'not started' -# } -# database = DatabaseClient(vault=vault_instance, db_role=namespace) -# response = database.add_message_to_queue(data=data) - -# # Check the addition of a message to the queue -# cursor.execute( -# "SELECT user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status " -# "FROM queue WHERE message_id = 'abcde'" -# ) -# queue_item = cursor.fetchone() -# assert len(queue_item) > 0 -# assert response == f"{data['message_id']}: added to queue" -# assert queue_item == ( -# data['user_id'], data['post_id'], data['post_url'], -# data['post_owner'], data['link_type'], data['message_id'], -# data['chat_id'], data['scheduled_time'], data['download_status'], data['upload_status'] -# ) +@pytest.mark.order(6) +def test_add_message_to_queue(namespace, vault_instance, postgres_instance): + """ + Checking the addition of a message to the queue + """ + _, cursor = postgres_instance + data = { + 'user_id': '12345', + 'post_id': '67890', + 
'post_url': 'https://www.instagram.com/p/67890/', + 'post_owner': 'johndoe', + 'link_type': 'profile', + 'message_id': 'abcde', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-01 12:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + } + database = DatabaseClient(vault=vault_instance, db_role=namespace) + response = database.add_message_to_queue(data=data) + + # Check the addition of a message to the queue + cursor.execute( + "SELECT user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status " + "FROM queue WHERE message_id = 'abcde'" + ) + queue_item = cursor.fetchone() + assert len(queue_item) > 0 + assert response == f"{data['message_id']}: added to queue" + assert queue_item == ( + data['user_id'], data['post_id'], data['post_url'], + data['post_owner'], data['link_type'], data['message_id'], + data['chat_id'], data['scheduled_time'], data['download_status'], data['upload_status'] + ) From 097256d9ce4abc470f8eac87d0f439eec25ffb20 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 3 Sep 2024 22:18:16 +0400 Subject: [PATCH 060/148] modified: tests/test_database.py --- tests/test_database.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_database.py b/tests/test_database.py index a3c4e7df8..d1766d265 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -7,6 +7,7 @@ import json import importlib import pytest +from datetime import datetime import psycopg2 from psycopg2 import pool from src.modules.database import DatabaseClient @@ -121,5 +122,5 @@ def test_add_message_to_queue(namespace, vault_instance, postgres_instance): assert queue_item == ( data['user_id'], data['post_id'], data['post_url'], data['post_owner'], data['link_type'], data['message_id'], - data['chat_id'], data['scheduled_time'], data['download_status'], data['upload_status'] + data['chat_id'], datetime.strptime(data['scheduled_time'], '%Y-%m-%d %H:%M:%S'), data['download_status'], data['upload_status'] ) From 045e8990fb664387411d35e1ed59dec1a8cddb5c Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 00:47:50 +0400 Subject: [PATCH 061/148] added final tests for database module --- .github/workflows/pr.yaml | 12 +- .github/workflows/release.yaml | 4 +- CHANGELOG.md | 3 +- src/modules/database.py | 43 +---- tests/test_database.py | 294 ++++++++++++++++++++++++++++++--- 5 files changed, 291 insertions(+), 65 deletions(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index a1b922510..45bea32ae 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -11,20 +11,20 @@ on: jobs: changelog: - uses: obervinov/_templates/.github/workflows/changelog.yaml@fix/v1.2.9 + uses: obervinov/_templates/.github/workflows/changelog.yaml@v1.2.9 pylint: - uses: obervinov/_templates/.github/workflows/pylint.yaml@fix/v1.2.9 + uses: obervinov/_templates/.github/workflows/pylint.yaml@v1.2.9 pytest: - uses: obervinov/_templates/.github/workflows/pytest-with-vault.yaml@fix/v1.2.9 + uses: obervinov/_templates/.github/workflows/pytest-with-vault.yaml@v1.2.9 pyproject: - uses: obervinov/_templates/.github/workflows/pyproject.yaml@fix/v1.2.9 + uses: obervinov/_templates/.github/workflows/pyproject.yaml@v1.2.9 pr: - uses: obervinov/_templates/.github/workflows/pr.yaml@fix/v1.2.9 + uses: obervinov/_templates/.github/workflows/pr.yaml@v1.2.9 build-pr-image: - uses: obervinov/_templates/.github/workflows/docker.yaml@fix/v1.2.9 + uses: 
obervinov/_templates/.github/workflows/docker.yaml@v1.2.9 needs: [changelog, pylint, pytest, pyproject] diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 688cea565..61ea53c49 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -10,7 +10,7 @@ on: jobs: create-release: - uses: obervinov/_templates/.github/workflows/release.yaml@fix/v1.2.9 + uses: obervinov/_templates/.github/workflows/release.yaml@v1.2.9 cleanup-untagged-images: runs-on: ubuntu-latest @@ -26,5 +26,5 @@ jobs: owner_type: 'user' # milestone: - # uses: obervinov/_templates/.github/workflows/milestone.yaml@fix/v1.2.9 + # uses: obervinov/_templates/.github/workflows/milestone.yaml@v1.2.9 # needs: [create-release] diff --git a/CHANGELOG.md b/CHANGELOG.md index 832fb4975..6f388169a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-03 +## v2.3.0 - 2024-09-05 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p * bump vault-package to major version `3.0.0` * bump users-package to major version `3.0.0` * bump telegram-package to major version `2.0.1` +* add tests for database module * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) #### 🐛 Bug Fixes * general bug fixes and improvements diff --git a/src/modules/database.py b/src/modules/database.py index 50dc1e8cf..865d7dc5e 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -64,7 +64,6 @@ class DatabaseClient: get_user_processed(user_id): Get last ten messages from the processed table for the specified user. check_message_uniqueness(post_id, user_id): Check if a message with the given post ID and chat ID already exists in the queue. keep_message(message_id, chat_id, message_content, **kwargs): Add a message to the messages table in the database. - add_user(user_id, chat_id): Add a user to the users table in the database. get_users(): Get a list of all users in the database. get_considered_message(message_type, chat_id): Get a message with specified type and @@ -672,7 +671,7 @@ def get_user_processed( columns=("post_id", "timestamp", "state"), condition=f"user_id = '{user_id}'", order_by='timestamp ASC', - limit=5000 + limit=10000 ) for message in processed: if user_id not in result: @@ -780,6 +779,7 @@ def keep_message( condition=f"id = '{check_exist_message_type[0][0]}'" ) response = f"{message_id} updated" + elif not check_exist_message_type: self._insert( table_name='messages', @@ -787,51 +787,20 @@ def keep_message( values=(message_id, chat_id, message_type, message_content_hash, 'bot') ) response = f"{message_id} kept" + else: log.warning('[Database]: Message with ID %s already exists in the messages table and cannot be updated', message_id) response = f"{message_id} already exists" - return response - - def add_user( - self, - user_id: str = None, - chat_id: str = None - ) -> str: - """ - Add a user to the users table in the database. - It is used to store the user ID and chat ID for sending messages to the user. 
- Args: - user_id (str): The ID of the user. - chat_id (str): The ID of the chat. - - Returns: - str: A message indicating that the user was added to the users table or that the user already exists. - - Examples: - >>> add_user(user_id='12345', chat_id='67890') - '12345 added' - or - '12345 already exists' - """ - exist_user = self._select(table_name='users', columns=("user_id",), condition=f"user_id = '{user_id}'") - if exist_user: - result = f"{user_id} already exists" - else: - self._insert( - table_name='users', - columns=("chat_id", "user_id"), - values=(chat_id, user_id) - ) - result = f"{user_id} added" - return result + return response def get_users(self) -> list: """ + This method will be deprecated after https://github.com/obervinov/users-package/issues/44 (users-package:v3.1.0). Get a list of all users in the database. Returns: - list: A list of all users from the messages table. + list: A list of all users from the users table. Examples: >>> get_users() diff --git a/tests/test_database.py b/tests/test_database.py index d1766d265..8c7baa55c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -6,11 +6,13 @@ import sys import json import importlib -import pytest from datetime import datetime +from collections import defaultdict +import pytest import psycopg2 from psycopg2 import pool from src.modules.database import DatabaseClient +from src.modules.tools import get_hash # pylint: disable=too-many-locals @@ -91,36 +93,290 @@ def test_database_connection(namespace, vault_instance, postgres_instance): @pytest.mark.order(6) -def test_add_message_to_queue(namespace, vault_instance, postgres_instance): +def test_messages_queue(namespace, vault_instance): """ - Checking the addition of a message to the queue + Checking the addition of a message to the queue and extraction of a message from the queue """ - _, cursor = postgres_instance data = { 'user_id': '12345', - 'post_id': '67890', - 'post_url': 'https://www.instagram.com/p/67890/', + 'post_id': 'qwerty123', + 'post_url': 'https://www.instagram.com/p/qwerty123/', 'post_owner': 'johndoe', - 'link_type': 'profile', - 'message_id': 'abcde', + 'link_type': 'post', + 'message_id': '111111', 'chat_id': 'xyz', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', 'upload_status': 'not started' } database = DatabaseClient(vault=vault_instance, db_role=namespace) - response = database.add_message_to_queue(data=data) + status = database.add_message_to_queue(data=data) # Check the addition of a message to the queue - cursor.execute( - "SELECT user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status " - "FROM queue WHERE message_id = 'abcde'" + queue_message = database.get_message_from_queue(scheduled_time=data['scheduled_time']) + queue_item = {} + queue_item['user_id'] = queue_message[1] + queue_item['post_id'] = queue_message[2] + queue_item['post_url'] = queue_message[3] + queue_item['post_owner'] = queue_message[4] + queue_item['link_type'] = queue_message[5] + queue_item['message_id'] = queue_message[6] + queue_item['chat_id'] = queue_message[7] + queue_item['scheduled_time'] = queue_message[8] + queue_item['download_status'] = queue_message[9] + queue_item['upload_status'] = queue_message[10] + assert status == f"{data['message_id']}: added to queue" + assert queue_item == data + + +@pytest.mark.order(7) +def test_change_message_state_in_queue(namespace, vault_instance, postgres_instance): + """ + Checking the change of the message state in 
the queue + """ + _, cursor = postgres_instance + data = { + 'user_id': '12345', + 'post_id': 'qwerty456', + 'post_url': 'https://www.instagram.com/p/qwerty456/', + 'post_owner': 'johndoe', + 'link_type': 'post', + 'message_id': '222222', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-01 12:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + } + database = DatabaseClient(vault=vault_instance, db_role=namespace) + database.update_message_state_in_queue( + post_id='qwerty456', + state='processed', + download_status='completed', + upload_status='completed', + post_owner='johndoe' + ) + + # Check the change of the message state in the queue + status = database.update_message_state_in_queue( + message_id=data['message_id'], + state='downloaded', + download_status='completed', + upload_status='completed', + post_owner='johndoe' ) - queue_item = cursor.fetchone() - assert len(queue_item) > 0 - assert response == f"{data['message_id']}: added to queue" - assert queue_item == ( - data['user_id'], data['post_id'], data['post_url'], - data['post_owner'], data['link_type'], data['message_id'], - data['chat_id'], datetime.strptime(data['scheduled_time'], '%Y-%m-%d %H:%M:%S'), data['download_status'], data['upload_status'] + assert status == f"{data['message_id']}: processed" + + # Check records in database + cursor.execute("SELECT post_id FROM queue WHERE post_id = 'qwerty456'") + record_queue = cursor.fetchall() + assert record_queue is None + cursor.execute("SELECT post_id, state, upload_status, download_status FROM processed WHERE post_id = 'qwerty456'") + record_processed = cursor.fetchall() + assert record_processed is not None + assert record_processed[0][0] == 'qwerty456' + assert record_processed[0][1] == 'processed' + assert record_processed[0][2] == 'completed' + assert record_processed[0][3] == 'completed' + + +@pytest.mark.order(8) +def test_change_message_schedule_time_in_queue(namespace, vault_instance, postgres_instance): + """ + Checking the change of the message schedule time in the queue + """ + _, cursor = postgres_instance + data = { + 'user_id': '12345', + 'post_id': 'qwerty789', + 'post_url': 'https://www.instagram.com/p/qwerty789/', + 'post_owner': 'johndoe', + 'link_type': 'post', + 'message_id': '333333', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-01 12:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + } + database = DatabaseClient(vault=vault_instance, db_role=namespace) + database.add_message_to_queue(data=data) + + # Check the change of the message schedule time in the queue + status = database.update_schedule_time_in_queue( + post_id='qwerty789', + user_id='12345', + scheduled_time='2022-01-02 13:00:00' ) + assert status == f"{data['post_id']}: scheduled time updated" + + # Check records in database + cursor.execute("SELECT scheduled_time FROM queue WHERE post_id = 'qwerty789'") + record_queue = cursor.fetchall() + assert record_queue is not None + assert record_queue[0][0] == datetime.strptime('2022-01-02 13:00:00', '%Y-%m-%d %H:%M:%S') + + +@pytest.mark.order(9) +def test_get_user_queue(namespace, vault_instance): + """ + Checking the extraction of the user queue + """ + user_id = '111111' + data = [ + { + 'user_id': user_id, + 'post_id': 'qwerty123', + 'post_url': 'https://www.instagram.com/p/qwerty123/', + 'post_owner': 'johndoe', + 'link_type': 'post', + 'message_id': '111111', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-01 12:00:00', + 'download_status': 'not started', + 'upload_status': 'not 
started' + }, + { + 'user_id': user_id, + 'post_id': 'qwerty456', + 'post_url': 'https://www.instagram.com/p/qwerty456/', + 'post_owner': 'johndoe', + 'link_type': 'post', + 'message_id': '222222', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-01 12:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + }, + { + 'user_id': user_id, + 'post_id': 'qwerty789', + 'post_url': 'https://www.instagram.com/p/qwerty789/', + 'post_owner': 'johndoe', + 'link_type': 'post', + 'message_id': '333333', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-02 13:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + } + ] + database = DatabaseClient(vault=vault_instance, db_role=namespace) + for message in data: + status = database.add_message_to_queue(data=message) + assert status == f"{message['message_id']}: added to queue" + + # Validate the extraction of the user queue + user_queue = database.get_user_queue(user_id='111111') + expected_response = defaultdict(list) + for entry in data: + expected_response[entry['user_id']].append({ + 'post_id': entry['post_id'], + 'scheduled_time': entry['scheduled_time'] + }) + expected_response = dict(expected_response) + assert user_queue is not None + assert len(user_queue.get(user_id, [])) == len(data) + assert user_queue == expected_response + + +@pytest.mark.order(10) +def test_get_user_processed_data(namespace, vault_instance): + """ + Checking the extraction of the user processed data + """ + user_id = '111111' + # Marked messages from previous tests + mark_processed = ['qwerty123', 'qwerty456', 'qwerty789'] + database = DatabaseClient(vault=vault_instance, db_role=namespace) + for item in mark_processed: + status = database.update_message_state_in_queue( + post_id=item, + state='processed', + download_status='completed', + upload_status='completed', + post_owner='johndoe' + ) + assert status == f"{item}: processed" + user_processed = database.get_user_processed(user_id=user_id) + user_queue = database.get_user_queue(user_id=user_id) + for item in mark_processed: + assert item not in user_queue.get(user_id, []).values() + assert item in user_processed.get(user_id, []).values() + + +@pytest.mark.order(11) +def test_check_message_uniqueness(namespace, vault_instance): + """ + Checking the uniqueness of the message + """ + data = { + 'user_id': '123456', + 'post_id': 'qwerty1111', + 'post_url': 'https://www.instagram.com/p/qwerty789/', + 'post_owner': 'johndoe', + 'link_type': 'post', + 'message_id': '333333', + 'chat_id': 'xyz', + 'scheduled_time': '2022-01-02 13:00:00', + 'download_status': 'not started', + 'upload_status': 'not started' + } + database = DatabaseClient(vault=vault_instance, db_role=namespace) + uniqueness = database.check_message_uniqueness(post_id=data['post_id']) + assert uniqueness is True + + status = database.add_message_to_queue(data=data) + assert status == f"{data['message_id']}: added to queue" + + _ = database.add_message_to_queue(data=data) + uniqueness = database.check_message_uniqueness(post_id=data['post_id']) + assert uniqueness is False + + +@pytest.mark.order(12) +def test_service_messages(namespace, vault_instance, postgres_instance): + """ + Checking + """ + _, cursor = postgres_instance + data = { + 'message_id': '444444', + 'chat_id': 'xyz', + 'message_content': 'Test message', + 'message_type': 'status_message', + 'state': 'updated' + } + + # Keep new status_message + database = DatabaseClient(vault=vault_instance, db_role=namespace) + status = database.keep_message(**data) + 
assert status == f"{data['message_id']} kept" + new_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) + assert new_message[0] == data['message_id'] + assert new_message[1] == data['chat_id'] + assert new_message[4] == get_hash(data['message_content']) + assert new_message[5] == 'updated' + + # Update exist message + data['message_content'] = 'Updated message' + status = database.keep_message(**data) + assert status == f"{data['message_id']} updated" + updated_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) + assert updated_message[0] == data['message_id'] + assert updated_message[1] == data['chat_id'] + assert updated_message[2] != updated_message[3] + assert updated_message[3] != new_message[3] + assert updated_message[4] == get_hash(data['message_content']) + assert updated_message[5] == 'updated' + + # Recreate exist message + data['message_content'] = 'Recreated message' + status = database.keep_message(**data, recreate=True) + assert status == f"{data['message_id']} recreated" + recreated_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) + assert recreated_message[0] == data['message_id'] + assert recreated_message[1] == data['chat_id'] + assert recreated_message[2] == recreated_message[3] + assert recreated_message[2] != updated_message[2] + assert recreated_message[3] != updated_message[3] + assert recreated_message[4] == get_hash(data['message_content']) + assert recreated_message[5] == 'updated' From a0fdfb624881f8f3c999898380aa6ef724ea249b Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 00:50:15 +0400 Subject: [PATCH 062/148] fixed linting issues --- .github/workflows/release.yaml | 2 +- CHANGELOG.md | 1 + tests/test_database.py | 7 +++---- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 61ea53c49..96d7654b6 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -17,7 +17,7 @@ jobs: steps: - name: Delete untagged images from GitHub Container Registry continue-on-error: true - uses: Chizkiyahu/delete-untagged-ghcr-action@v3 + uses: Chizkiyahu/delete-untagged-ghcr-action@v4 with: token: ${{ secrets.PAT_GHCR_CLEANUP }} package_name: 'pyinstabot-downloader' diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f388169a..a7c681a83 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p * now all user data is stored in the database * psql credentials are now written out via Vault Database Engine #### 🚀 Features +* bump workflow version to `1.2.9` * bump vault-package to major version `3.0.0` * bump users-package to major version `3.0.0` * bump telegram-package to major version `2.0.1` diff --git a/tests/test_database.py b/tests/test_database.py index 8c7baa55c..12fabe4cf 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -12,7 +12,7 @@ import psycopg2 from psycopg2 import pool from src.modules.database import DatabaseClient -from src.modules.tools import get_hash +from src.modules.tools import get_hash # pylint: disable=too-many-locals @@ -333,11 +333,10 @@ def test_check_message_uniqueness(namespace, vault_instance): @pytest.mark.order(12) -def test_service_messages(namespace, vault_instance, postgres_instance): +def test_service_messages(namespace, vault_instance): """ - Checking + Checking the 
registration of service messages """ - _, cursor = postgres_instance data = { 'message_id': '444444', 'chat_id': 'xyz', From 0d597e50fbfa08731f09df5aaf99a9b9c17e5f5a Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 01:01:36 +0400 Subject: [PATCH 063/148] fixed tests: part1 --- tests/test_database.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 12fabe4cf..32cfb63cf 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -122,7 +122,7 @@ def test_messages_queue(namespace, vault_instance): queue_item['link_type'] = queue_message[5] queue_item['message_id'] = queue_message[6] queue_item['chat_id'] = queue_message[7] - queue_item['scheduled_time'] = queue_message[8] + queue_item['scheduled_time'] = datetime.strftime(queue_message[8], '%Y-%m-%d %H:%M:%S') queue_item['download_status'] = queue_message[9] queue_item['upload_status'] = queue_message[10] assert status == f"{data['message_id']}: added to queue" @@ -230,7 +230,7 @@ def test_get_user_queue(namespace, vault_instance): 'link_type': 'post', 'message_id': '111111', 'chat_id': 'xyz', - 'scheduled_time': '2022-01-01 12:00:00', + 'scheduled_time': datetime.now() + datetime.timedelta(hours=1), 'download_status': 'not started', 'upload_status': 'not started' }, @@ -242,7 +242,7 @@ def test_get_user_queue(namespace, vault_instance): 'link_type': 'post', 'message_id': '222222', 'chat_id': 'xyz', - 'scheduled_time': '2022-01-01 12:00:00', + 'scheduled_time': datetime.now() - datetime.timedelta(hours=2), 'download_status': 'not started', 'upload_status': 'not started' }, @@ -254,7 +254,7 @@ def test_get_user_queue(namespace, vault_instance): 'link_type': 'post', 'message_id': '333333', 'chat_id': 'xyz', - 'scheduled_time': '2022-01-02 13:00:00', + 'scheduled_time': datetime.now() + datetime.timedelta(hours=3), 'download_status': 'not started', 'upload_status': 'not started' } From 568801b957eb2f5c8615073b222cddfb2f04942a Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 01:17:15 +0400 Subject: [PATCH 064/148] fixed tests --- src/bot.py | 5 ----- src/modules/database.py | 4 ++-- tests/test_database.py | 29 ++++++++++++++--------------- 3 files changed, 16 insertions(+), 22 deletions(-) diff --git a/src/bot.py b/src/bot.py index 949562659..aea1ea048 100644 --- a/src/bot.py +++ b/src/bot.py @@ -83,11 +83,6 @@ def start_command(message: telegram.telegram_types.Message = None) -> None: """ if users.user_access_check(message.chat.id).get('access', None) == users.user_status_allow: log.info('[Bot]: Processing "start" command for user %s...', message.chat.id) - - # Add user to the database - response = database.add_user(user_id=message.chat.id, chat_id=message.chat.id) - log.info('[Bot]: user %s added to the database: %s', message.chat.id, response) - # Main message reply_markup = telegram.create_inline_markup(ROLES_MAP.keys()) start_message = telegram.send_styled_message( diff --git a/src/modules/database.py b/src/modules/database.py index 865d7dc5e..612331f59 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -432,7 +432,7 @@ def add_message_to_queue( >>> data = { ... 'user_id': '12345', ... 'post_id': '67890', - ... 'post_url': 'https://www.instagram.com/p/67890/', + ... 'post_url': 'https://www.example.com/p/67890/', ... 'post_owner': 'johndoe', ... 'link_type': 'profile', ... 
'message_id': 'abcde', @@ -489,7 +489,7 @@ def get_message_from_queue( Examples: >>> database.get_message_from_queue('2022-01-01 12:00:00') - (1, '123456789', 'vahj5AN8aek', 'https://www.instagram.com/p/vahj5AN8aek', 'johndoe', 'post', '12345', '12346', '123456789', + (1, '123456789', 'vahj5AN8aek', 'https://www.example.com/p/vahj5AN8aek', 'johndoe', 'post', '12345', '12346', '123456789', datetime.datetime(2023, 11, 14, 21, 21, 22, 603440), 'None', 'None', datetime.datetime(2023, 11, 14, 21, 14, 26, 680024), 'waiting') """ message = self._select( diff --git a/tests/test_database.py b/tests/test_database.py index 32cfb63cf..565a9b5fc 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -6,7 +6,7 @@ import sys import json import importlib -from datetime import datetime +from datetime import datetime, timedelta from collections import defaultdict import pytest import psycopg2 @@ -100,7 +100,7 @@ def test_messages_queue(namespace, vault_instance): data = { 'user_id': '12345', 'post_id': 'qwerty123', - 'post_url': 'https://www.instagram.com/p/qwerty123/', + 'post_url': 'https://www.example.com/p/qwerty123/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '111111', @@ -138,7 +138,7 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta data = { 'user_id': '12345', 'post_id': 'qwerty456', - 'post_url': 'https://www.instagram.com/p/qwerty456/', + 'post_url': 'https://www.example.com/p/qwerty456/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '222222', @@ -188,7 +188,7 @@ def test_change_message_schedule_time_in_queue(namespace, vault_instance, postgr data = { 'user_id': '12345', 'post_id': 'qwerty789', - 'post_url': 'https://www.instagram.com/p/qwerty789/', + 'post_url': 'https://www.example.com/p/qwerty789/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '333333', @@ -225,36 +225,36 @@ def test_get_user_queue(namespace, vault_instance): { 'user_id': user_id, 'post_id': 'qwerty123', - 'post_url': 'https://www.instagram.com/p/qwerty123/', + 'post_url': 'https://www.example.com/p/qwerty123/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '111111', 'chat_id': 'xyz', - 'scheduled_time': datetime.now() + datetime.timedelta(hours=1), + 'scheduled_time': datetime.now() + timedelta(hours=1), 'download_status': 'not started', 'upload_status': 'not started' }, { 'user_id': user_id, 'post_id': 'qwerty456', - 'post_url': 'https://www.instagram.com/p/qwerty456/', + 'post_url': 'https://www.example.com/p/qwerty456/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '222222', 'chat_id': 'xyz', - 'scheduled_time': datetime.now() - datetime.timedelta(hours=2), + 'scheduled_time': datetime.now() - timedelta(hours=2), 'download_status': 'not started', 'upload_status': 'not started' }, { 'user_id': user_id, 'post_id': 'qwerty789', - 'post_url': 'https://www.instagram.com/p/qwerty789/', + 'post_url': 'https://www.example.com/p/qwerty789/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '333333', 'chat_id': 'xyz', - 'scheduled_time': datetime.now() + datetime.timedelta(hours=3), + 'scheduled_time': datetime.now() + timedelta(hours=3), 'download_status': 'not started', 'upload_status': 'not started' } @@ -311,7 +311,7 @@ def test_check_message_uniqueness(namespace, vault_instance): data = { 'user_id': '123456', 'post_id': 'qwerty1111', - 'post_url': 'https://www.instagram.com/p/qwerty789/', + 'post_url': 'https://www.example.com/p/qwerty789/', 'post_owner': 'johndoe', 'link_type': 'post', 
'message_id': '333333', @@ -321,14 +321,13 @@ def test_check_message_uniqueness(namespace, vault_instance): 'upload_status': 'not started' } database = DatabaseClient(vault=vault_instance, db_role=namespace) - uniqueness = database.check_message_uniqueness(post_id=data['post_id']) + + uniqueness = database.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id']) assert uniqueness is True status = database.add_message_to_queue(data=data) assert status == f"{data['message_id']}: added to queue" - - _ = database.add_message_to_queue(data=data) - uniqueness = database.check_message_uniqueness(post_id=data['post_id']) + uniqueness = database.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id']) assert uniqueness is False From 0e560f4a3b4ecd473247cfd2f1f3fa8e36e7af64 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 01:25:43 +0400 Subject: [PATCH 065/148] fixed tests: part2 --- tests/test_database.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 565a9b5fc..42663094c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -221,6 +221,7 @@ def test_get_user_queue(namespace, vault_instance): Checking the extraction of the user queue """ user_id = '111111' + timestamp = datetime.now() data = [ { 'user_id': user_id, @@ -230,7 +231,7 @@ def test_get_user_queue(namespace, vault_instance): 'link_type': 'post', 'message_id': '111111', 'chat_id': 'xyz', - 'scheduled_time': datetime.now() + timedelta(hours=1), + 'scheduled_time': timestamp + timedelta(hours=1), 'download_status': 'not started', 'upload_status': 'not started' }, @@ -242,7 +243,7 @@ def test_get_user_queue(namespace, vault_instance): 'link_type': 'post', 'message_id': '222222', 'chat_id': 'xyz', - 'scheduled_time': datetime.now() - timedelta(hours=2), + 'scheduled_time': timestamp - timedelta(hours=2), 'download_status': 'not started', 'upload_status': 'not started' }, @@ -254,7 +255,7 @@ def test_get_user_queue(namespace, vault_instance): 'link_type': 'post', 'message_id': '333333', 'chat_id': 'xyz', - 'scheduled_time': datetime.now() + timedelta(hours=3), + 'scheduled_time': timestamp + timedelta(hours=3), 'download_status': 'not started', 'upload_status': 'not started' } @@ -352,7 +353,7 @@ def test_service_messages(namespace, vault_instance): assert new_message[0] == data['message_id'] assert new_message[1] == data['chat_id'] assert new_message[4] == get_hash(data['message_content']) - assert new_message[5] == 'updated' + assert new_message[5] == 'added' # Update exist message data['message_content'] = 'Updated message' From a744ae777818916e7a58ebfc20296cd01ec2857e Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 13:54:21 +0400 Subject: [PATCH 066/148] fixed tests: part3 --- CHANGELOG.md | 2 +- tests/test_database.py | 19 +++++++------------ 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a7c681a83..84a664272 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
-## v2.3.0 - 2024-09-05 +## v2.3.0 - 2024-09-06 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/tests/test_database.py b/tests/test_database.py index 42663094c..52276ad84 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -137,8 +137,8 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta _, cursor = postgres_instance data = { 'user_id': '12345', - 'post_id': 'qwerty456', - 'post_url': 'https://www.example.com/p/qwerty456/', + 'post_id': 'qwerty222', + 'post_url': 'https://www.example.com/p/qwerty222/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': '222222', @@ -148,13 +148,8 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta 'upload_status': 'not started' } database = DatabaseClient(vault=vault_instance, db_role=namespace) - database.update_message_state_in_queue( - post_id='qwerty456', - state='processed', - download_status='completed', - upload_status='completed', - post_owner='johndoe' - ) + status = database.add_message_to_queue(data=data) + assert status == f"{data['message_id']}: added to queue" # Check the change of the message state in the queue status = database.update_message_state_in_queue( @@ -167,13 +162,13 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta assert status == f"{data['message_id']}: processed" # Check records in database - cursor.execute("SELECT post_id FROM queue WHERE post_id = 'qwerty456'") + cursor.execute(f"SELECT post_id FROM queue WHERE post_id = '{data['post_id']}'") record_queue = cursor.fetchall() assert record_queue is None - cursor.execute("SELECT post_id, state, upload_status, download_status FROM processed WHERE post_id = 'qwerty456'") + cursor.execute(f"SELECT post_id, state, upload_status, download_status FROM processed WHERE post_id = '{data['post_id']}'") record_processed = cursor.fetchall() assert record_processed is not None - assert record_processed[0][0] == 'qwerty456' + assert record_processed[0][0] == data['post_id'] assert record_processed[0][1] == 'processed' assert record_processed[0][2] == 'completed' assert record_processed[0][3] == 'completed' From 911ac0e93fbb686b0a29dfd1bb16aae1d24c2e08 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 14:15:28 +0400 Subject: [PATCH 067/148] fixed tests: part4 --- tests/test_database.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 52276ad84..aeef7b078 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -152,14 +152,14 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta assert status == f"{data['message_id']}: added to queue" # Check the change of the message state in the queue - status = database.update_message_state_in_queue( + updated_status = database.update_message_state_in_queue( message_id=data['message_id'], - state='downloaded', + state='processed', download_status='completed', upload_status='completed', post_owner='johndoe' ) - assert status == f"{data['message_id']}: processed" + assert updated_status == f"{data['message_id']}: processed" # Check records in database cursor.execute(f"SELECT post_id FROM queue WHERE post_id = '{data['post_id']}'") @@ -224,7 +224,7 @@ def test_get_user_queue(namespace, vault_instance): 'post_url': 
'https://www.example.com/p/qwerty123/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '111111', + 'message_id': 'qwerty123', 'chat_id': 'xyz', 'scheduled_time': timestamp + timedelta(hours=1), 'download_status': 'not started', @@ -236,7 +236,7 @@ def test_get_user_queue(namespace, vault_instance): 'post_url': 'https://www.example.com/p/qwerty456/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '222222', + 'message_id': 'qwerty456', 'chat_id': 'xyz', 'scheduled_time': timestamp - timedelta(hours=2), 'download_status': 'not started', @@ -248,7 +248,7 @@ def test_get_user_queue(namespace, vault_instance): 'post_url': 'https://www.example.com/p/qwerty789/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '333333', + 'message_id': 'qwerty789', 'chat_id': 'xyz', 'scheduled_time': timestamp + timedelta(hours=3), 'download_status': 'not started', @@ -268,7 +268,8 @@ def test_get_user_queue(namespace, vault_instance): 'post_id': entry['post_id'], 'scheduled_time': entry['scheduled_time'] }) - expected_response = dict(expected_response) + for user_id, posts in expected_response.items(): + expected_response[user_id] = sorted(posts, key=lambda x: x['scheduled_time']) assert user_queue is not None assert len(user_queue.get(user_id, [])) == len(data) assert user_queue == expected_response From f9c3233bf165323e19dcf92bf20300b6d0f6fa15 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 14:54:35 +0400 Subject: [PATCH 068/148] fixed tests: part5 --- tests/test_database.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index aeef7b078..801f8a262 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -153,11 +153,11 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta # Check the change of the message state in the queue updated_status = database.update_message_state_in_queue( - message_id=data['message_id'], + post_id=data['post_id'], state='processed', download_status='completed', upload_status='completed', - post_owner='johndoe' + post_owner=data['post_owner'] ) assert updated_status == f"{data['message_id']}: processed" @@ -365,7 +365,7 @@ def test_service_messages(namespace, vault_instance): # Recreate exist message data['message_content'] = 'Recreated message' - status = database.keep_message(**data, recreate=True) + status = database.keep_message(**data, recreated=True) assert status == f"{data['message_id']} recreated" recreated_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) assert recreated_message[0] == data['message_id'] From 2130e28cd6cc876f6fa7a33b2478658117c7e254 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 15:06:25 +0400 Subject: [PATCH 069/148] fixed tests: part6 --- tests/test_database.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 801f8a262..173e6c752 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -103,7 +103,7 @@ def test_messages_queue(namespace, vault_instance): 'post_url': 'https://www.example.com/p/qwerty123/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '111111', + 'message_id': 'qwerty123', 'chat_id': 'xyz', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', @@ -164,10 +164,10 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta # Check records in database 
cursor.execute(f"SELECT post_id FROM queue WHERE post_id = '{data['post_id']}'") record_queue = cursor.fetchall() - assert record_queue is None + assert record_queue == [] cursor.execute(f"SELECT post_id, state, upload_status, download_status FROM processed WHERE post_id = '{data['post_id']}'") record_processed = cursor.fetchall() - assert record_processed is not None + assert record_processed != [] assert record_processed[0][0] == data['post_id'] assert record_processed[0][1] == 'processed' assert record_processed[0][2] == 'completed' From b1bc9dc4f1d585b402ae7dc02635ff8917942c52 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 15:08:57 +0400 Subject: [PATCH 070/148] fixed tests: part 7 --- tests/test_database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 173e6c752..8e5407377 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -186,7 +186,7 @@ def test_change_message_schedule_time_in_queue(namespace, vault_instance, postgr 'post_url': 'https://www.example.com/p/qwerty789/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '333333', + 'message_id': 'qwerty789', 'chat_id': 'xyz', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', @@ -311,7 +311,7 @@ def test_check_message_uniqueness(namespace, vault_instance): 'post_url': 'https://www.example.com/p/qwerty789/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '333333', + 'message_id': 'qwerty1111', 'chat_id': 'xyz', 'scheduled_time': '2022-01-02 13:00:00', 'download_status': 'not started', From fa7baf6d394bdf4a4cc440942b682194905c0df0 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 6 Sep 2024 17:46:49 +0400 Subject: [PATCH 071/148] fixed empty queue exception --- src/modules/database.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 612331f59..6138074b0 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -641,10 +641,11 @@ def get_user_queue( order_by='scheduled_time ASC', limit=10000 ) - for message in queue: - if user_id not in result: - result[user_id] = [] - result[user_id].append({'post_id': message[0], 'scheduled_time': message[1]}) + if queue: + for message in queue: + if user_id not in result: + result[user_id] = [] + result[user_id].append({'post_id': message[0], 'scheduled_time': message[1]}) return result if result else None def get_user_processed( From 855e8468cc55ee2636e780d9bf8832ccc4d5000f Mon Sep 17 00:00:00 2001 From: obervinov Date: Sun, 8 Sep 2024 01:32:08 +0400 Subject: [PATCH 072/148] modified: CHANGELOG.md modified: tests/conftest.py modified: tests/test_database.py new file: tests/test_metrics.py --- CHANGELOG.md | 4 +- tests/conftest.py | 21 ++++++++++ tests/test_database.py | 93 +++++++++++++++++++----------------------- tests/test_metrics.py | 31 ++++++++++++++ 4 files changed, 97 insertions(+), 52 deletions(-) create mode 100644 tests/test_metrics.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 84a664272..82d5edfcc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
-## v2.3.0 - 2024-09-06 +## v2.3.0 - 2024-09-08 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes @@ -14,7 +14,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p * bump vault-package to major version `3.0.0` * bump users-package to major version `3.0.0` * bump telegram-package to major version `2.0.1` -* add tests for database module +* add tests for database and metrics modules * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) #### 🐛 Bug Fixes * general bug fixes and improvements diff --git a/tests/conftest.py b/tests/conftest.py index 9511f4b86..c95d9918a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,8 @@ from psycopg2 import sql # pylint: disable=E0401 from vault import VaultClient +from src.modules.database import DatabaseClient +from src.modules.metrics import Metrics def pytest_configure(config): @@ -340,6 +342,25 @@ def fixture_vault_configuration_data(vault_instance, namespace): ) +@pytest.fixture(name="database_class", scope='session') +def fixture_database_class(vault_instance, namespace): + """ + Returns the database class + + Returns: + object: The database class. + """ + return DatabaseClient(vault=vault_instance, db_role=namespace) + + +@pytest.fixture(name="metrics_class", scope='session') +def fixture_metrics_class(vault_instance, database_class): + """ + Returns the metrics class + """ + return Metrics(port=8000, interval=1, metrics_prefix='pytest', vault=vault_instance, database=database_class) + + @pytest.fixture(name="postgres_messages_test_data", scope='session') def fixture_postgres_messages_test_data(postgres_instance): """ diff --git a/tests/test_database.py b/tests/test_database.py index 8e5407377..a50572a99 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,7 +1,6 @@ """ This module contains tests for the database module. 
""" - import os import sys import json @@ -11,24 +10,22 @@ import pytest import psycopg2 from psycopg2 import pool -from src.modules.database import DatabaseClient from src.modules.tools import get_hash # pylint: disable=too-many-locals @pytest.mark.order(2) -def test_init_database_client(namespace, vault_instance, vault_configuration_data, postgres_instance): +def test_init_database_client(vault_configuration_data, postgres_instance, database_class): """ Checking an initialized database client """ _ = vault_configuration_data _, cursor = postgres_instance - database = DatabaseClient(vault=vault_instance, db_role=namespace) # Check general attributes - assert isinstance(database.vault, object) - assert isinstance(database.db_role, str) - assert isinstance(database.database_connections, pool.SimpleConnectionPool) + assert isinstance(database_class.vault, object) + assert isinstance(database_class.db_role, str) + assert isinstance(database_class.database_connections, pool.SimpleConnectionPool) # Check tables creation in the database cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public'") @@ -63,13 +60,13 @@ def test_init_database_client(namespace, vault_instance, vault_configuration_dat @pytest.mark.order(4) -def test_reset_stale_messages(namespace, vault_instance, postgres_instance, postgres_messages_test_data): +def test_reset_stale_messages(postgres_instance, postgres_messages_test_data, database_class): """ Checking the reset of stale messages when the database client is initialized """ _, cursor = postgres_instance _ = postgres_messages_test_data - _ = DatabaseClient(vault=vault_instance, db_role=namespace) + _ = database_class # Check the reset of stale messages cursor.execute("SELECT state FROM messages") @@ -80,20 +77,19 @@ def test_reset_stale_messages(namespace, vault_instance, postgres_instance, post @pytest.mark.order(5) -def test_database_connection(namespace, vault_instance, postgres_instance): +def test_database_connection(postgres_instance, database_class): """ Checking the database connection """ _ = postgres_instance - database = DatabaseClient(vault=vault_instance, db_role=namespace) - connection = database.get_connection() + connection = database_class.get_connection() assert isinstance(connection, psycopg2.extensions.connection) assert not connection.closed - database.close_connection(connection) + database_class.close_connection(connection) @pytest.mark.order(6) -def test_messages_queue(namespace, vault_instance): +def test_messages_queue(database_class): """ Checking the addition of a message to the queue and extraction of a message from the queue """ @@ -109,11 +105,10 @@ def test_messages_queue(namespace, vault_instance): 'download_status': 'not started', 'upload_status': 'not started' } - database = DatabaseClient(vault=vault_instance, db_role=namespace) - status = database.add_message_to_queue(data=data) + status = database_class.add_message_to_queue(data=data) # Check the addition of a message to the queue - queue_message = database.get_message_from_queue(scheduled_time=data['scheduled_time']) + queue_message = database_class.get_message_from_queue(scheduled_time=data['scheduled_time']) queue_item = {} queue_item['user_id'] = queue_message[1] queue_item['post_id'] = queue_message[2] @@ -130,7 +125,7 @@ def test_messages_queue(namespace, vault_instance): @pytest.mark.order(7) -def test_change_message_state_in_queue(namespace, vault_instance, postgres_instance): +def test_change_message_state_in_queue(database_class, 
postgres_instance): """ Checking the change of the message state in the queue """ @@ -147,12 +142,11 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta 'download_status': 'not started', 'upload_status': 'not started' } - database = DatabaseClient(vault=vault_instance, db_role=namespace) - status = database.add_message_to_queue(data=data) + status = database_class.add_message_to_queue(data=data) assert status == f"{data['message_id']}: added to queue" # Check the change of the message state in the queue - updated_status = database.update_message_state_in_queue( + updated_status = database_class.update_message_state_in_queue( post_id=data['post_id'], state='processed', download_status='completed', @@ -175,7 +169,7 @@ def test_change_message_state_in_queue(namespace, vault_instance, postgres_insta @pytest.mark.order(8) -def test_change_message_schedule_time_in_queue(namespace, vault_instance, postgres_instance): +def test_change_message_schedule_time_in_queue(database_class, postgres_instance): """ Checking the change of the message schedule time in the queue """ @@ -192,11 +186,11 @@ def test_change_message_schedule_time_in_queue(namespace, vault_instance, postgr 'download_status': 'not started', 'upload_status': 'not started' } - database = DatabaseClient(vault=vault_instance, db_role=namespace) - database.add_message_to_queue(data=data) + status = database_class.add_message_to_queue(data=data) + assert status == f"{data['message_id']}: added to queue" # Check the change of the message schedule time in the queue - status = database.update_schedule_time_in_queue( + status = database_class.update_schedule_time_in_queue( post_id='qwerty789', user_id='12345', scheduled_time='2022-01-02 13:00:00' @@ -211,7 +205,7 @@ def test_change_message_schedule_time_in_queue(namespace, vault_instance, postgr @pytest.mark.order(9) -def test_get_user_queue(namespace, vault_instance): +def test_get_user_queue(database_class): """ Checking the extraction of the user queue """ @@ -255,13 +249,12 @@ def test_get_user_queue(namespace, vault_instance): 'upload_status': 'not started' } ] - database = DatabaseClient(vault=vault_instance, db_role=namespace) for message in data: - status = database.add_message_to_queue(data=message) + status = database_class.add_message_to_queue(data=message) assert status == f"{message['message_id']}: added to queue" # Validate the extraction of the user queue - user_queue = database.get_user_queue(user_id='111111') + user_queue = database_class.get_user_queue(user_id='111111') expected_response = defaultdict(list) for entry in data: expected_response[entry['user_id']].append({ @@ -276,16 +269,15 @@ def test_get_user_queue(namespace, vault_instance): @pytest.mark.order(10) -def test_get_user_processed_data(namespace, vault_instance): +def test_get_user_processed_data(database_class): """ Checking the extraction of the user processed data """ user_id = '111111' # Marked messages from previous tests mark_processed = ['qwerty123', 'qwerty456', 'qwerty789'] - database = DatabaseClient(vault=vault_instance, db_role=namespace) for item in mark_processed: - status = database.update_message_state_in_queue( + status = database_class.update_message_state_in_queue( post_id=item, state='processed', download_status='completed', @@ -293,15 +285,19 @@ def test_get_user_processed_data(namespace, vault_instance): post_owner='johndoe' ) assert status == f"{item}: processed" - user_processed = database.get_user_processed(user_id=user_id) - user_queue = 
database.get_user_queue(user_id=user_id) + user_processed = database_class.get_user_processed(user_id=user_id) + user_queue = database_class.get_user_queue(user_id=user_id) for item in mark_processed: - assert item not in user_queue.get(user_id, []).values() - assert item in user_processed.get(user_id, []).values() + if user_queue: + assert item not in user_queue.get(user_id, []).values() + if user_processed: + assert item in user_processed.get(user_id, []).values() + else: + assert False @pytest.mark.order(11) -def test_check_message_uniqueness(namespace, vault_instance): +def test_check_message_uniqueness(database_class): """ Checking the uniqueness of the message """ @@ -317,19 +313,17 @@ def test_check_message_uniqueness(namespace, vault_instance): 'download_status': 'not started', 'upload_status': 'not started' } - database = DatabaseClient(vault=vault_instance, db_role=namespace) - - uniqueness = database.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id']) + uniqueness = database_class.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id']) assert uniqueness is True - status = database.add_message_to_queue(data=data) + status = database_class.add_message_to_queue(data=data) assert status == f"{data['message_id']}: added to queue" - uniqueness = database.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id']) + uniqueness = database_class.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id']) assert uniqueness is False @pytest.mark.order(12) -def test_service_messages(namespace, vault_instance): +def test_service_messages(database_class): """ Checking the registration of service messages """ @@ -342,10 +336,9 @@ def test_service_messages(namespace, vault_instance): } # Keep new status_message - database = DatabaseClient(vault=vault_instance, db_role=namespace) - status = database.keep_message(**data) + status = database_class.keep_message(**data) assert status == f"{data['message_id']} kept" - new_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) + new_message = database_class.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) assert new_message[0] == data['message_id'] assert new_message[1] == data['chat_id'] assert new_message[4] == get_hash(data['message_content']) @@ -353,9 +346,9 @@ def test_service_messages(namespace, vault_instance): # Update exist message data['message_content'] = 'Updated message' - status = database.keep_message(**data) + status = database_class.keep_message(**data) assert status == f"{data['message_id']} updated" - updated_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) + updated_message = database_class.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) assert updated_message[0] == data['message_id'] assert updated_message[1] == data['chat_id'] assert updated_message[2] != updated_message[3] @@ -365,9 +358,9 @@ def test_service_messages(namespace, vault_instance): # Recreate exist message data['message_content'] = 'Recreated message' - status = database.keep_message(**data, recreated=True) + status = database_class.keep_message(**data, recreated=True) assert status == f"{data['message_id']} recreated" - recreated_message = database.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id']) + recreated_message = database_class.get_considered_message(message_type=data['message_type'], 
chat_id=data['chat_id']) assert recreated_message[0] == data['message_id'] assert recreated_message[1] == data['chat_id'] assert recreated_message[2] == recreated_message[3] diff --git a/tests/test_metrics.py b/tests/test_metrics.py new file mode 100644 index 000000000..a8f58423e --- /dev/null +++ b/tests/test_metrics.py @@ -0,0 +1,31 @@ +""" +This module contains tests for the database module. +""" +import requests +import pytest + + +@pytest.mark.order(13) +def test_metrics_instance(metrics_class, database_class, vault_instance): + """ + Checking the creation of a metrics instance. + """ + assert metrics_class.port == 8000 + assert metrics_class.metrics_prefix == "pytest" + assert metrics_class.interval == 1 + assert metrics_class.vault == vault_instance + assert metrics_class.database == database_class + assert metrics_class.thread_status_gauge is not None + assert metrics_class.access_granted_counter is not None + assert metrics_class.access_denied_counter is not None + assert metrics_class.processed_messages_counter is not None + assert metrics_class.queue_length_gauge is not None + + +@pytest.mark.order(14) +def test_metrics_users_stats(metrics_class): + """ + Checking the collection of user statistics. + """ + response = requests.get(f"http://localhost:{metrics_class.port}/", timeout=10) + assert f"{metrics_class.metrics_prefix}_thread_status" in response.text From 580d75e10a454fd18d52d5f24fe0552510240103 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sun, 8 Sep 2024 01:50:16 +0400 Subject: [PATCH 073/148] modified: src/modules/database.py modified: tests/test_database.py --- src/modules/database.py | 2 +- tests/test_database.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 6138074b0..1f9231117 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -664,7 +664,7 @@ def get_user_processed( Examples: >>> get_user_processed(user_id='12345') - {'12345': [{'post_id': '123456789', 'processed_time': '2022-01-01 12:00:00', 'state': 'completed'}]} + {'12345': [{'post_id': '123456789', 'timestamp': '2022-01-01 12:00:00', 'state': 'processed'}]} """ result = {} processed = self._select( diff --git a/tests/test_database.py b/tests/test_database.py index a50572a99..bccc607ca 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -289,9 +289,15 @@ def test_get_user_processed_data(database_class): user_queue = database_class.get_user_queue(user_id=user_id) for item in mark_processed: if user_queue: - assert item not in user_queue.get(user_id, []).values() + for q_message in user_queue.get(user_id, []): + assert item != q_message['post_id'] if user_processed: - assert item in user_processed.get(user_id, []).values() + found = False + for p_message in user_processed.get(user_id, []): + if item == p_message['post_id']: + found = True + if not found: + assert False else: assert False From 686cb60dc5e02e52c475d504da13e4bb6f495354 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sun, 8 Sep 2024 13:49:15 +0400 Subject: [PATCH 074/148] modified: tests/test_database.py modified: tests/test_metrics.py --- tests/test_database.py | 8 ++++++-- tests/test_metrics.py | 3 +-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index bccc607ca..af239479c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -11,6 +11,8 @@ import psycopg2 from psycopg2 import pool from src.modules.tools import get_hash +from src.modules.database 
import DatabaseClient + # pylint: disable=too-many-locals @@ -60,13 +62,15 @@ def test_init_database_client(vault_configuration_data, postgres_instance, datab @pytest.mark.order(4) -def test_reset_stale_messages(postgres_instance, postgres_messages_test_data, database_class): +def test_reset_stale_messages(postgres_instance, postgres_messages_test_data, vault_instance, namespace): """ Checking the reset of stale messages when the database client is initialized """ _, cursor = postgres_instance _ = postgres_messages_test_data - _ = database_class + # Reinitialize the database class for triggering the reset of stale messages + # Create new instance of the DatabaseClient class because private method _reset_stale_records() is launched only when the class is initialized + _ = DatabaseClient(vault=vault_instance, db_role=namespace) # Check the reset of stale messages cursor.execute("SELECT state FROM messages") diff --git a/tests/test_metrics.py b/tests/test_metrics.py index a8f58423e..690e12dfd 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -11,7 +11,6 @@ def test_metrics_instance(metrics_class, database_class, vault_instance): Checking the creation of a metrics instance. """ assert metrics_class.port == 8000 - assert metrics_class.metrics_prefix == "pytest" assert metrics_class.interval == 1 assert metrics_class.vault == vault_instance assert metrics_class.database == database_class @@ -28,4 +27,4 @@ def test_metrics_users_stats(metrics_class): Checking the collection of user statistics. """ response = requests.get(f"http://localhost:{metrics_class.port}/", timeout=10) - assert f"{metrics_class.metrics_prefix}_thread_status" in response.text + assert "pytest_thread_status" in response.text From cefe70ddb628bdd2c6c18662547f9156d3954194 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sun, 8 Sep 2024 13:57:52 +0400 Subject: [PATCH 075/148] modified: tests/test_database.py --- tests/test_database.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_database.py b/tests/test_database.py index af239479c..574914a8c 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -297,6 +297,8 @@ def test_get_user_processed_data(database_class): assert item != q_message['post_id'] if user_processed: found = False + if user_processed.get(user_id, []) == []: + assert False for p_message in user_processed.get(user_id, []): if item == p_message['post_id']: found = True From 08b2b36ed85dd56e1f4aabb275aba63056809c4a Mon Sep 17 00:00:00 2001 From: obervinov Date: Sun, 8 Sep 2024 14:09:59 +0400 Subject: [PATCH 076/148] modified: tests/test_database.py --- tests/test_database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_database.py b/tests/test_database.py index 574914a8c..de5ae07aa 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -14,7 +14,6 @@ from src.modules.database import DatabaseClient - # pylint: disable=too-many-locals @pytest.mark.order(2) def test_init_database_client(vault_configuration_data, postgres_instance, database_class): @@ -303,6 +302,7 @@ def test_get_user_processed_data(database_class): if item == p_message['post_id']: found = True if not found: + print(f"Message {item} not found in processed messages: {user_processed.get(user_id, [])}") assert False else: assert False From 9478312ce2639efdeb340bf24f37d0123059dfda Mon Sep 17 00:00:00 2001 From: obervinov Date: Sun, 8 Sep 2024 14:37:54 +0400 Subject: [PATCH 077/148] modified: tests/test_database.py --- tests/test_database.py | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/tests/test_database.py b/tests/test_database.py index de5ae07aa..656b76bfc 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -298,6 +298,8 @@ def test_get_user_processed_data(database_class): found = False if user_processed.get(user_id, []) == []: assert False + else: + assert len(user_processed.get(user_id, [])) == len(mark_processed) for p_message in user_processed.get(user_id, []): if item == p_message['post_id']: found = True From 58cf28a0260e1f907eea538ffa55e3b0b9c34be9 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 00:04:05 +0400 Subject: [PATCH 078/148] modified: tests/test_database.py --- tests/test_database.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_database.py b/tests/test_database.py index 656b76bfc..11453f836 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -287,6 +287,7 @@ def test_get_user_processed_data(database_class): upload_status='completed', post_owner='johndoe' ) + print(status) assert status == f"{item}: processed" user_processed = database_class.get_user_processed(user_id=user_id) user_queue = database_class.get_user_queue(user_id=user_id) From b772c398c953ad3db18936715c22e51d63eead22 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 00:47:16 +0400 Subject: [PATCH 079/148] modified: tests/test_database.py --- tests/test_database.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 11453f836..0f8429f39 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -272,10 +272,11 @@ def test_get_user_queue(database_class): @pytest.mark.order(10) -def test_get_user_processed_data(database_class): +def test_get_user_processed_data(database_class, postgres_instance): """ Checking the extraction of the user processed data """ + conn, cursor = postgres_instance user_id = '111111' # Marked messages from previous tests mark_processed = ['qwerty123', 'qwerty456', 'qwerty789'] @@ -287,7 +288,6 @@ def test_get_user_processed_data(database_class): upload_status='completed', post_owner='johndoe' ) - print(status) assert status == f"{item}: processed" user_processed = database_class.get_user_processed(user_id=user_id) user_queue = database_class.get_user_queue(user_id=user_id) @@ -300,6 +300,8 @@ def test_get_user_processed_data(database_class): if user_processed.get(user_id, []) == []: assert False else: + items = cursor.select("SELECT * FROM processed") + print(items.fetchall()) assert len(user_processed.get(user_id, [])) == len(mark_processed) for p_message in user_processed.get(user_id, []): if item == p_message['post_id']: From 1bf4a5a7ec4caa639dec972483e6ac6666cba8ed Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 01:05:28 +0400 Subject: [PATCH 080/148] modified: tests/test_database.py --- tests/test_database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 0f8429f39..c4b98ea38 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -276,7 +276,7 @@ def test_get_user_processed_data(database_class, postgres_instance): """ Checking the extraction of the user processed data """ - conn, cursor = postgres_instance + _, cursor = postgres_instance user_id = '111111' # Marked messages from previous tests mark_processed = ['qwerty123', 'qwerty456', 'qwerty789'] @@ -300,7 +300,7 @@ def test_get_user_processed_data(database_class, postgres_instance): if user_processed.get(user_id, []) == []: 
assert False else: - items = cursor.select("SELECT * FROM processed") + items = cursor.execute("SELECT * FROM processed") print(items.fetchall()) assert len(user_processed.get(user_id, [])) == len(mark_processed) for p_message in user_processed.get(user_id, []): From a84fb0e831f6beb79c29b1c31e1b857e7aca3c3a Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 01:10:43 +0400 Subject: [PATCH 081/148] modified: tests/test_database.py --- tests/test_database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index c4b98ea38..337a95d46 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -300,8 +300,8 @@ def test_get_user_processed_data(database_class, postgres_instance): if user_processed.get(user_id, []) == []: assert False else: - items = cursor.execute("SELECT * FROM processed") - print(items.fetchall()) + cursor.execute("SELECT * FROM processed") + print(cursor.fetchall()) assert len(user_processed.get(user_id, [])) == len(mark_processed) for p_message in user_processed.get(user_id, []): if item == p_message['post_id']: From f9a0c25faafe80a7b5c6922c86ada7ee69e232bc Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 01:18:23 +0400 Subject: [PATCH 082/148] modified: tests/test_database.py --- tests/test_database.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 337a95d46..4562966b5 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -98,11 +98,11 @@ def test_messages_queue(database_class): """ data = { 'user_id': '12345', - 'post_id': 'qwerty123', - 'post_url': 'https://www.example.com/p/qwerty123/', + 'post_id': 'qwerty333', + 'post_url': 'https://www.example.com/p/qwerty333/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': 'qwerty123', + 'message_id': 'qwerty333', 'chat_id': 'xyz', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', @@ -139,7 +139,7 @@ def test_change_message_state_in_queue(database_class, postgres_instance): 'post_url': 'https://www.example.com/p/qwerty222/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': '222222', + 'message_id': 'qwerty222', 'chat_id': 'xyz', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', @@ -179,11 +179,11 @@ def test_change_message_schedule_time_in_queue(database_class, postgres_instance _, cursor = postgres_instance data = { 'user_id': '12345', - 'post_id': 'qwerty789', - 'post_url': 'https://www.example.com/p/qwerty789/', + 'post_id': 'qwerty444', + 'post_url': 'https://www.example.com/p/qwerty444/', 'post_owner': 'johndoe', 'link_type': 'post', - 'message_id': 'qwerty789', + 'message_id': 'qwerty444', 'chat_id': 'xyz', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', @@ -194,7 +194,7 @@ def test_change_message_schedule_time_in_queue(database_class, postgres_instance # Check the change of the message schedule time in the queue status = database_class.update_schedule_time_in_queue( - post_id='qwerty789', + post_id='qwerty444', user_id='12345', scheduled_time='2022-01-02 13:00:00' ) @@ -321,7 +321,7 @@ def test_check_message_uniqueness(database_class): data = { 'user_id': '123456', 'post_id': 'qwerty1111', - 'post_url': 'https://www.example.com/p/qwerty789/', + 'post_url': 'https://www.example.com/p/qwerty1111/', 'post_owner': 'johndoe', 'link_type': 'post', 'message_id': 'qwerty1111', From 5fb1a69d3bb498d8ec7262a40b13128766907c7f Mon 
Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 01:23:11 +0400 Subject: [PATCH 083/148] modified: tests/test_database.py modified: tests/test_metrics.py --- tests/test_database.py | 2 +- tests/test_metrics.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 4562966b5..54630c5ac 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -201,7 +201,7 @@ def test_change_message_schedule_time_in_queue(database_class, postgres_instance assert status == f"{data['post_id']}: scheduled time updated" # Check records in database - cursor.execute("SELECT scheduled_time FROM queue WHERE post_id = 'qwerty789'") + cursor.execute("SELECT scheduled_time FROM queue WHERE post_id = 'qwerty444'") record_queue = cursor.fetchall() assert record_queue is not None assert record_queue[0][0] == datetime.strptime('2022-01-02 13:00:00', '%Y-%m-%d %H:%M:%S') diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 690e12dfd..d39a0c373 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -26,5 +26,5 @@ def test_metrics_users_stats(metrics_class): """ Checking the collection of user statistics. """ - response = requests.get(f"http://localhost:{metrics_class.port}/", timeout=10) + response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) assert "pytest_thread_status" in response.text From 6319a398643dbbe260a2f089352f3a7e5aadacf1 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 13:15:27 +0400 Subject: [PATCH 084/148] modified: tests/conftest.py --- CHANGELOG.md | 2 +- tests/conftest.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 82d5edfcc..181439c83 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
-## v2.3.0 - 2024-09-08 +## v2.3.0 - 2024-09-09 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/tests/conftest.py b/tests/conftest.py index c95d9918a..15aef45b0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ import time import requests import pytest +import threading import hvac import psycopg2 from psycopg2 import sql @@ -358,7 +359,10 @@ def fixture_metrics_class(vault_instance, database_class): """ Returns the metrics class """ - return Metrics(port=8000, interval=1, metrics_prefix='pytest', vault=vault_instance, database=database_class) + metrics = Metrics(port=8000, interval=3, metrics_prefix='pytest', vault=vault_instance, database=database_class) + threads_list = threading.enumerate() + metrics.run(threads=threads_list) + return metrics @pytest.fixture(name="postgres_messages_test_data", scope='session') From 5c8e7e7d09d780e1c3c3b7ab94607a1103c60841 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 13:39:58 +0400 Subject: [PATCH 085/148] rewrote `get_users()` method in `database.py` --- src/bot.py | 10 +++++----- src/modules/database.py | 18 ++++++++++-------- src/modules/metrics.py | 14 +++++--------- tests/conftest.py | 4 ++-- 4 files changed, 22 insertions(+), 24 deletions(-) diff --git a/src/bot.py b/src/bot.py index aea1ea048..b90827810 100644 --- a/src/bot.py +++ b/src/bot.py @@ -491,16 +491,16 @@ def status_message_updater_thread() -> None: while True: time.sleep(STATUSES_MESSAGE_FREQUENCY) try: - if database.get_users(): - for user in database.get_users(): - user_id = user[0] - update_status_message(user_id=user_id) + users_dict = database.get_users() + if users_dict: + for user in users_dict: + update_status_message(user_id=user['user_id']) # pylint: disable=broad-exception-caught except Exception as exception: exception_context = { 'call': threading.current_thread().name, 'message': 'Failed to update the message with the status of received messages', - 'users': database.get_users(), + 'users': users_dict, 'user': user, 'exception': exception } diff --git a/src/modules/database.py b/src/modules/database.py index 1f9231117..423facb7e 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -64,7 +64,7 @@ class DatabaseClient: get_user_processed(user_id): Get last ten messages from the processed table for the specified user. check_message_uniqueness(post_id, user_id): Check if a message with the given post ID and chat ID already exists in the queue. keep_message(message_id, chat_id, message_content, **kwargs): Add a message to the messages table in the database. - get_users(): Get a list of all users in the database. + get_users(): Get a dict of users with their metadata from the users table. get_considered_message(message_type, chat_id): Get a message with specified type and Rises: @@ -795,25 +795,27 @@ def keep_message( return response - def get_users(self) -> list: + def get_users(self) -> dict: """ This method will be deprecated after https://github.com/obervinov/users-package/issues/44 (users-package:v3.1.0). - Get a list of all users in the database. + Get a dictionary of all users with their metadata from the users table in the database. Returns: - list: A list of all users from the users table. + dict: A dictionary containing all users in the database and their metadata. 
Examples: >>> get_users() - # [('{user_id}', '{chat_id}')] - [('12345', '67890')] + [{'user_id': '12345', 'chat_id': '67890', 'status': 'denied'}, {'user_id': '12346', 'chat_id': '67891', 'status': 'allowed'}] """ + users_dict = [] users = self._select( table_name='users', - columns=("user_id", "chat_id"), + columns=("user_id", "chat_id", "status"), limit=1000 ) - return users if users else None + for user in users: + users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]}) + return users_dict def get_considered_message( self, diff --git a/src/modules/metrics.py b/src/modules/metrics.py index c60a4e047..967ed0423 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -27,7 +27,6 @@ def __init__( :param metrics_prefix (str): prefix for the metrics. Keyword Args: - :param vault (Vault): instance of the Vault class. :param database (Database): instance of the Database class. Returns: @@ -37,13 +36,11 @@ def __init__( self.port = port self.interval = interval - self.vault = kwargs.get('vault', None) self.database = kwargs.get('database', None) self.thread_status_gauge = Gauge(f'{metrics_prefix}_thread_status', 'Thread status (1 = running, 0 = not running)', ['thread_name']) - if self.vault: + if self.database: self.access_granted_counter = Gauge(f'{metrics_prefix}_access_granted_total', 'Total number of users granted access') self.access_denied_counter = Gauge(f'{metrics_prefix}_access_denied_total', 'Total number of users denied access') - if self.database: self.processed_messages_counter = Gauge(f'{metrics_prefix}_processed_messages_total', 'Total number of processed messages') self.queue_length_gauge = Gauge(f'{metrics_prefix}_queue_length', 'Queue length') @@ -51,15 +48,14 @@ def collect_users_stats(self) -> None: """ The method collects information about users access status and updates the gauge. 
""" - users = self.vault.list_secrets(path='data/users') + users_dict = self.database.get_users() access_granted_count = 0 access_denied_count = 0 - for user in users: - user_status = json.loads(self.vault.read_secret(path=f'data/users/{user}')['authentication']) - if user_status.get('status') == 'denied': + for user in users_dict: + if user.get('status') == 'denied': access_denied_count += 1 - elif user_status.get('status') == 'allowed': + elif user.get('status') == 'allowed': access_granted_count += 1 self.access_granted_counter.set(access_granted_count) diff --git a/tests/conftest.py b/tests/conftest.py index 15aef45b0..6f464eb4a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,9 +3,9 @@ """ import os import time +import threading import requests import pytest -import threading import hvac import psycopg2 from psycopg2 import sql @@ -359,7 +359,7 @@ def fixture_metrics_class(vault_instance, database_class): """ Returns the metrics class """ - metrics = Metrics(port=8000, interval=3, metrics_prefix='pytest', vault=vault_instance, database=database_class) + metrics = Metrics(port=8000, interval=3, metrics_prefix='pytest', database=database_class) threads_list = threading.enumerate() metrics.run(threads=threads_list) return metrics From f945593ee405b648bab1c60a5f7806fb9035574a Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 14:02:30 +0400 Subject: [PATCH 086/148] fixed linting issues --- src/modules/metrics.py | 4 +--- tests/conftest.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 967ed0423..c96fb763e 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -1,6 +1,5 @@ """This module provides a way to expose metrics to Prometheus for monitoring the application.""" import time -import json from prometheus_client import start_http_server, Gauge from logger import log @@ -89,9 +88,8 @@ def run(self, threads: list) -> None: start_http_server(self.port) log.info('[Metrics]: Metrics server started on port %s', self.port) while True: - if self.vault: - self.collect_users_stats() if self.database: + self.collect_users_stats() self.collect_messages_stats() time.sleep(self.interval) for thread in threads: diff --git a/tests/conftest.py b/tests/conftest.py index 6f464eb4a..c1cb55f22 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -355,7 +355,7 @@ def fixture_database_class(vault_instance, namespace): @pytest.fixture(name="metrics_class", scope='session') -def fixture_metrics_class(vault_instance, database_class): +def fixture_metrics_class(database_class): """ Returns the metrics class """ From 0f1fc290ab75b155d362cc4927572ab2b53e24ff Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 14:39:22 +0400 Subject: [PATCH 087/148] modified: src/modules/metrics.py modified: tests/conftest.py modified: tests/test_metrics.py --- src/modules/metrics.py | 12 ++++++------ tests/conftest.py | 34 ++++++++++++++++++++++++++++++++++ tests/test_metrics.py | 24 ++++++++++++++++++++++++ 3 files changed, 64 insertions(+), 6 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index c96fb763e..996b507d1 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -50,12 +50,12 @@ def collect_users_stats(self) -> None: users_dict = self.database.get_users() access_granted_count = 0 access_denied_count = 0 - - for user in users_dict: - if user.get('status') == 'denied': - access_denied_count += 1 - elif user.get('status') == 'allowed': - access_granted_count 
+= 1 + if users_dict: + for user in users_dict: + if user.get('status') == 'denied': + access_denied_count += 1 + elif user.get('status') == 'allowed': + access_granted_count += 1 self.access_granted_counter.set(access_granted_count) self.access_denied_counter.set(access_denied_count) diff --git a/tests/conftest.py b/tests/conftest.py index c1cb55f22..815a418f3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -379,3 +379,37 @@ def fixture_postgres_messages_test_data(postgres_instance): "VALUES ('123456', '123456', '2024-08-27 00:00:00', '2024-08-27 00:00:00', 'status_message', 'pytest', 'hash', 'updating')" ) conn.commit() + + +@pytest.fixture(name="postgres_users_test_data", scope='session') +def fixture_postgres_users_test_data(postgres_instance): + """ + This function sets up test data in the users table in the postgres database. + + Args: + postgres_instance: A tuple containing the connection and cursor objects for the postgres database. + """ + data = [ + { + 'user_id': '111111', + 'chat_id': '111111', + 'status': 'allowed' + }, + { + 'user_id': '222222', + 'chat_id': '222222', + 'status': 'denied' + }, + { + 'user_id': '333333', + 'chat_id': '333333', + 'status': 'denied' + } + ] + conn, cursor = postgres_instance + for user in data: + cursor.execute( + "INSERT INTO users (user_id, chat_id, status) VALUES (%s, %s, %s)", + (user['user_id'], user['chat_id'], user['status']) + ) + conn.commit() diff --git a/tests/test_metrics.py b/tests/test_metrics.py index d39a0c373..c6a2c4e80 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -27,4 +27,28 @@ def test_metrics_users_stats(metrics_class): Checking the collection of user statistics. """ response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) + assert "pytest_access_granted_total" in response.text + assert "pytest_access_denied_total" in response.text + assert "pytest_access_granted_total 1.0" in response.text + assert "pytest_access_denied_total 2.0" in response.text + + +@pytest.mark.order(15) +def test_metrics_threads_status(metrics_class): + """ + Checking the collection of thread statistics. + """ + response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) assert "pytest_thread_status" in response.text + + +@pytest.mark.order(16) +def test_metrics_messages(metrics_class): + """ + Checking the collection of processed and queued messages statistics. 
+ """ + response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) + assert "pytest_processed_messages_total" in response.text + assert "pytest_queue_length" in response.text + assert "pytest_processed_messages_total 3.0" in response.text + assert "pytest_queue_length 0.0" in response.text From 58cd47840923acf43a11a5047d5a8b766bf66ada Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 14:44:36 +0400 Subject: [PATCH 088/148] modified: src/modules/database.py modified: src/modules/metrics.py modified: tests/test_metrics.py --- src/modules/database.py | 5 +++-- src/modules/metrics.py | 11 +++++------ tests/test_metrics.py | 3 ++- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 423facb7e..324d1c0f2 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -813,8 +813,9 @@ def get_users(self) -> dict: columns=("user_id", "chat_id", "status"), limit=1000 ) - for user in users: - users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]}) + if users: + for user in users: + users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]}) return users_dict def get_considered_message( diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 996b507d1..6609fb2b1 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -50,12 +50,11 @@ def collect_users_stats(self) -> None: users_dict = self.database.get_users() access_granted_count = 0 access_denied_count = 0 - if users_dict: - for user in users_dict: - if user.get('status') == 'denied': - access_denied_count += 1 - elif user.get('status') == 'allowed': - access_granted_count += 1 + for user in users_dict: + if user.get('status') == 'denied': + access_denied_count += 1 + elif user.get('status') == 'allowed': + access_granted_count += 1 self.access_granted_counter.set(access_granted_count) self.access_denied_counter.set(access_denied_count) diff --git a/tests/test_metrics.py b/tests/test_metrics.py index c6a2c4e80..4c577747f 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -22,10 +22,11 @@ def test_metrics_instance(metrics_class, database_class, vault_instance): @pytest.mark.order(14) -def test_metrics_users_stats(metrics_class): +def test_metrics_users_stats(metrics_class, postgres_users_test_data): """ Checking the collection of user statistics. """ + _ = postgres_users_test_data response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) assert "pytest_access_granted_total" in response.text assert "pytest_access_denied_total" in response.text From abf4982969dedcdb3194309853503bdd9cebdf66 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 15:33:51 +0400 Subject: [PATCH 089/148] added stop method for metric server --- src/modules/metrics.py | 29 +++++++++++++++++++++++++---- tests/test_metrics.py | 8 ++++++++ 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 6609fb2b1..7988c9ea8 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -9,6 +9,22 @@ class Metrics(): """ This class provides a way to expose metrics to Prometheus for monitoring the application. + + Attributes: + :attribute port (int): port for the metrics server. + :attribute interval (int): interval for collecting metrics. + :attribute database (Database): instance of the Database class. + :attribute running (bool): the status of the metrics server. 
+ :attribute thread_status_gauge (Gauge): gauge for the thread status. + :attribute access_granted_counter (Gauge): gauge for the access granted counter. + :attribute access_denied_counter (Gauge): gauge for the access denied counter. + :attribute processed_messages_counter (Gauge): gauge for the processed messages counter. + :attribute queue_length_gauge (Gauge): gauge for the queue length. + + Examples: + >>> metrics = Metrics(port=8000, interval=1, metrics_prefix='pytest') + >>> metrics.run(threads=[thread1, thread2]) + >>> metrics.stop() """ def __init__( self, @@ -27,15 +43,13 @@ def __init__( Keyword Args: :param database (Database): instance of the Database class. - - Returns: - None """ metrics_prefix = metrics_prefix.replace('-', '_') self.port = port self.interval = interval self.database = kwargs.get('database', None) + self.running = True self.thread_status_gauge = Gauge(f'{metrics_prefix}_thread_status', 'Thread status (1 = running, 0 = not running)', ['thread_name']) if self.database: self.access_granted_counter = Gauge(f'{metrics_prefix}_access_granted_total', 'Total number of users granted access') @@ -86,10 +100,17 @@ def run(self, threads: list) -> None: """ start_http_server(self.port) log.info('[Metrics]: Metrics server started on port %s', self.port) - while True: + while self.running: if self.database: self.collect_users_stats() self.collect_messages_stats() time.sleep(self.interval) for thread in threads: self.update_thread_status(thread.name, thread.is_alive()) + + def stop(self) -> None: + """ + The method stops the metrics server. + """ + self.running = False + log.info('[Metrics]: Metrics server stopped') diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 4c577747f..7afa636ea 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -53,3 +53,11 @@ def test_metrics_messages(metrics_class): assert "pytest_queue_length" in response.text assert "pytest_processed_messages_total 3.0" in response.text assert "pytest_queue_length 0.0" in response.text + + +@pytest.mark.order(17) +def test_stop_metric_server(metrics_class): + """ + Stopping the metrics server. 
+ """ + metrics_class.stop() From 1352feab79be4218628e8ff85ef301fd6301e424 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 17:10:21 +0400 Subject: [PATCH 090/148] modified: src/modules/metrics.py modified: tests/conftest.py modified: tests/test_metrics.py --- src/modules/metrics.py | 2 +- tests/conftest.py | 9 ++++++--- tests/test_metrics.py | 8 -------- 3 files changed, 7 insertions(+), 12 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 7988c9ea8..9854ae260 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -104,9 +104,9 @@ def run(self, threads: list) -> None: if self.database: self.collect_users_stats() self.collect_messages_stats() - time.sleep(self.interval) for thread in threads: self.update_thread_status(thread.name, thread.is_alive()) + time.sleep(self.interval) def stop(self) -> None: """ diff --git a/tests/conftest.py b/tests/conftest.py index 815a418f3..8ea245c76 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -359,10 +359,13 @@ def fixture_metrics_class(database_class): """ Returns the metrics class """ - metrics = Metrics(port=8000, interval=3, metrics_prefix='pytest', database=database_class) + metrics = Metrics(port=8000, interval=1, metrics_prefix='pytest', database=database_class) threads_list = threading.enumerate() - metrics.run(threads=threads_list) - return metrics + metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,)) + metrics_thread.start() + yield metrics + metrics.stop() + metrics_thread.join() @pytest.fixture(name="postgres_messages_test_data", scope='session') diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 7afa636ea..4c577747f 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -53,11 +53,3 @@ def test_metrics_messages(metrics_class): assert "pytest_queue_length" in response.text assert "pytest_processed_messages_total 3.0" in response.text assert "pytest_queue_length 0.0" in response.text - - -@pytest.mark.order(17) -def test_stop_metric_server(metrics_class): - """ - Stopping the metrics server. 
- """ - metrics_class.stop() From 508fc2d24a961d84483a934e0c511bb8027ee0a0 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 17:17:46 +0400 Subject: [PATCH 091/148] modified: tests/test_metrics.py --- tests/test_metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 4c577747f..2750a930a 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -12,7 +12,6 @@ def test_metrics_instance(metrics_class, database_class, vault_instance): """ assert metrics_class.port == 8000 assert metrics_class.interval == 1 - assert metrics_class.vault == vault_instance assert metrics_class.database == database_class assert metrics_class.thread_status_gauge is not None assert metrics_class.access_granted_counter is not None @@ -28,6 +27,7 @@ def test_metrics_users_stats(metrics_class, postgres_users_test_data): """ _ = postgres_users_test_data response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) + print(response.text) assert "pytest_access_granted_total" in response.text assert "pytest_access_denied_total" in response.text assert "pytest_access_granted_total 1.0" in response.text From 7c0c57a6aec75933ca9ab5fee120d4ca0e910d2a Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 17:58:57 +0400 Subject: [PATCH 092/148] modified: tests/test_metrics.py --- tests/test_metrics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 2750a930a..7ea18e02d 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -6,7 +6,7 @@ @pytest.mark.order(13) -def test_metrics_instance(metrics_class, database_class, vault_instance): +def test_metrics_instance(metrics_class, database_class): """ Checking the creation of a metrics instance. """ From e943c423a738883be4dd6f09ecf38cd97d987bd2 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 18:53:12 +0400 Subject: [PATCH 093/148] modified: src/modules/metrics.py modified: tests/conftest.py --- src/modules/metrics.py | 12 ++++++------ tests/conftest.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 9854ae260..7b09af4c9 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -83,14 +83,14 @@ def collect_messages_stats(self) -> None: """ The method updates the gauge with the number of processed and queued messages. 
""" + users_dict = self.database.get_users() processed_messages_count = 0 queue_messages_count = 0 - for user in self.database.get_users(): - user_id = user[0] - processed_messages = self.database.get_user_processed(user_id=user_id) - queue_messages = self.database.get_user_queue(user_id=user_id) - processed_messages_count += len(processed_messages.get(user_id, [])) - queue_messages_count = len(queue_messages.get(user_id, [])) + for user in users_dict: + processed_messages = self.database.get_user_processed(user_id=user['user_id']) + queue_messages = self.database.get_user_queue(user_id=user['user_id']) + processed_messages_count += len(processed_messages.get(user['user_id'], [])) + queue_messages_count = len(queue_messages.get(user['user_id'], [])) self.processed_messages_counter.set(processed_messages_count) self.queue_length_gauge.set(queue_messages_count) diff --git a/tests/conftest.py b/tests/conftest.py index 8ea245c76..42d9e2a64 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -355,11 +355,11 @@ def fixture_database_class(vault_instance, namespace): @pytest.fixture(name="metrics_class", scope='session') -def fixture_metrics_class(database_class): +def fixture_metrics_class(database_class, postgres_users_test_data): """ Returns the metrics class """ - metrics = Metrics(port=8000, interval=1, metrics_prefix='pytest', database=database_class) + metrics = Metrics(port=8000, interval=5, metrics_prefix='pytest', database=database_class) threads_list = threading.enumerate() metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,)) metrics_thread.start() From da8290f3fabd43becb8e6f89b382b19d9670896c Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 18:57:22 +0400 Subject: [PATCH 094/148] modified: tests/conftest.py modified: tests/test_metrics.py --- tests/conftest.py | 2 +- tests/test_metrics.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 42d9e2a64..1dc7b7070 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -355,7 +355,7 @@ def fixture_database_class(vault_instance, namespace): @pytest.fixture(name="metrics_class", scope='session') -def fixture_metrics_class(database_class, postgres_users_test_data): +def fixture_metrics_class(database_class): """ Returns the metrics class """ diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 7ea18e02d..e193d57bc 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -11,7 +11,7 @@ def test_metrics_instance(metrics_class, database_class): Checking the creation of a metrics instance. """ assert metrics_class.port == 8000 - assert metrics_class.interval == 1 + assert metrics_class.interval == 5 assert metrics_class.database == database_class assert metrics_class.thread_status_gauge is not None assert metrics_class.access_granted_counter is not None @@ -49,6 +49,7 @@ def test_metrics_messages(metrics_class): Checking the collection of processed and queued messages statistics. 
""" response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) + print(response.text) assert "pytest_processed_messages_total" in response.text assert "pytest_queue_length" in response.text assert "pytest_processed_messages_total 3.0" in response.text From 56a7bfacef10ef096677cfbc8638b87f1e32e639 Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 19:03:17 +0400 Subject: [PATCH 095/148] debug metrics tests --- src/modules/database.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/modules/database.py b/src/modules/database.py index 324d1c0f2..406d47054 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -816,6 +816,7 @@ def get_users(self) -> dict: if users: for user in users: users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]}) + log.info('[Database]: Users: %s', users_dict) return users_dict def get_considered_message( From a096ab777666fd0672a62615cdca7db066cf95ed Mon Sep 17 00:00:00 2001 From: obervinov Date: Mon, 9 Sep 2024 20:28:52 +0400 Subject: [PATCH 096/148] modified: tests/conftest.py --- tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 1dc7b7070..dc6408f59 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -355,10 +355,11 @@ def fixture_database_class(vault_instance, namespace): @pytest.fixture(name="metrics_class", scope='session') -def fixture_metrics_class(database_class): +def fixture_metrics_class(database_class, postgres_users_test_data): """ Returns the metrics class """ + _ = postgres_users_test_data metrics = Metrics(port=8000, interval=5, metrics_prefix='pytest', database=database_class) threads_list = threading.enumerate() metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,)) From 3c4c4a31aaafb11402c01d2cf3c0bb43d8667855 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 00:17:46 +0400 Subject: [PATCH 097/148] refactored tests and test data --- tests/conftest.py | 133 ++++++++++++++++++++++++++++++++-- tests/test_database.py | 159 ++++++++++++++++++++++++++--------------- tests/test_metrics.py | 2 +- 3 files changed, 227 insertions(+), 67 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index dc6408f59..a9829a745 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -355,11 +355,13 @@ def fixture_database_class(vault_instance, namespace): @pytest.fixture(name="metrics_class", scope='session') -def fixture_metrics_class(database_class, postgres_users_test_data): +def fixture_metrics_class(database_class, postgres_users_test_data, postgres_queue_test_data, postgres_processed_test_data): """ Returns the metrics class """ _ = postgres_users_test_data + _ = postgres_queue_test_data + _ = postgres_processed_test_data metrics = Metrics(port=8000, interval=5, metrics_prefix='pytest', database=database_class) threads_list = threading.enumerate() metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,)) @@ -395,18 +397,18 @@ def fixture_postgres_users_test_data(postgres_instance): """ data = [ { - 'user_id': '111111', - 'chat_id': '111111', + 'user_id': 'test_user_1', + 'chat_id': 'test_chat_1', 'status': 'allowed' }, { - 'user_id': '222222', - 'chat_id': '222222', + 'user_id': 'test_user_2', + 'chat_id': 'test_chat_2', 'status': 'denied' }, { - 'user_id': '333333', - 'chat_id': '333333', + 'user_id': 'test_user_3', + 'chat_id': 'test_chat_3', 'status': 'denied' } ] @@ -417,3 +419,120 @@ def fixture_postgres_users_test_data(postgres_instance): 
(user['user_id'], user['chat_id'], user['status']) ) conn.commit() + + +@pytest.fixture(name="postgres_queue_test_data", scope='session') +def fixture_postgres_queue_test_data(postgres_instance): + """ + This function sets up test data in the queue table in the postgres database. + """ + data = [ + { + 'user_id': 'test_user_1', + 'post_id': 'test_post_1', + 'post_url': 'https://example.com/p/test_post_1', + 'post_owner': 'test_owner_1', + 'link_type': 'post', + 'message_id': 'test_message_1', + 'chat_id': 'test_chat_1', + 'scheduled_time': '2024-08-27 00:00:00', + 'download_status': 'not started', + 'upload_status': 'not started', + 'state': 'waiting' + }, + { + 'user_id': 'test_user_2', + 'post_id': 'test_post_2', + 'post_url': 'https://example.com/p/test_post_2', + 'post_owner': 'test_owner_2', + 'link_type': 'post', + 'message_id': 'test_message_2', + 'chat_id': 'test_chat_2', + 'scheduled_time': '2024-08-27 00:00:00', + 'download_status': 'not started', + 'upload_status': 'not started', + 'state': 'waiting' + }, + { + 'user_id': 'test_user_3', + 'post_id': 'test_post_3', + 'post_url': 'https://example.com/p/test_post_3', + 'post_owner': 'test_owner_3', + 'link_type': 'post', + 'message_id': 'test_message_3', + 'chat_id': 'test_chat_3', + 'scheduled_time': '2024-08-27 00:00:00', + 'download_status': 'not started', + 'upload_status': 'not started', + 'state': 'waiting' + } + ] + conn, cursor = postgres_instance + for message in data: + cursor.execute( + "INSERT INTO queue (user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status, state) " + "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", + ( + message['user_id'], message['post_id'], message['post_url'], message['post_owner'], message['link_type'], + message['message_id'], message['chat_id'], message['scheduled_time'], message['download_status'], + message['upload_status'], message['state'] + ) + ) + conn.commit() + + +@pytest.fixture(name="postgres_processed_test_data", scope='session') +def fixture_postgres_processed_test_data(postgres_instance): + """ + This function sets up test data in the processed table in the postgres database. 
+ """ + data = [ + { + 'user_id': 'test_user_4', + 'post_id': 'test_post_4', + 'post_url': 'https://example.com/p/test_post_4', + 'post_owner': 'test_owner_4', + 'link_type': 'post', + 'message_id': 'test_message_4', + 'chat_id': 'test_chat_4', + 'download_status': 'completed', + 'upload_status': 'completed', + 'state': 'processed' + }, + { + 'user_id': 'test_user_5', + 'post_id': 'test_post_5', + 'post_url': 'https://example.com/p/test_post_5', + 'post_owner': 'test_owner_5', + 'link_type': 'post', + 'message_id': 'test_message_5', + 'chat_id': 'test_chat_5', + 'download_status': 'completed', + 'upload_status': 'completed', + 'state': 'processed' + }, + { + 'user_id': 'test_user_6', + 'post_id': 'test_post_6', + 'post_url': 'https://example.com/p/test_post_6', + 'post_owner': 'test_owner_6', + 'link_type': 'post', + 'message_id': 'test_message_6', + 'chat_id': 'test_chat_6', + 'download_status': 'completed', + 'upload_status': 'completed', + 'state': 'processed' + } + ] + conn, cursor = postgres_instance + for message in data: + cursor.execute( + "INSERT INTO processed (user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, download_status, upload_status, state) " + "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", + ( + message['user_id'], message['post_id'], message['post_url'], message['post_owner'], message['link_type'], + message['message_id'], message['chat_id'], message['download_status'], message['upload_status'], + message['state'] + ) + ) + conn.commit() diff --git a/tests/test_database.py b/tests/test_database.py index 54630c5ac..ad16ecba6 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -97,13 +97,13 @@ def test_messages_queue(database_class): Checking the addition of a message to the queue and extraction of a message from the queue """ data = { - 'user_id': '12345', - 'post_id': 'qwerty333', - 'post_url': 'https://www.example.com/p/qwerty333/', - 'post_owner': 'johndoe', + 'user_id': 'test_case_6', + 'post_id': 'test_case_6', + 'post_url': 'https://example.com/p/test_case_6', + 'post_owner': 'test_case_6', 'link_type': 'post', - 'message_id': 'qwerty333', - 'chat_id': 'xyz', + 'message_id': 'test_case_6', + 'chat_id': 'test_case_6', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', 'upload_status': 'not started' @@ -134,13 +134,13 @@ def test_change_message_state_in_queue(database_class, postgres_instance): """ _, cursor = postgres_instance data = { - 'user_id': '12345', - 'post_id': 'qwerty222', - 'post_url': 'https://www.example.com/p/qwerty222/', - 'post_owner': 'johndoe', + 'user_id': 'test_case_7', + 'post_id': 'test_case_7', + 'post_url': 'https://example.com/p/test_case_7', + 'post_owner': 'test_case_7', 'link_type': 'post', - 'message_id': 'qwerty222', - 'chat_id': 'xyz', + 'message_id': 'test_case_7', + 'chat_id': 'test_case_7', 'scheduled_time': '2022-01-01 12:00:00', 'download_status': 'not started', 'upload_status': 'not started' @@ -178,13 +178,13 @@ def test_change_message_schedule_time_in_queue(database_class, postgres_instance """ _, cursor = postgres_instance data = { - 'user_id': '12345', - 'post_id': 'qwerty444', - 'post_url': 'https://www.example.com/p/qwerty444/', - 'post_owner': 'johndoe', + 'user_id': 'test_case_8', + 'post_id': 'test_case_8', + 'post_url': 'https://example.com/p/test_case_8', + 'post_owner': 'test_case_8', 'link_type': 'post', - 'message_id': 'qwerty444', - 'chat_id': 'xyz', + 'message_id': 'test_case_8', + 'chat_id': 'test_case_8', 'scheduled_time': '2022-01-01 12:00:00', 
'download_status': 'not started', 'upload_status': 'not started' @@ -194,14 +194,14 @@ def test_change_message_schedule_time_in_queue(database_class, postgres_instance # Check the change of the message schedule time in the queue status = database_class.update_schedule_time_in_queue( - post_id='qwerty444', - user_id='12345', + post_id=data['post_id'], + user_id=data['user_id'], scheduled_time='2022-01-02 13:00:00' ) assert status == f"{data['post_id']}: scheduled time updated" # Check records in database - cursor.execute("SELECT scheduled_time FROM queue WHERE post_id = 'qwerty444'") + cursor.execute(f"SELECT scheduled_time FROM queue WHERE post_id = '{data['post_id']}'") record_queue = cursor.fetchall() assert record_queue is not None assert record_queue[0][0] == datetime.strptime('2022-01-02 13:00:00', '%Y-%m-%d %H:%M:%S') @@ -212,41 +212,41 @@ def test_get_user_queue(database_class): """ Checking the extraction of the user queue """ - user_id = '111111' + user_id = 'test_case_9' timestamp = datetime.now() data = [ { 'user_id': user_id, - 'post_id': 'qwerty123', - 'post_url': 'https://www.example.com/p/qwerty123/', - 'post_owner': 'johndoe', + 'post_id': 'test_case_9_1', + 'post_url': 'https://example.com/p/test_case_9_1', + 'post_owner': 'test_case_9', 'link_type': 'post', - 'message_id': 'qwerty123', - 'chat_id': 'xyz', + 'message_id': 'test_case_9_1', + 'chat_id': 'test_case_9', 'scheduled_time': timestamp + timedelta(hours=1), 'download_status': 'not started', 'upload_status': 'not started' }, { 'user_id': user_id, - 'post_id': 'qwerty456', - 'post_url': 'https://www.example.com/p/qwerty456/', - 'post_owner': 'johndoe', + 'post_id': 'test_case_9_2', + 'post_url': 'https://example.com/p/test_case_9_2', + 'post_owner': 'test_case_9', 'link_type': 'post', - 'message_id': 'qwerty456', - 'chat_id': 'xyz', + 'message_id': 'test_case_9_2', + 'chat_id': 'test_case_9', 'scheduled_time': timestamp - timedelta(hours=2), 'download_status': 'not started', 'upload_status': 'not started' }, { 'user_id': user_id, - 'post_id': 'qwerty789', - 'post_url': 'https://www.example.com/p/qwerty789/', - 'post_owner': 'johndoe', + 'post_id': 'test_case_9_3', + 'post_url': 'https://example.com/p/test_case_9_3', + 'post_owner': 'test_case_9', 'link_type': 'post', - 'message_id': 'qwerty789', - 'chat_id': 'xyz', + 'message_id': 'test_case_9_3', + 'chat_id': 'test_case_9', 'scheduled_time': timestamp + timedelta(hours=3), 'download_status': 'not started', 'upload_status': 'not started' @@ -257,7 +257,7 @@ def test_get_user_queue(database_class): assert status == f"{message['message_id']}: added to queue" # Validate the extraction of the user queue - user_queue = database_class.get_user_queue(user_id='111111') + user_queue = database_class.get_user_queue(user_id=user_id) expected_response = defaultdict(list) for entry in data: expected_response[entry['user_id']].append({ @@ -277,24 +277,65 @@ def test_get_user_processed_data(database_class, postgres_instance): Checking the extraction of the user processed data """ _, cursor = postgres_instance - user_id = '111111' - # Marked messages from previous tests - mark_processed = ['qwerty123', 'qwerty456', 'qwerty789'] - for item in mark_processed: + user_id = 'test_case_10' + timestamp = datetime.now() + data = [ + { + 'user_id': user_id, + 'post_id': 'test_case_10_1', + 'post_url': 'https://example.com/p/test_case_10_1', + 'post_owner': 'test_case_10', + 'link_type': 'post', + 'message_id': 'test_case_10_1', + 'chat_id': 'test_case_10', + 'scheduled_time': timestamp + 
timedelta(hours=1), + 'download_status': 'not started', + 'upload_status': 'not started' + }, + { + 'user_id': user_id, + 'post_id': 'test_case_10_2', + 'post_url': 'https://example.com/p/test_case_10_2', + 'post_owner': 'test_case_10', + 'link_type': 'post', + 'message_id': 'test_case_10_2', + 'chat_id': 'test_case_10', + 'scheduled_time': timestamp - timedelta(hours=2), + 'download_status': 'not started', + 'upload_status': 'not started' + }, + { + 'user_id': user_id, + 'post_id': 'test_case_10_3', + 'post_url': 'https://example.com/p/test_case_10_3', + 'post_owner': 'test_case_10', + 'link_type': 'post', + 'message_id': 'test_case_10_3', + 'chat_id': 'test_case_10', + 'scheduled_time': timestamp + timedelta(hours=3), + 'download_status': 'not started', + 'upload_status': 'not started' + } + ] + for message in data: + status = database_class.add_message_to_queue(data=message) + assert status == f"{message['message_id']}: added to queue" status = database_class.update_message_state_in_queue( - post_id=item, + post_id=message['post_id'], state='processed', download_status='completed', upload_status='completed', - post_owner='johndoe' + post_owner=message['post_owner'] ) - assert status == f"{item}: processed" + assert status == f"{message['post_id']}: processed" + user_processed = database_class.get_user_processed(user_id=user_id) user_queue = database_class.get_user_queue(user_id=user_id) - for item in mark_processed: + + for message in data: if user_queue: for q_message in user_queue.get(user_id, []): - assert item != q_message['post_id'] + assert message['post_id'] == q_message['post_id'] if user_processed: found = False if user_processed.get(user_id, []) == []: @@ -302,12 +343,12 @@ def test_get_user_processed_data(database_class, postgres_instance): else: cursor.execute("SELECT * FROM processed") print(cursor.fetchall()) - assert len(user_processed.get(user_id, [])) == len(mark_processed) + assert len(user_processed.get(user_id, [])) == len(data) for p_message in user_processed.get(user_id, []): - if item == p_message['post_id']: + if message['post_id'] == p_message['post_id']: found = True if not found: - print(f"Message {item} not found in processed messages: {user_processed.get(user_id, [])}") + print(f"Message {message['post_id']} not found in processed: {user_processed}") assert False else: assert False @@ -319,13 +360,13 @@ def test_check_message_uniqueness(database_class): Checking the uniqueness of the message """ data = { - 'user_id': '123456', - 'post_id': 'qwerty1111', - 'post_url': 'https://www.example.com/p/qwerty1111/', - 'post_owner': 'johndoe', + 'user_id': 'test_case_11', + 'post_id': 'test_case_11', + 'post_url': 'https://example.com/p/test_case_11', + 'post_owner': 'test_case_11', 'link_type': 'post', - 'message_id': 'qwerty1111', - 'chat_id': 'xyz', + 'message_id': 'test_case_11', + 'chat_id': 'test_case_11', 'scheduled_time': '2022-01-02 13:00:00', 'download_status': 'not started', 'upload_status': 'not started' @@ -345,9 +386,9 @@ def test_service_messages(database_class): Checking the registration of service messages """ data = { - 'message_id': '444444', - 'chat_id': 'xyz', - 'message_content': 'Test message', + 'message_id': 'test_case_12', + 'chat_id': 'test_case_12', + 'message_content': 'Test case 12', 'message_type': 'status_message', 'state': 'updated' } diff --git a/tests/test_metrics.py b/tests/test_metrics.py index e193d57bc..6b4623c4d 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -53,4 +53,4 @@ def 
test_metrics_messages(metrics_class): assert "pytest_processed_messages_total" in response.text assert "pytest_queue_length" in response.text assert "pytest_processed_messages_total 3.0" in response.text - assert "pytest_queue_length 0.0" in response.text + assert "pytest_queue_length 3.0" in response.text From 3eb504005450f749ee8d85e9a71bf315829921af Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 00:22:36 +0400 Subject: [PATCH 098/148] modified: src/modules/database.py modified: tests/conftest.py --- src/modules/database.py | 9 +++++---- tests/conftest.py | 3 ++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 406d47054..483865596 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -674,10 +674,11 @@ def get_user_processed( order_by='timestamp ASC', limit=10000 ) - for message in processed: - if user_id not in result: - result[user_id] = [] - result[user_id].append({'post_id': message[0], 'timestamp': message[1], 'state': message[2]}) + if processed: + for message in processed: + if user_id not in result: + result[user_id] = [] + result[user_id].append({'post_id': message[0], 'timestamp': message[1], 'state': message[2]}) return result if result else None def check_message_uniqueness( diff --git a/tests/conftest.py b/tests/conftest.py index a9829a745..2b2919012 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -470,7 +470,8 @@ def fixture_postgres_queue_test_data(postgres_instance): conn, cursor = postgres_instance for message in data: cursor.execute( - "INSERT INTO queue (user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status, state) " + "INSERT INTO queue " + "(user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status, state) " "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", ( message['user_id'], message['post_id'], message['post_url'], message['post_owner'], message['link_type'], From ccb52db00da88c58e0e0daafd21d71cd1cfbfca1 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 10:29:40 +0400 Subject: [PATCH 099/148] modified: src/modules/database.py modified: src/modules/metrics.py --- src/modules/database.py | 1 - src/modules/metrics.py | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 483865596..54efaead4 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -817,7 +817,6 @@ def get_users(self) -> dict: if users: for user in users: users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]}) - log.info('[Database]: Users: %s', users_dict) return users_dict def get_considered_message( diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 7b09af4c9..03edcb316 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -89,8 +89,10 @@ def collect_messages_stats(self) -> None: for user in users_dict: processed_messages = self.database.get_user_processed(user_id=user['user_id']) queue_messages = self.database.get_user_queue(user_id=user['user_id']) - processed_messages_count += len(processed_messages.get(user['user_id'], [])) - queue_messages_count = len(queue_messages.get(user['user_id'], [])) + if processed_messages: + processed_messages_count += len(processed_messages.get(user['user_id'], [])) + if queue_messages: + queue_messages_count = len(queue_messages.get(user['user_id'], [])) 
self.processed_messages_counter.set(processed_messages_count) self.queue_length_gauge.set(queue_messages_count) From afc96f10b9885bd1fe6746bf91c8a10bda72f2c4 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 10:44:24 +0400 Subject: [PATCH 100/148] debug messages metrics --- src/modules/metrics.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 03edcb316..daad71a68 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -89,10 +89,11 @@ def collect_messages_stats(self) -> None: for user in users_dict: processed_messages = self.database.get_user_processed(user_id=user['user_id']) queue_messages = self.database.get_user_queue(user_id=user['user_id']) + log.info('[Metrics]: User %s processed messages: %s, queue messages: %s', user['user_id'], processed_messages, queue_messages) if processed_messages: processed_messages_count += len(processed_messages.get(user['user_id'], [])) if queue_messages: - queue_messages_count = len(queue_messages.get(user['user_id'], [])) + queue_messages_count += len(queue_messages.get(user['user_id'], [])) self.processed_messages_counter.set(processed_messages_count) self.queue_length_gauge.set(queue_messages_count) From 7a4b8b560cb4249a257ce52c7f2309a6529a4472 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 10:52:58 +0400 Subject: [PATCH 101/148] modified: src/modules/metrics.py modified: tests/test_metrics.py --- CHANGELOG.md | 2 +- src/modules/metrics.py | 1 - tests/test_metrics.py | 4 +++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 181439c83..182d3a60e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-09 +## v2.3.0 - 2024-09-10 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/src/modules/metrics.py b/src/modules/metrics.py index daad71a68..68532fb61 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -89,7 +89,6 @@ def collect_messages_stats(self) -> None: for user in users_dict: processed_messages = self.database.get_user_processed(user_id=user['user_id']) queue_messages = self.database.get_user_queue(user_id=user['user_id']) - log.info('[Metrics]: User %s processed messages: %s, queue messages: %s', user['user_id'], processed_messages, queue_messages) if processed_messages: processed_messages_count += len(processed_messages.get(user['user_id'], [])) if queue_messages: diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 6b4623c4d..9aac12db8 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -44,10 +44,12 @@ def test_metrics_threads_status(metrics_class): @pytest.mark.order(16) -def test_metrics_messages(metrics_class): +def test_metrics_messages(metrics_class, postgres_queue_test_data, postgres_processed_test_data): """ Checking the collection of processed and queued messages statistics. 
""" + _ = postgres_queue_test_data + _ = postgres_processed_test_data response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) print(response.text) assert "pytest_processed_messages_total" in response.text From 472afe31c3754722d8aee56a847160e7af25403b Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 11:08:47 +0400 Subject: [PATCH 102/148] debugging test_metrics_messages --- tests/test_metrics.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 9aac12db8..18a8ba31c 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -44,14 +44,22 @@ def test_metrics_threads_status(metrics_class): @pytest.mark.order(16) -def test_metrics_messages(metrics_class, postgres_queue_test_data, postgres_processed_test_data): +def test_metrics_messages(metrics_class, postgres_queue_test_data, postgres_processed_test_data, postgres_instance): """ Checking the collection of processed and queued messages statistics. """ _ = postgres_queue_test_data _ = postgres_processed_test_data + + _, cursor = postgres_instance + response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) print(response.text) + cursor.execute("SELECT * FROM users;") + print(cursor.fetchall()) + cursor.execute("SELECT * FROM processed;") + print(cursor.fetchall()) + assert "pytest_processed_messages_total" in response.text assert "pytest_queue_length" in response.text assert "pytest_processed_messages_total 3.0" in response.text From 0d12230116914a4bc5cb9ba1bf5137fdbffd1c6c Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 13:50:32 +0400 Subject: [PATCH 103/148] modified: src/bot.py modified: src/modules/database.py modified: tests/conftest.py modified: tests/test_database.py modified: tests/test_metrics.py --- src/bot.py | 20 +++++++------------- src/modules/database.py | 28 ++++++++++++---------------- tests/conftest.py | 1 + tests/test_database.py | 24 ++++++++++++------------ tests/test_metrics.py | 10 +--------- 5 files changed, 33 insertions(+), 50 deletions(-) diff --git a/src/bot.py b/src/bot.py index b90827810..a5cf4582b 100644 --- a/src/bot.py +++ b/src/bot.py @@ -299,30 +299,24 @@ def get_user_messages(user_id: str = None) -> dict: >>> get_user_messages(user_id='1234567890') {'queue_list': 'queue is empty', 'processed_list': 'processed is empty', 'queue_count': 0, 'processed_count': 0} """ - queue_dict = database.get_user_queue(user_id=user_id) - processed_dict = database.get_user_processed(user_id=user_id) - - last_ten_queue = queue_dict.get(user_id, [])[:10] if queue_dict else [] - last_ten_processed = processed_dict.get(user_id, [])[-10:] if processed_dict else [] - - queue_count = len(queue_dict.get(user_id, [])) if queue_dict else 0 - processed_count = len(processed_dict.get(user_id, [])) if processed_dict else 0 + queue = database.get_user_queue(user_id=user_id) + processed = database.get_user_processed(user_id=user_id) queue_string = '' - if last_ten_queue: - for item in last_ten_queue: + if queue[:10]: + for item in queue[:10]: queue_string += f"+ {item['post_id']}: scheduled for {item['scheduled_time']}\n" else: queue_string = 'queue is empty' processed_string = '' - if last_ten_processed: - for item in last_ten_processed: + if processed[-10:]: + for item in processed[-10:]: processed_string += f"* {item['post_id']}: {item['state']} at {item['timestamp']}\n" else: processed_string = 'processed is empty' - return {'queue_list': queue_string, 'processed_list': processed_string, 
'queue_count': queue_count, 'processed_count': processed_count} + return {'queue_list': queue_string, 'processed_list': processed_string, 'queue_count': len(queue), 'processed_count': len(processed)} def message_parser(message: telegram.telegram_types.Message = None) -> dict: diff --git a/src/modules/database.py b/src/modules/database.py index 54efaead4..5a8a105ac 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -619,7 +619,7 @@ def update_schedule_time_in_queue( def get_user_queue( self, user_id: str = None - ) -> Union[dict, None]: + ) -> dict: """ Get messages from the queue table for the specified user. @@ -627,13 +627,13 @@ def get_user_queue( user_id (str): The ID of the user. Returns: - dict: A dictionary containing messages from the queue for the specified user. + dict: A list of dictionaries containing the messages from the queue table for the specified user. Examples: >>> get_user_queue(user_id='12345') - {'12345': [{'post_id': '123456789', 'scheduled_time': '2022-01-01 12:00:00'}]} + [{'post_id': '123456789', 'scheduled_time': '2022-01-01 12:00:00'}] """ - result = {} + result = [] queue = self._select( table_name='queue', columns=("post_id", "scheduled_time"), @@ -643,15 +643,13 @@ def get_user_queue( ) if queue: for message in queue: - if user_id not in result: - result[user_id] = [] - result[user_id].append({'post_id': message[0], 'scheduled_time': message[1]}) - return result if result else None + result.append({'post_id': message[0], 'scheduled_time': message[1]}) + return result def get_user_processed( self, user_id: str = None - ) -> Union[dict, None]: + ) -> dict: """ Get last ten messages from the processed table for the specified user. It is used to display the last messages sent by the bot to the user. @@ -660,13 +658,13 @@ def get_user_processed( user_id (str): The ID of the user. Returns: - dict: A dictionary containing the last five messages from the processed table for the specified user. + dict: A list of dictionaries containing the last ten messages from the processed table for the specified user. 
Examples: >>> get_user_processed(user_id='12345') - {'12345': [{'post_id': '123456789', 'timestamp': '2022-01-01 12:00:00', 'state': 'processed'}]} + [{'post_id': '123456789', 'timestamp': '2022-01-01 12:00:00', 'state': 'processed'}] """ - result = {} + result = [] processed = self._select( table_name='processed', columns=("post_id", "timestamp", "state"), @@ -676,10 +674,8 @@ def get_user_processed( ) if processed: for message in processed: - if user_id not in result: - result[user_id] = [] - result[user_id].append({'post_id': message[0], 'timestamp': message[1], 'state': message[2]}) - return result if result else None + result.append({'post_id': message[0], 'timestamp': message[1], 'state': message[2]}) + return result def check_message_uniqueness( self, diff --git a/tests/conftest.py b/tests/conftest.py index 2b2919012..1685cd91f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -362,6 +362,7 @@ def fixture_metrics_class(database_class, postgres_users_test_data, postgres_que _ = postgres_users_test_data _ = postgres_queue_test_data _ = postgres_processed_test_data + metrics = Metrics(port=8000, interval=5, metrics_prefix='pytest', database=database_class) threads_list = threading.enumerate() metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,)) diff --git a/tests/test_database.py b/tests/test_database.py index ad16ecba6..082b70ae9 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -256,18 +256,18 @@ def test_get_user_queue(database_class): status = database_class.add_message_to_queue(data=message) assert status == f"{message['message_id']}: added to queue" - # Validate the extraction of the user queue + # Validate the extraction of the user queue (now directly a list) user_queue = database_class.get_user_queue(user_id=user_id) - expected_response = defaultdict(list) - for entry in data: - expected_response[entry['user_id']].append({ + expected_response = sorted([ + { 'post_id': entry['post_id'], 'scheduled_time': entry['scheduled_time'] - }) - for user_id, posts in expected_response.items(): - expected_response[user_id] = sorted(posts, key=lambda x: x['scheduled_time']) + } + for entry in data + ], key=lambda x: x['scheduled_time']) + assert user_queue is not None - assert len(user_queue.get(user_id, [])) == len(data) + assert len(user_queue) == len(data) assert user_queue == expected_response @@ -334,17 +334,17 @@ def test_get_user_processed_data(database_class, postgres_instance): for message in data: if user_queue: - for q_message in user_queue.get(user_id, []): + for q_message in user_queue: assert message['post_id'] == q_message['post_id'] if user_processed: found = False - if user_processed.get(user_id, []) == []: + if user_processed: assert False else: cursor.execute("SELECT * FROM processed") print(cursor.fetchall()) - assert len(user_processed.get(user_id, [])) == len(data) - for p_message in user_processed.get(user_id, []): + assert len(user_processed) == len(data) + for p_message in user_processed: if message['post_id'] == p_message['post_id']: found = True if not found: diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 18a8ba31c..9aac12db8 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -44,22 +44,14 @@ def test_metrics_threads_status(metrics_class): @pytest.mark.order(16) -def test_metrics_messages(metrics_class, postgres_queue_test_data, postgres_processed_test_data, postgres_instance): +def test_metrics_messages(metrics_class, postgres_queue_test_data, postgres_processed_test_data): """ 
Checking the collection of processed and queued messages statistics. """ _ = postgres_queue_test_data _ = postgres_processed_test_data - - _, cursor = postgres_instance - response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) print(response.text) - cursor.execute("SELECT * FROM users;") - print(cursor.fetchall()) - cursor.execute("SELECT * FROM processed;") - print(cursor.fetchall()) - assert "pytest_processed_messages_total" in response.text assert "pytest_queue_length" in response.text assert "pytest_processed_messages_total 3.0" in response.text From 34d95a8508f4bdf82a4c179c3a3f02a05fea56cc Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:12:51 +0400 Subject: [PATCH 104/148] modified: tests/test_database.py --- tests/test_database.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 082b70ae9..0a755af09 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -336,24 +336,27 @@ def test_get_user_processed_data(database_class, postgres_instance): if user_queue: for q_message in user_queue: assert message['post_id'] == q_message['post_id'] + else: + cursor.execute("SELECT * FROM queue") + print(cursor.fetchall()) + assert False + if user_processed: found = False - if user_processed: - assert False - else: - cursor.execute("SELECT * FROM processed") - print(cursor.fetchall()) - assert len(user_processed) == len(data) + assert len(user_processed) == len(data) for p_message in user_processed: if message['post_id'] == p_message['post_id']: found = True if not found: print(f"Message {message['post_id']} not found in processed: {user_processed}") assert False + else: + assert True else: + cursor.execute("SELECT * FROM processed") + print(cursor.fetchall()) assert False - @pytest.mark.order(11) def test_check_message_uniqueness(database_class): """ From da6bb449b0f593995ad85f81a5579290ad4ab3a7 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:16:28 +0400 Subject: [PATCH 105/148] modified: tests/test_database.py --- tests/test_database.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/test_database.py b/tests/test_database.py index 0a755af09..8f74cdee0 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -6,7 +6,6 @@ import json import importlib from datetime import datetime, timedelta -from collections import defaultdict import pytest import psycopg2 from psycopg2 import pool @@ -335,11 +334,7 @@ def test_get_user_processed_data(database_class, postgres_instance): for message in data: if user_queue: for q_message in user_queue: - assert message['post_id'] == q_message['post_id'] - else: - cursor.execute("SELECT * FROM queue") - print(cursor.fetchall()) - assert False + assert message['post_id'] != q_message['post_id'] if user_processed: found = False From 445011d1ee0133b42237eb4540d87da045799ed6 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:19:34 +0400 Subject: [PATCH 106/148] modified: tests/conftest.py modified: tests/test_database.py --- tests/conftest.py | 1 + tests/test_database.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 1685cd91f..7d0199e42 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -367,6 +367,7 @@ def fixture_metrics_class(database_class, postgres_users_test_data, postgres_que threads_list = threading.enumerate() metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,)) 
metrics_thread.start() + time.sleep(10) yield metrics metrics.stop() metrics_thread.join() diff --git a/tests/test_database.py b/tests/test_database.py index 8f74cdee0..e365af23b 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -352,6 +352,7 @@ def test_get_user_processed_data(database_class, postgres_instance): print(cursor.fetchall()) assert False + @pytest.mark.order(11) def test_check_message_uniqueness(database_class): """ From 9963454037b8725bff0bd990fa31421934f0c1f0 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:29:15 +0400 Subject: [PATCH 107/148] modified: src/modules/metrics.py --- src/modules/metrics.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 68532fb61..bcdd81442 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -83,12 +83,15 @@ def collect_messages_stats(self) -> None: """ The method updates the gauge with the number of processed and queued messages. """ - users_dict = self.database.get_users() processed_messages_count = 0 queue_messages_count = 0 + users_dict = self.database.get_users() + + log.info('Users dict: %s', users_dict) for user in users_dict: processed_messages = self.database.get_user_processed(user_id=user['user_id']) queue_messages = self.database.get_user_queue(user_id=user['user_id']) + log.info('User %s: processed messages %s, queue messages %s', user['user_id'], processed_messages, queue_messages) if processed_messages: processed_messages_count += len(processed_messages.get(user['user_id'], [])) if queue_messages: From 58049b8f7fa5b844b4695770c13f98d1ddef2ea5 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:33:07 +0400 Subject: [PATCH 108/148] modified: src/modules/metrics.py --- src/modules/metrics.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index bcdd81442..4b9bad17d 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -93,9 +93,9 @@ def collect_messages_stats(self) -> None: queue_messages = self.database.get_user_queue(user_id=user['user_id']) log.info('User %s: processed messages %s, queue messages %s', user['user_id'], processed_messages, queue_messages) if processed_messages: - processed_messages_count += len(processed_messages.get(user['user_id'], [])) + processed_messages_count += len(processed_messages) if queue_messages: - queue_messages_count += len(queue_messages.get(user['user_id'], [])) + queue_messages_count += len(queue_messages) self.processed_messages_counter.set(processed_messages_count) self.queue_length_gauge.set(queue_messages_count) From 0fe22c73ec4d475397d727938b3047100229d78e Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:42:43 +0400 Subject: [PATCH 109/148] modified: src/modules/metrics.py modified: tests/conftest.py modified: tests/test_metrics.py --- src/modules/metrics.py | 2 -- tests/conftest.py | 19 +++++++++++++++++-- tests/test_metrics.py | 4 ++-- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index 4b9bad17d..eff76d911 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -87,11 +87,9 @@ def collect_messages_stats(self) -> None: queue_messages_count = 0 users_dict = self.database.get_users() - log.info('Users dict: %s', users_dict) for user in users_dict: processed_messages = self.database.get_user_processed(user_id=user['user_id']) queue_messages = 
self.database.get_user_queue(user_id=user['user_id']) - log.info('User %s: processed messages %s, queue messages %s', user['user_id'], processed_messages, queue_messages) if processed_messages: processed_messages_count += len(processed_messages) if queue_messages: diff --git a/tests/conftest.py b/tests/conftest.py index 7d0199e42..b689156af 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -411,8 +411,23 @@ def fixture_postgres_users_test_data(postgres_instance): { 'user_id': 'test_user_3', 'chat_id': 'test_chat_3', - 'status': 'denied' - } + 'status': 'allowed' + }, + { + 'user_id': 'test_user_4', + 'chat_id': 'test_chat_4', + 'status': 'allowed' + }, + { + 'user_id': 'test_user_5', + 'chat_id': 'test_chat_5', + 'status': 'allowed' + }, + { + 'user_id': 'test_user_6', + 'chat_id': 'test_chat_6', + 'status': 'allowed' + }, ] conn, cursor = postgres_instance for user in data: diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 9aac12db8..f2866b9de 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -30,8 +30,8 @@ def test_metrics_users_stats(metrics_class, postgres_users_test_data): print(response.text) assert "pytest_access_granted_total" in response.text assert "pytest_access_denied_total" in response.text - assert "pytest_access_granted_total 1.0" in response.text - assert "pytest_access_denied_total 2.0" in response.text + assert "pytest_access_granted_total 5.0" in response.text + assert "pytest_access_denied_total 1.0" in response.text @pytest.mark.order(15) From addcf71937f20ca44b507c6aa04d7f29bdf7696a Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 14:48:17 +0400 Subject: [PATCH 110/148] build dev image --- Dockerfile | 2 +- poetry.lock | 146 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 3 files changed, 75 insertions(+), 75 deletions(-) diff --git a/Dockerfile b/Dockerfile index c637eb7b6..caa64c565 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.19-alpine3.20 +FROM python:3.10.15-alpine3.20 ### External argumetns ### ARG PROJECT_DESCRIPTION diff --git a/poetry.lock b/poetry.lock index 2ae73e47d..20f87ab7f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,78 +13,78 @@ files = [ [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -618,13 +618,13 @@ test = ["pytest", "pytest-cov"] [[package]] name = "more-itertools" -version = "10.4.0" +version = "10.5.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"}, - 
{file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"}, + {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, + {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, ] [[package]] @@ -1076,5 +1076,5 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "fa955dfdcb032f7463ef978064e12800f34d95047753864159e2af6917c00da0" +python-versions = "^3.10" +content-hash = "5e837ba4da3aff515cf419a60f54782552524ae4c947681f52337f2f41167c64" diff --git a/pyproject.toml b/pyproject.toml index 65cfce439..fe8fb630c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,7 @@ packages = [{include = "src"}] "Bug Tracker" = "https://github.com/obervinov/pyinstabot-downloader/issues" [tool.poetry.dependencies] -python = "^3.9" +python = "^3.10" instaloader = "^4" dropbox = "^12" 'mega.py' = "^1.0.8" From 58f1499188fe018ae37982d8a1d555638e4bec12 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 15:14:34 +0400 Subject: [PATCH 111/148] modified: poetry.lock --- poetry.lock | 32 +++++--------------------------- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index 20f87ab7f..e17aa40c7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -663,28 +663,6 @@ files = [ [package.extras] twisted = ["twisted"] -[[package]] -name = "psycopg2" -version = "2.9.9" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, - {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, - {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, - {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, - {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, - {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, - {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, - {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, - {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, -] - [[package]] name = "psycopg2-binary" version = "2.9.9" @@ -997,7 +975,7 @@ socks = ["PySocks 
(>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "users" -version = "3.0.0" +version = "3.0.1" description = "This python module is a simple implementation of user management functionality for telegram bots, such as: authentication, authorization and requests limiting." optional = false python-versions = "^3.9 || ^3.10 || ^3.11" @@ -1006,14 +984,14 @@ develop = false [package.dependencies] logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"} -psycopg2 = "^2" +psycopg2-binary = "^2" vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0"} [package.source] type = "git" url = "https://github.com/obervinov/users-package.git" -reference = "v3.0.0" -resolved_reference = "1026da4cb8cfacd1186808a75284257b1cdac610" +reference = "v3.0.1" +resolved_reference = "c3d4a6cafc7a05861cdd5cb7dc1c56da3e50205d" [[package]] name = "vault" @@ -1077,4 +1055,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "5e837ba4da3aff515cf419a60f54782552524ae4c947681f52337f2f41167c64" +content-hash = "348e564a374494262d069857f8b6d0904a06831e1c59798c496193d6a857b8c5" diff --git a/pyproject.toml b/pyproject.toml index fe8fb630c..57a3118ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ webdavclient3 = "^3" prometheus-client = "^0" logger = { git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6" } vault = { git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0" } -users = { git = "https://github.com/obervinov/users-package.git", tag = "v3.0.0" } +users = { git = "https://github.com/obervinov/users-package.git", tag = "v3.0.1" } telegram = { git = "https://github.com/obervinov/telegram-package.git", tag = "v2.0.1" } [build-system] From 1f4826f87cf98ad2c79e02fa2901b4f20eb68ec9 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 15:14:54 +0400 Subject: [PATCH 112/148] modified: CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 182d3a60e..38854b077 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p #### 🚀 Features * bump workflow version to `1.2.9` * bump vault-package to major version `3.0.0` -* bump users-package to major version `3.0.0` +* bump users-package to major version `3.0.1` * bump telegram-package to major version `2.0.1` * add tests for database and metrics modules * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) From 6782cf7e06a253e69e9fd55b3858c38537c4555f Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 15:30:16 +0400 Subject: [PATCH 113/148] rollback python version to 3.9.20 --- Dockerfile | 2 +- poetry.lock | 4 ++-- pyproject.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index caa64c565..1ac6b8b15 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.15-alpine3.20 +FROM python:3.9.20-alpine3.20 ### External argumetns ### ARG PROJECT_DESCRIPTION diff --git a/poetry.lock b/poetry.lock index e17aa40c7..52c67e256 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1054,5 +1054,5 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "348e564a374494262d069857f8b6d0904a06831e1c59798c496193d6a857b8c5" +python-versions = "^3.9" +content-hash = 
"f1b46f0dc1e30d0a15241f6ff4fa152181523c6288969d953edaad0c5da7781e" diff --git a/pyproject.toml b/pyproject.toml index 57a3118ab..711c3640a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,7 @@ packages = [{include = "src"}] "Bug Tracker" = "https://github.com/obervinov/pyinstabot-downloader/issues" [tool.poetry.dependencies] -python = "^3.10" +python = "^3.9" instaloader = "^4" dropbox = "^12" 'mega.py' = "^1.0.8" From 26596ba66bd1d8cf526ab4b5714ead3cd0cfac92 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 20:57:13 +0400 Subject: [PATCH 114/148] added migration for vault users data --- src/migrations/0001_vault_historical_data.py | 2 +- src/migrations/0004_vault_users_data.py | 62 ++++++++++++++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 src/migrations/0004_vault_users_data.py diff --git a/src/migrations/0001_vault_historical_data.py b/src/migrations/0001_vault_historical_data.py index 145fe2c15..1e33d1fb1 100644 --- a/src/migrations/0001_vault_historical_data.py +++ b/src/migrations/0001_vault_historical_data.py @@ -64,8 +64,8 @@ def execute(obj): with conn.cursor() as cursor: cursor.execute(f"INSERT INTO {table_name} ({columns}) VALUES ({values})") conn.commit() - obj.close_connection(conn) print(f"{NAME}: Post {post_id} from history/{owner} has been added to processed table") + obj.close_connection(conn) print(f"{NAME}: Migration has been completed") # Will be fixed after the issue https://github.com/obervinov/vault-package/issues/46 is resolved # pylint: disable=broad-exception-caught diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py new file mode 100644 index 000000000..5a762ca93 --- /dev/null +++ b/src/migrations/0004_vault_users_data.py @@ -0,0 +1,62 @@ +# pylint: disable=C0103,R0914,R0801 +""" +Migration for the vault users data to the users table in the database. +https://github.com/obervinov/users-package/blob/v3.0.0/tests/postgres/tables.sql +""" +VERSION = '1.0' +NAME = '0004_vault_users_data' + + +def execute(obj): + """ + Migration for the vault users data to the users table in the database. + + Args: + obj: An obj containing the database connection and cursor, as well as the Vault instance. + + Returns: + None + """ + # database settings + table_name = 'users' + print(f"{NAME}: Start migration from the vault to the {table_name} table...") + + # check if the table exists for execute the migration + conn = obj.get_connection() + with conn.cursor() as cursor: + cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,)) + table = cursor.fetchone() + + if not table: + print(f"{NAME}: The {table_name} table does not exist. 
Skip the migration.") + + else: + try: + users = obj.vault.kv2eninge.list_secrets(path='data/users') + users_counter = len(users) + print(f"{NAME}: Founded {users_counter} users in users data") + + for user in users: + user_last_state = obj.vault.kv2eninge.read_secret(path=f"data/users/{user}") + + user_id = user + chat_id = 'unknown' + status = user_last_state.get('status', 'unknown') + + values = f"'{user_id}', '{chat_id}', '{status}'" + + print(f"{NAME}: Migrating user {user_id} to the {table_name} table...") + conn = obj.get_connection() + with conn.cursor() as cursor: + cursor.execute(f"INSERT INTO {table_name} (user_id, chat_id, status) VALUES ({values})") + conn.commit() + print(f"{NAME}: User {user_id} has been added to the {table_name} table") + obj.close_connection(conn) + print(f"{NAME}: Migration has been completed") + # pylint: disable=broad-exception-caught + except Exception as migration_error: + print( + f"{NAME}: Migration cannot be completed due to an error: {migration_error}. " + "Perhaps the history is empty or the Vault secrets path does not exist and migration isn't unnecessary." + "It's not a critical error, so the migration will be skipped." + ) From 62022f0a21c3bdd24c033378fbbca35569311dc4 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 21:05:00 +0400 Subject: [PATCH 115/148] fixed `AttributeError: 'NoneType' object has no attribute 'upper'` --- src/bot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bot.py b/src/bot.py index a5cf4582b..4be72127b 100644 --- a/src/bot.py +++ b/src/bot.py @@ -24,7 +24,7 @@ # Vault client -vault = VaultClient(name=TELEGRAM_BOT_NAME) +vault = VaultClient(namespace=TELEGRAM_BOT_NAME) # Telegram instance telegram = TelegramBot(vault=vault) # Telegram bot for decorators From f4d3a7c3e0ba2f3d13dbd41b78b717fe442196af Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 21:24:44 +0400 Subject: [PATCH 116/148] fixed users-package database role --- README.md | 8 +++----- docker-compose.yml | 4 +++- src/bot.py | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 9a3bf1666..0c5b29cb2 100644 --- a/README.md +++ b/README.md @@ -61,11 +61,9 @@ This project is a Telegram bot that allows you to upload posts from your Instagr | Variable | Description | Default value | | ------------- | ------------- | ------------- | | `LOGGER_LEVEL` | [The logging level of the logging module](https://docs.python.org/3/library/logging.html#logging-levels) | `INFO` | -| `BOT_NAME` | The name of the bot, used to determine the unique mount point in the vault | `pyinstabot-downloader` | +| `TELEGRAM_BOT_NAME` | The name of the bot, used to determine the unique mount point in the vault | `pyinstabot-downloader` | | `MESSAGES_CONFIG` | The path to the message template file | `src/configs/messages.json` | -| `VAULT_ADDR` | The address at which the vault server will be available to the bot | `None` | -| `VAULT_APPROLE_ID` | [Approle id created during vault setup](https://developer.hashicorp.com/vault/docs/auth/approle) | `None` | -| `VAULT_APPROLE_SECRETID` | [Approle secret id created during vault setup](https://developer.hashicorp.com/vault/docs/auth/approle) | `None` | +| `VAULT_*` | All supported vault environment variables can be found [here](https://github.com/obervinov/vault-package/tree/v3.0.0?tab=readme-ov-file#-supported-environment-variables) | - |
## Prepare and configure environment @@ -229,7 +227,7 @@ export PGDATABASE=postgres ## How to run project locally ```sh export VAULT_APPROLE_ID={change_me} -export VAULT_APPROLE_SECRETID={change_me} +export VAULT_APPROLE_SECRET_ID={change_me} export VAULT_ADDR={change_me} docker compose -f docker-compose.yml up -d ``` diff --git a/docker-compose.yml b/docker-compose.yml index 3ab81a3a1..48dcd615e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -52,8 +52,10 @@ services: environment: - TELEGRAM_BOT_NAME=pyinstabot-downloader - VAULT_APPROLE_ID=${VAULT_APPROLE_ID} - - VAULT_APPROLE_SECRETID=${VAULT_APPROLE_SECRETID} + - VAULT_APPROLE_SECRET_ID=${VAULT_APPROLE_SECRET_ID} - VAULT_ADDR=${VAULT_ADDR} + - VAULT_AUTH_TYPE=approle + - VAULT_NAMESPACE=pyinstabot-downloader - LOGGER_LEVEL=DEBUG - MESSAGES_CONFIG=configs/messages.json volumes: diff --git a/src/bot.py b/src/bot.py index 4be72127b..1675307c9 100644 --- a/src/bot.py +++ b/src/bot.py @@ -24,15 +24,15 @@ # Vault client -vault = VaultClient(namespace=TELEGRAM_BOT_NAME) +vault = VaultClient() # Telegram instance telegram = TelegramBot(vault=vault) # Telegram bot for decorators bot = telegram.telegram_bot # Users module with rate limits option -users_rl = Users(vault=vault) +users_rl = Users(vault=vault, rate_limits=True, storage={'db_role': TELEGRAM_BOT_NAME}) # Users module without rate limits option -users = Users(vault=vault, rate_limits=False) +users = Users(vault=vault, storage={'db_role': TELEGRAM_BOT_NAME}) # Client for download content from supplier # If API disabled, the mock object will be used From 25350ed77dcddb9375bf62e8a1b8ef680aa10409 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 21:31:58 +0400 Subject: [PATCH 117/148] updated general vault policy --- vault/policy.hcl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/vault/policy.hcl b/vault/policy.hcl index d93dd38df..bb5227c32 100644 --- a/vault/policy.hcl +++ b/vault/policy.hcl @@ -28,6 +28,11 @@ path "pyinstabot-downloader/data/configuration/*" { capabilities = ["read", "list"] } +# Allowed to read and generate credentials in database engine +path "pyinstabot-downloader/config" { + capabilities = ["read", "list", "update"] +} + # Allowed read and write of bot data (!!! 
deprecated after https://github.com/obervinov/users-package/issues/41) path "pyinstabot-downloader/data/data/*" { capabilities = ["read", "list", "create", "update"] From e208a409ff65f10e69199262c4742e47231c7d51 Mon Sep 17 00:00:00 2001 From: obervinov Date: Tue, 10 Sep 2024 22:10:39 +0400 Subject: [PATCH 118/148] fixed vault database engine mount point --- CHANGELOG.md | 2 +- src/bot.py | 2 +- vault/policy.hcl | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 38854b077..5369c5a99 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p #### 🚀 Features * bump workflow version to `1.2.9` * bump vault-package to major version `3.0.0` -* bump users-package to major version `3.0.1` +* bump users-package to major version `3.0.2` * bump telegram-package to major version `2.0.1` * add tests for database and metrics modules * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) diff --git a/src/bot.py b/src/bot.py index 1675307c9..100e4f42b 100644 --- a/src/bot.py +++ b/src/bot.py @@ -24,7 +24,7 @@ # Vault client -vault = VaultClient() +vault = VaultClient(dbengine={"mount_point": f"{TELEGRAM_BOT_NAME}-database"}) # Telegram instance telegram = TelegramBot(vault=vault) # Telegram bot for decorators diff --git a/vault/policy.hcl b/vault/policy.hcl index bb5227c32..260603466 100644 --- a/vault/policy.hcl +++ b/vault/policy.hcl @@ -29,7 +29,7 @@ path "pyinstabot-downloader/data/configuration/*" { } # Allowed to read and generate credentials in database engine -path "pyinstabot-downloader/config" { +path "pyinstabot-downloader-database/creds" { capabilities = ["read", "list", "update"] } From 542136982b7272e6d93266b4c450b72135c51373 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 01:45:10 +0400 Subject: [PATCH 119/148] fixed errors and typos --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- src/bot.py | 1 + src/modules/database.py | 4 ++-- vault/policy.hcl | 2 +- 5 files changed, 9 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 52c67e256..ed98df0f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -975,7 +975,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "users" -version = "3.0.1" +version = "3.0.2" description = "This python module is a simple implementation of user management functionality for telegram bots, such as: authentication, authorization and requests limiting." 
optional = false python-versions = "^3.9 || ^3.10 || ^3.11" @@ -990,8 +990,8 @@ vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0"} [package.source] type = "git" url = "https://github.com/obervinov/users-package.git" -reference = "v3.0.1" -resolved_reference = "c3d4a6cafc7a05861cdd5cb7dc1c56da3e50205d" +reference = "v3.0.2" +resolved_reference = "25cbde0ef24379f66557e0bb5dc27d9d43d8867e" [[package]] name = "vault" @@ -1055,4 +1055,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f1b46f0dc1e30d0a15241f6ff4fa152181523c6288969d953edaad0c5da7781e" +content-hash = "60e533bec340da4685f73edf8483650533df2c4c5637a5d2b63589aa44bda0f9" diff --git a/pyproject.toml b/pyproject.toml index 711c3640a..c4982f247 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ webdavclient3 = "^3" prometheus-client = "^0" logger = { git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6" } vault = { git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0" } -users = { git = "https://github.com/obervinov/users-package.git", tag = "v3.0.1" } +users = { git = "https://github.com/obervinov/users-package.git", tag = "v3.0.2" } telegram = { git = "https://github.com/obervinov/telegram-package.git", tag = "v2.0.1" } [build-system] diff --git a/src/bot.py b/src/bot.py index 100e4f42b..e44f30776 100644 --- a/src/bot.py +++ b/src/bot.py @@ -24,6 +24,7 @@ # Vault client +# The need to explicitly specify a mount point will no longer be necessary after solving the https://github.com/obervinov/vault-package/issues/49 vault = VaultClient(dbengine={"mount_point": f"{TELEGRAM_BOT_NAME}-database"}) # Telegram instance telegram = TelegramBot(vault=vault) diff --git a/src/modules/database.py b/src/modules/database.py index 5a8a105ac..1cb83698a 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -105,7 +105,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: Returns: pool.SimpleConnectionPool: A connection pool for the PostgreSQL database. 
""" - required_keys_configuration = {"host", "port", "database", "connections"} + required_keys_configuration = {"host", "port", "dbname", "connections"} required_keys_credentials = {"username", "password"} db_configuration = self.vault.kv2engine.read_secret(path='configuration/database') db_credentials = self.vault.dbengine.generate_credentials(role=self.db_role) @@ -115,7 +115,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: missing_keys = (required_keys_configuration - set(db_configuration.keys())) | (required_keys_credentials - set(db_credentials.keys())) if missing_keys: - raise KeyError(f"Missing keys in the database configuration or credentials: {missing_keys}") + raise KeyError("Missing keys in the database configuration or credentials: {missing_keys}") log.info( '[Database]: Creating a connection pool for the %s:%s/%s', diff --git a/vault/policy.hcl b/vault/policy.hcl index 260603466..c2844032e 100644 --- a/vault/policy.hcl +++ b/vault/policy.hcl @@ -29,7 +29,7 @@ path "pyinstabot-downloader/data/configuration/*" { } # Allowed to read and generate credentials in database engine -path "pyinstabot-downloader-database/creds" { +path "pyinstabot-downloader-database/creds/*" { capabilities = ["read", "list", "update"] } From 3b931ddb356d547cd4fba0b1ebccdfdd768ca4f8 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 01:53:43 +0400 Subject: [PATCH 120/148] additional corrections --- README.md | 5 ++--- scripts/vault-init.sh | 19 +++++++++++++++++++ tests/conftest.py | 2 +- 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 0c5b29cb2..d85ca54e4 100644 --- a/README.md +++ b/README.md @@ -87,14 +87,13 @@ This project is a Telegram bot that allows you to upload posts from your Instagr ### Bot configuration source and supported parameters All bot configuration is stored in the `Vault Secrets`
_except for the part of the configuration that configures the connection to `Vault`_
+- `pyinstabot-downloader-database` - vault database engine mount point (returns the temporary username and password for the database) - `configuration/database`: database connection parameters ```json { - "database": "pyinstabot-downloader", + "dbname": "pyinstabot-downloader", "host": "postgresql.example.com", - "password": "qwerty123", "port": "5432", - "user": "user1", "connections": "10" } ``` diff --git a/scripts/vault-init.sh b/scripts/vault-init.sh index ffb81565a..06dc0591c 100644 --- a/scripts/vault-init.sh +++ b/scripts/vault-init.sh @@ -1,6 +1,10 @@ # /bin/bash # Description: Prepare vault for pyinstabot-downloader + +# Prepare kv2 engine vault secrets enable -path=pyinstabot-downloader kv-v2 + +# Prepare approle vault policy write pyinstabot-downloader vault/policy.hcl vault auth enable -path=pyinstabot-downloader approle vault write auth/pyinstabot-downloader/role/pyinstabot-downloader \ @@ -12,4 +16,19 @@ vault write auth/pyinstabot-downloader/role/pyinstabot-downloader \ bind_secret_id=true \ mount_point="pyinstabot-downloader" \ secret_id_ttl=0 + +# Prepare db engine +vault secrets enable -path=pyinstabot-downloader-database database +vault write pyinstabot-downloader-database/config/postgresql \ + plugin_name=postgresql-database-plugin \ + allowed_roles="pyinstabot-downloader" \ + verify_connection=false \ + connection_url="postgresql://{{username}}:{{password}}@localhost:5432/pyinstabot-downloader?sslmode=disable" \ + username="postgres" \ + password="changeme" +vault write pyinstabot-downloader-database/roles/pyinstabot-downloader \ + db_name=postgresql \ + creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \ + default_ttl="1h" \ + max_ttl="24h" # End of snippet \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index b689156af..5492d44f2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -283,7 +283,7 @@ def fixture_vault_configuration_data(vault_instance, namespace): database = { 'host': '0.0.0.0', 'port': '5432', - 'database': namespace, + 'dbname': namespace, 'connections': '10' } for key, value in database.items(): From e28346c5c6db46c29a24e56d8df1b91767939b70 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 02:00:57 +0400 Subject: [PATCH 121/148] modified: src/modules/database.py --- src/modules/database.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/modules/database.py b/src/modules/database.py index 1cb83698a..cbb0431ae 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -119,7 +119,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: log.info( '[Database]: Creating a connection pool for the %s:%s/%s', - db_configuration['host'], db_configuration['port'], db_configuration['database'] + db_configuration['host'], db_configuration['port'], db_configuration['dbname'] ) return pool.SimpleConnectionPool( minconn=1, @@ -128,7 +128,7 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool: port=db_configuration['port'], user=db_credentials['username'], password=db_credentials['password'], - database=db_configuration['database'] + database=db_configuration['dbname'] ) def get_connection(self) -> psycopg2.extensions.connection: From 515777db7447431c54f3e1870b539e7ef18689e8 Mon Sep 17 00:00:00 2001 
From: obervinov Date: Wed, 11 Sep 2024 11:04:38 +0400 Subject: [PATCH 122/148] fixed typo in migration --- CHANGELOG.md | 2 +- src/migrations/0004_vault_users_data.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5369c5a99..cd72da0a2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-10 +## v2.3.0 - 2024-09-11 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index 5a762ca93..1b67ba345 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -32,12 +32,12 @@ def execute(obj): else: try: - users = obj.vault.kv2eninge.list_secrets(path='data/users') + users = obj.vault.kv2engine.list_secrets(path='data/users') users_counter = len(users) print(f"{NAME}: Founded {users_counter} users in users data") for user in users: - user_last_state = obj.vault.kv2eninge.read_secret(path=f"data/users/{user}") + user_last_state = obj.vault.kv2engine.read_secret(path=f"data/users/{user}") user_id = user chat_id = 'unknown' From 9480fd9b7bc742375f3543a4243e977ef307b49e Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 12:12:15 +0400 Subject: [PATCH 123/148] fixed errors and typos --- src/bot.py | 6 +++--- src/migrations/0004_vault_users_data.py | 3 +-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/bot.py b/src/bot.py index e44f30776..e19d63508 100644 --- a/src/bot.py +++ b/src/bot.py @@ -31,9 +31,9 @@ # Telegram bot for decorators bot = telegram.telegram_bot # Users module with rate limits option -users_rl = Users(vault=vault, rate_limits=True, storage={'db_role': TELEGRAM_BOT_NAME}) +users_rl = Users(vault=vault, rate_limits=True, storage={'db_role': f"{TELEGRAM_BOT_NAME}-users-rl"}) # Users module without rate limits option -users = Users(vault=vault, storage={'db_role': TELEGRAM_BOT_NAME}) +users = Users(vault=vault, storage={'db_role': f"{TELEGRAM_BOT_NAME}-users"}) # Client for download content from supplier # If API disabled, the mock object will be used @@ -63,7 +63,7 @@ uploader.run_transfers.return_value = 'completed' # Client for communication with the database -database = DatabaseClient(vault=vault, db_role=TELEGRAM_BOT_NAME) +database = DatabaseClient(vault=vault, db_role=f"{TELEGRAM_BOT_NAME}-bot") # Metrics exporter metrics = Metrics(port=METRICS_PORT, interval=METRICS_INTERVAL, metrics_prefix=TELEGRAM_BOT_NAME, vault=vault, database=database) diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index 1b67ba345..2c433d922 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -46,12 +46,10 @@ def execute(obj): values = f"'{user_id}', '{chat_id}', '{status}'" print(f"{NAME}: Migrating user {user_id} to the {table_name} table...") - conn = obj.get_connection() with conn.cursor() as cursor: cursor.execute(f"INSERT INTO {table_name} (user_id, chat_id, status) VALUES ({values})") conn.commit() print(f"{NAME}: User {user_id} has been added to the {table_name} table") - 
obj.close_connection(conn) print(f"{NAME}: Migration has been completed") # pylint: disable=broad-exception-caught except Exception as migration_error: @@ -60,3 +58,4 @@ def execute(obj): "Perhaps the history is empty or the Vault secrets path does not exist and migration isn't unnecessary." "It's not a critical error, so the migration will be skipped." ) +obj.close_connection(conn) \ No newline at end of file From cb23fa3f07804ad667f2f63ebbafc3c5d0e2d196 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 12:15:47 +0400 Subject: [PATCH 124/148] fixed 0004_vault_users_data.py --- src/migrations/0004_vault_users_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index 2c433d922..a86854ca8 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -58,4 +58,4 @@ def execute(obj): "Perhaps the history is empty or the Vault secrets path does not exist and migration isn't unnecessary." "It's not a critical error, so the migration will be skipped." ) -obj.close_connection(conn) \ No newline at end of file + obj.close_connection(conn) From 36cad9050747d154c8e68f67795c8efa56a9f193 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:02:21 +0400 Subject: [PATCH 125/148] fixed `status_message_updater_thread()` for denied users --- src/bot.py | 5 +++-- src/migrations/0004_vault_users_data.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/bot.py b/src/bot.py index e19d63508..2fbc41113 100644 --- a/src/bot.py +++ b/src/bot.py @@ -486,9 +486,10 @@ def status_message_updater_thread() -> None: while True: time.sleep(STATUSES_MESSAGE_FREQUENCY) try: + users_dict = [] users_dict = database.get_users() - if users_dict: - for user in users_dict: + for user in users_dict: + if user['status'] == 'allowed': update_status_message(user_id=user['user_id']) # pylint: disable=broad-exception-caught except Exception as exception: diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index a86854ca8..49f47a3d1 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -6,6 +6,7 @@ VERSION = '1.0' NAME = '0004_vault_users_data' +import json def execute(obj): """ @@ -37,7 +38,7 @@ def execute(obj): print(f"{NAME}: Founded {users_counter} users in users data") for user in users: - user_last_state = obj.vault.kv2engine.read_secret(path=f"data/users/{user}") + user_last_state = json.loads(obj.vault.kv2engine.read_secret(path=f"data/users/{user}")) user_id = user chat_id = 'unknown' From 0e0c1b901c7e863b8cf71c61aed42d7312b52b2c Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:06:44 +0400 Subject: [PATCH 126/148] modified: src/migrations/0004_vault_users_data.py --- src/migrations/0004_vault_users_data.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index 49f47a3d1..a6bb9f938 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -6,7 +6,6 @@ VERSION = '1.0' NAME = '0004_vault_users_data' -import json def execute(obj): """ @@ -38,7 +37,7 @@ def execute(obj): print(f"{NAME}: Founded {users_counter} users in users data") for user in users: - user_last_state = json.loads(obj.vault.kv2engine.read_secret(path=f"data/users/{user}")) + user_last_state = 
obj.json.loads(obj.vault.kv2engine.read_secret(path=f"data/users/{user}")) user_id = user chat_id = 'unknown' From 2305c6a82f8aca13acdab12fd030acc50f1ec03a Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:16:05 +0400 Subject: [PATCH 127/148] fixed json module for migrations --- src/migrations/0004_vault_users_data.py | 1 - src/modules/database.py | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index a6bb9f938..49b512e10 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -55,7 +55,6 @@ def execute(obj): except Exception as migration_error: print( f"{NAME}: Migration cannot be completed due to an error: {migration_error}. " - "Perhaps the history is empty or the Vault secrets path does not exist and migration isn't unnecessary." "It's not a critical error, so the migration will be skipped." ) obj.close_connection(conn) diff --git a/src/modules/database.py b/src/modules/database.py index cbb0431ae..da5b1a577 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -40,6 +40,7 @@ class DatabaseClient: vault (object): An object representing a HashiCorp Vault client for retrieving secrets. db_role (str): The role to use for generating database credentials. errors (psycopg2.errors): A collection of error classes for exceptions raised by the psycopg2 module. + json (json): A JSON encoder and decoder for working with JSON data in migrations. Methods: create_connection_pool(): Create a connection pool for the PostgreSQL database. @@ -89,6 +90,7 @@ def __init__( >>> vault = Vault() >>> db = Database(vault=vault) """ + self.json = json self.vault = vault self.db_role = db_role self.errors = psycopg2.errors From 8bd619850cc5ebd2443a7550c1ef2a60ed21a2e1 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:26:58 +0400 Subject: [PATCH 128/148] modified: src/bot.py modified: src/modules/database.py modified: src/modules/metrics.py --- src/bot.py | 3 +-- src/modules/database.py | 30 +++++++++++++++++++++++------- src/modules/metrics.py | 4 ++-- 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/src/bot.py b/src/bot.py index 2fbc41113..ea1a9545a 100644 --- a/src/bot.py +++ b/src/bot.py @@ -489,8 +489,7 @@ def status_message_updater_thread() -> None: users_dict = [] users_dict = database.get_users() for user in users_dict: - if user['status'] == 'allowed': - update_status_message(user_id=user['user_id']) + update_status_message(user_id=user['user_id']) # pylint: disable=broad-exception-caught except Exception as exception: exception_context = { diff --git a/src/modules/database.py b/src/modules/database.py index da5b1a577..6fcc3ab8e 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -65,7 +65,7 @@ class DatabaseClient: get_user_processed(user_id): Get last ten messages from the processed table for the specified user. check_message_uniqueness(post_id, user_id): Check if a message with the given post ID and chat ID already exists in the queue. keep_message(message_id, chat_id, message_content, **kwargs): Add a message to the messages table in the database. - get_users(): Get a dict of users with their metadata from the users table. + get_users(only_allowed): Get a list of users from the users table in the database. 
get_considered_message(message_type, chat_id): Get a message with specified type and Rises: @@ -794,10 +794,17 @@ def keep_message( return response - def get_users(self) -> dict: + def get_users( + self, + only_allowed: bool = True + ) -> dict: """ This method will be deprecated after https://github.com/obervinov/users-package/issues/44 (users-package:v3.1.0). Get a dictionary of all users with their metadata from the users table in the database. + By default, the method returns only allowed users. + + Args: + only_allowed (bool): A flag indicating whether to return only allowed users. Default is True. Returns: dict: A dictionary containing all users in the database and their metadata. @@ -807,11 +814,20 @@ def get_users(self) -> dict: [{'user_id': '12345', 'chat_id': '67890', 'status': 'denied'}, {'user_id': '12346', 'chat_id': '67891', 'status': 'allowed'}] """ users_dict = [] - users = self._select( - table_name='users', - columns=("user_id", "chat_id", "status"), - limit=1000 - ) + if only_allowed: + users = self._select( + table_name='users', + columns=("user_id", "chat_id", "status"), + condition="status = 'allowed'", + limit=1000 + ) + else: + users = self._select( + table_name='users', + columns=("user_id", "chat_id", "status"), + limit=1000 + ) + if users: for user in users: users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]}) diff --git a/src/modules/metrics.py b/src/modules/metrics.py index eff76d911..b1d6bdad7 100644 --- a/src/modules/metrics.py +++ b/src/modules/metrics.py @@ -61,7 +61,7 @@ def collect_users_stats(self) -> None: """ The method collects information about users access status and updates the gauge. """ - users_dict = self.database.get_users() + users_dict = self.database.get_users(only_allowed=False) access_granted_count = 0 access_denied_count = 0 for user in users_dict: @@ -85,7 +85,7 @@ def collect_messages_stats(self) -> None: """ processed_messages_count = 0 queue_messages_count = 0 - users_dict = self.database.get_users() + users_dict = self.database.get_users(only_allowed=False) for user in users_dict: processed_messages = self.database.get_user_processed(user_id=user['user_id']) From 79f038ca50dafc922c953a57d6cb0fe6d05fee99 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:28:58 +0400 Subject: [PATCH 129/148] fixed migration 0004_vault_users_data.py --- src/migrations/0004_vault_users_data.py | 2 +- src/modules/database.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index 49b512e10..ff50e78ed 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -37,7 +37,7 @@ def execute(obj): print(f"{NAME}: Founded {users_counter} users in users data") for user in users: - user_last_state = obj.json.loads(obj.vault.kv2engine.read_secret(path=f"data/users/{user}")) + user_last_state = obj.vault.kv2engine.read_secret(path=f"data/users/{user}", key='authentication') user_id = user chat_id = 'unknown' diff --git a/src/modules/database.py b/src/modules/database.py index 6fcc3ab8e..be78fa15e 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -40,7 +40,6 @@ class DatabaseClient: vault (object): An object representing a HashiCorp Vault client for retrieving secrets. db_role (str): The role to use for generating database credentials. errors (psycopg2.errors): A collection of error classes for exceptions raised by the psycopg2 module. 
- json (json): A JSON encoder and decoder for working with JSON data in migrations. Methods: create_connection_pool(): Create a connection pool for the PostgreSQL database. @@ -90,7 +89,6 @@ def __init__( >>> vault = Vault() >>> db = Database(vault=vault) """ - self.json = json self.vault = vault self.db_role = db_role self.errors = psycopg2.errors From 48d16b809d43850bc12657e8a0d7ebf5d634f3df Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:43:29 +0400 Subject: [PATCH 130/148] returned json in 0004_vault_users_data.py --- src/migrations/0004_vault_users_data.py | 2 +- src/modules/database.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py index ff50e78ed..9e3379d0e 100644 --- a/src/migrations/0004_vault_users_data.py +++ b/src/migrations/0004_vault_users_data.py @@ -37,7 +37,7 @@ def execute(obj): print(f"{NAME}: Founded {users_counter} users in users data") for user in users: - user_last_state = obj.vault.kv2engine.read_secret(path=f"data/users/{user}", key='authentication') + user_last_state = obj.json.loads(obj.vault.kv2engine.read_secret(path=f"data/users/{user}", key='authentication')) user_id = user chat_id = 'unknown' diff --git a/src/modules/database.py b/src/modules/database.py index be78fa15e..6d6c4d83e 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -40,6 +40,7 @@ class DatabaseClient: vault (object): An object representing a HashiCorp Vault client for retrieving secrets. db_role (str): The role to use for generating database credentials. errors (psycopg2.errors): A collection of error classes for exceptions raised by the psycopg2 module. + json (json): A JSON encoder and decoder for working with JSON data to execute database migrations. Methods: create_connection_pool(): Create a connection pool for the PostgreSQL database. @@ -89,6 +90,7 @@ def __init__( >>> vault = Vault() >>> db = Database(vault=vault) """ + self.json = json self.vault = vault self.db_role = db_role self.errors = psycopg2.errors From cf313b8fcd5fb0391bbb3d814057d400c0b0f6b5 Mon Sep 17 00:00:00 2001 From: obervinov Date: Wed, 11 Sep 2024 13:47:52 +0400 Subject: [PATCH 131/148] fixed linting issues --- src/modules/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/modules/database.py b/src/modules/database.py index 6d6c4d83e..c999c2b74 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -90,7 +90,7 @@ def __init__( >>> vault = Vault() >>> db = Database(vault=vault) """ - self.json = json + self.json = json self.vault = vault self.db_role = db_role self.errors = psycopg2.errors From da1a7d5cdf316157f0de3106b44a391e63a4e9fd Mon Sep 17 00:00:00 2001 From: obervinov Date: Thu, 12 Sep 2024 20:03:38 +0400 Subject: [PATCH 132/148] difference corrections in the code --- CHANGELOG.md | 4 +-- src/bot.py | 46 ++++++++++++++++++++++-------- src/configs/constants.py | 7 +++++ src/configs/databases.json | 2 +- src/migrations/0003_users_table.py | 3 +- 5 files changed, 46 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cd72da0a2..3e138ebf4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
-## v2.3.0 - 2024-09-11 +## v2.3.0 - 2024-09-12 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes @@ -12,7 +12,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p #### 🚀 Features * bump workflow version to `1.2.9` * bump vault-package to major version `3.0.0` -* bump users-package to major version `3.0.2` +* bump users-package to major version `3.0.3` * bump telegram-package to major version `2.0.1` * add tests for database and metrics modules * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) diff --git a/src/bot.py b/src/bot.py index ea1a9545a..9db35c6f4 100644 --- a/src/bot.py +++ b/src/bot.py @@ -14,7 +14,12 @@ from telegram import TelegramBot, exceptions as TelegramExceptions from users import Users from vault import VaultClient -from configs.constants import (TELEGRAM_BOT_NAME, ROLES_MAP, QUEUE_FREQUENCY, STATUSES_MESSAGE_FREQUENCY, METRICS_PORT, METRICS_INTERVAL) +from configs.constants import ( + TELEGRAM_BOT_NAME, ROLES_MAP, + QUEUE_FREQUENCY, STATUSES_MESSAGE_FREQUENCY, + METRICS_PORT, METRICS_INTERVAL, + VAULT_DBENGINE_MOUNT_POINT, VAULT_DB_ROLE_MAIN, VAULT_DB_ROLE_USERS, VAULT_DB_ROLE_USERS_RL +) from modules.database import DatabaseClient from modules.exceptions import FailedMessagesStatusUpdater from modules.tools import get_hash @@ -25,15 +30,15 @@ # Vault client # The need to explicitly specify a mount point will no longer be necessary after solving the https://github.com/obervinov/vault-package/issues/49 -vault = VaultClient(dbengine={"mount_point": f"{TELEGRAM_BOT_NAME}-database"}) +vault = VaultClient(dbengine={"mount_point": VAULT_DBENGINE_MOUNT_POINT}) # Telegram instance telegram = TelegramBot(vault=vault) # Telegram bot for decorators bot = telegram.telegram_bot # Users module with rate limits option -users_rl = Users(vault=vault, rate_limits=True, storage={'db_role': f"{TELEGRAM_BOT_NAME}-users-rl"}) +users_rl = Users(vault=vault, rate_limits=True, storage={'db_role': VAULT_DB_ROLE_USERS_RL}) # Users module without rate limits option -users = Users(vault=vault, storage={'db_role': f"{TELEGRAM_BOT_NAME}-users"}) +users = Users(vault=vault, storage={'db_role': VAULT_DB_ROLE_USERS}) # Client for download content from supplier # If API disabled, the mock object will be used @@ -63,7 +68,7 @@ uploader.run_transfers.return_value = 'completed' # Client for communication with the database -database = DatabaseClient(vault=vault, db_role=f"{TELEGRAM_BOT_NAME}-bot") +database = DatabaseClient(vault=vault, db_role=VAULT_DB_ROLE_MAIN) # Metrics exporter metrics = Metrics(port=METRICS_PORT, interval=METRICS_INTERVAL, metrics_prefix=TELEGRAM_BOT_NAME, vault=vault, database=database) @@ -82,7 +87,8 @@ def start_command(message: telegram.telegram_types.Message = None) -> None: Returns: None """ - if users.user_access_check(message.chat.id).get('access', None) == users.user_status_allow: + requestor = {'user_id': message.chat.id, 'chat_id': message.chat.id, 'message_id': message.message_id} + if users.user_access_check(**requestor).get('access', None) == users.user_status_allow: log.info('[Bot]: Processing "start" command for user %s...', message.chat.id) # Main message reply_markup = telegram.create_inline_markup(ROLES_MAP.keys()) @@ -121,7 +127,11 @@ def bot_callback_query_handler(call: 
telegram.callback_query = None) -> None: None """ log.info('[Bot]: Processing button "%s" for user %s...', call.data, call.message.chat.id) - if users.user_access_check(call.message.chat.id, ROLES_MAP[call.data]).get('permissions', None) == users.user_status_allow: + requestor = { + 'user_id': call.message.chat.id, 'role_id': ROLES_MAP[call.data], + 'chat_id': call.message.chat.id, 'message_id': call.message.message_id + } + if users.user_access_check(**requestor).get('permissions', None) == users.user_status_allow: if call.data == "Post": help_message = telegram.send_styled_message( chat_id=call.message.chat.id, @@ -168,7 +178,8 @@ def unknown_command(message: telegram.telegram_types.Message = None) -> None: Returns: None """ - if users.user_access_check(message.chat.id).get('access', None) == users.user_status_allow: + requestor = {'user_id': message.chat.id, 'chat_id': message.chat.id, 'message_id': message.message_id} + if users.user_access_check(**requestor).get('access', None) == users.user_status_allow: log.error('[Bot]: Invalid command "%s" from user %s', message.text, message.chat.id) telegram.send_styled_message(chat_id=message.chat.id, messages_template={'alias': 'unknown_command'}) else: @@ -374,8 +385,11 @@ def process_one_post( Returns: None """ - # Check permissions - user = users_rl.user_access_check(message.chat.id, ROLES_MAP['Post']) + requestor = { + 'user_id': message.chat.id, 'role_id': ROLES_MAP['Post'], + 'chat_id': message.chat.id, 'message_id': message.message_id + } + user = users_rl.user_access_check(**requestor) if user.get('permissions', None) == users_rl.user_status_allow: data = message_parser(message) rate_limit = user.get('rate_limits', None) @@ -414,7 +428,11 @@ def process_list_posts( Returns: None """ - user = users.user_access_check(message.chat.id, ROLES_MAP['Posts List']) + requestor = { + 'user_id': message.chat.id, 'role_id': ROLES_MAP['Posts List'], + 'chat_id': message.chat.id, 'message_id': message.message_id + } + user = users.user_access_check(**requestor) if user.get('permissions', None) == users.user_status_allow: for link in message.text.split('\n'): message.text = link @@ -442,7 +460,11 @@ def reschedule_queue( Returns: None """ - user = users.user_access_check(message.chat.id, ROLES_MAP['Reschedule Queue']) + requestor = { + 'user_id': message.chat.id, 'role_id': ROLES_MAP['Reschedule Queue'], + 'chat_id': message.chat.id, 'message_id': message.message_id + } + user = users.user_access_check(**requestor) can_be_deleted = True if user.get('permissions', None) == users.user_status_allow: for item in message.text.split('\n'): diff --git a/src/configs/constants.py b/src/configs/constants.py index f16bee075..39ffe1b2e 100644 --- a/src/configs/constants.py +++ b/src/configs/constants.py @@ -19,3 +19,10 @@ STATUSES_MESSAGE_FREQUENCY = 15 METRICS_PORT = 8000 METRICS_INTERVAL = 30 + +# Vault Database Engine constants +VAULT_DBENGINE_MOUNT_POINT = f"{TELEGRAM_BOT_NAME}-database" +# Will be removed after https://github.com/obervinov/users-package/issues/47 +VAULT_DB_ROLE_MAIN = f"{TELEGRAM_BOT_NAME}-bot" +VAULT_DB_ROLE_USERS = f"{TELEGRAM_BOT_NAME}-users" +VAULT_DB_ROLE_USERS_RL = f"{TELEGRAM_BOT_NAME}-users-rl" diff --git a/src/configs/databases.json b/src/configs/databases.json index 046d8e8fd..d9f6e4a04 100644 --- a/src/configs/databases.json +++ b/src/configs/databases.json @@ -67,7 +67,7 @@ "description": "The table stores the user ID and the chat ID for communication with the bot", "columns": [ "id SERIAL PRIMARY KEY, ", - "user_id VARCHAR(255) 
NOT NULL, ", + "user_id VARCHAR(255) UNIQUE NOT NULL, ", "chat_id VARCHAR(255) NOT NULL, ", "status VARCHAR(255) NOT NULL DEFAULT 'denied'" ] diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py index 9d98e17a4..60b169861 100644 --- a/src/migrations/0003_users_table.py +++ b/src/migrations/0003_users_table.py @@ -20,7 +20,8 @@ def execute(obj): # database settings table_name = 'users' add_columns = [('status', 'VARCHAR(255)', "'denied'")] - print(f"{NAME}: Start migration for the {table_name} table: Add columns {add_columns}...") + update_columns = [('user_id', 'VARCHAR(255)', 'UNIQUE NOT NULL')] + print(f"{NAME}: Start migration for the {table_name} table: Add columns {add_columns} and update columns {update_columns}...") # check if the table exists and has the necessary schema for execute the migration conn = obj.get_connection() From 1fadeb00085c25299f02fcab3b2b97cfb0fa5367 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 13 Sep 2024 21:51:26 +0400 Subject: [PATCH 133/148] corrected log messages --- CHANGELOG.md | 2 +- src/modules/downloader.py | 31 +++++++++++++------------------ src/modules/uploader.py | 20 ++++++++++---------- 3 files changed, 24 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e138ebf4..c436d81e7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-12 +## v2.3.0 - 2024-09-13 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes diff --git a/src/modules/downloader.py b/src/modules/downloader.py index 0333634cd..26a6ab0b4 100644 --- a/src/modules/downloader.py +++ b/src/modules/downloader.py @@ -69,7 +69,7 @@ def __init__( "Failed to initialize the Downloader instance." "Please check the configuration in class argument or the secret with the configuration in the Vault." 
) - log.info('[Downloader]: creating a new instance of the Downloader...') + log.info('[Downloader]: Creating a new instance...') self.instaloader = instaloader.Instaloader( quiet=True, user_agent=self.configuration.get('user-agent', None), @@ -89,10 +89,10 @@ def __init__( fatal_status_codes=literal_eval(self.configuration.get('fatal-status-codes', '[]')) ) auth_status = self._login() - log.info( - '[Downloader]: downloader instance created successfully: %s in %s', - auth_status, self.configuration['username'] - ) + if auth_status == 'logged_in': + log.info('[Downloader]: Instance created successfully with account ', self.configuration['username']) + else: + raise FailedAuthInstaloader("Failed to authenticate the Instaloader instance.") def _login(self) -> Union[str, None]: """ @@ -115,7 +115,7 @@ def _login(self) -> Union[str, None]: self.configuration['username'], self.configuration['session-file'] ) - log.info('[Downloader]: session file %s was load success', self.configuration['session-file']) + log.info('[Downloader]: Session file %s was saved successfully', self.configuration['session-file']) return 'logged_in' if self.configuration['login-method'] == 'password': @@ -124,19 +124,14 @@ def _login(self) -> Union[str, None]: self.configuration['password'] ) self.instaloader.save_session_to_file(self.configuration['session-file']) - log.info( - '[Downloader]: login with password was successful. Save session in %s', - self.configuration['sessionfile'] - ) + log.info('[Downloader]: Login with password was successful. Save session in %s', self.configuration['sessionfile']) return 'logged_in' if self.configuration['login-method'] == 'anonymous': - log.warning('[Downloader]: initialization without authentication into an account (anonymous)') + log.warning('[Downloader]: Initialization without authentication into an account (anonymous)') return None - raise FailedAuthInstaloader( - "Failed to authenticate the Instaloader instance. Please check the configuration in the Vault or the class argument." - ) + raise FailedAuthInstaloader("Failed to authenticate the Instaloader instance.") def get_post_content( self, @@ -156,21 +151,21 @@ def get_post_content( 'status': 'completed' } """ - log.info('[Downloader]: downloading the contents of the post %s...', shortcode) + log.info('[Downloader]: Downloading the contents of the post %s...', shortcode) try: post = instaloader.Post.from_shortcode(self.instaloader.context, shortcode) self.instaloader.download_post(post, '') - log.info('[Downloader]: the contents of the post %s have been successfully downloaded', shortcode) + log.info('[Downloader]: The contents of the post %s have been successfully downloaded', shortcode) status = 'completed' owner = post.owner_username typename = post.typename except instaloader.exceptions.BadResponseException as error: - log.error('[Downloader]: error downloading post content: %s', error) + log.error('[Downloader]: Error downloading post content: %s', error) if "Fetching Post metadata failed" in str(error): status = 'source_not_found' owner = 'undefined' typename = 'undefined' - log.warning('[Downloader]: post %s not found, perhaps it was deleted. Message will be marked as processed.', shortcode) + log.warning('[Downloader]: Post %s not found, perhaps it was deleted. 
Message will be marked as processed.', shortcode) else: raise instaloader.exceptions.BadResponseException(error) diff --git a/src/modules/uploader.py b/src/modules/uploader.py index 329b4df8d..0836016c4 100644 --- a/src/modules/uploader.py +++ b/src/modules/uploader.py @@ -106,17 +106,17 @@ def _check_incomplete_transfers(self) -> None: Returns: None """ - log.info('[class.%s] Uploader: checking incomplete transfers in the temporary directory...', __class__.__name__) + log.info('[Uploader]: Checking incomplete transfers...', __class__.__name__) for root, dirs, _ in os.walk(self.configuration['source-directory']): for dir_name in dirs: sub_directory = os.path.join(root, dir_name) # Check the subdirectory for files sub_files = [f for f in os.listdir(sub_directory) if os.path.isfile(os.path.join(sub_directory, f))] if sub_files: - log.warning('[class.%s] Uploader: an unloaded artifact was found: %s', __class__.__name__, sub_directory) + log.warning('[Uploader]: An unloaded artifact was found: %s', __class__.__name__, sub_directory) self.run_transfers(sub_directory=sub_directory) else: - log.info('[class.%s] Uploader: remove of an empty directory %s', __class__.__name__, sub_directory) + log.info('[Uploader]: Remove of an empty directory %s', __class__.__name__, sub_directory) os.rmdir(sub_directory) def run_transfers( @@ -137,7 +137,7 @@ def run_transfers( """ transfers = {} result = "" - log.info('[class.%s] Uploader: preparing media files for transfer to the %s cloud...', __class__.__name__, self.configuration['storage-type']) + log.info('[Uploader]: Preparing media files for transfer to the %s...', __class__.__name__, self.configuration['storage-type']) for root, _, files in os.walk(f"{self.configuration['source-directory']}{sub_directory}"): for file in files: if file.split('.')[-1] in self.configuration.get('exclude-types', None): @@ -152,7 +152,7 @@ def run_transfers( result = 'completed' else: result = 'not_completed' - log.info('[class.%s] Uploader: list of all transfers %s', __class__.__name__, transfers) + log.info('[Uploader]: List of all transfers %s', __class__.__name__, transfers) return result def upload_to_cloud( @@ -172,20 +172,20 @@ def upload_to_cloud( or None """ - log.info('[class.%s] starting upload file %s to %s://%s', __class__.__name__, source, self.configuration['storage-type'], destination) + log.info('[Uploader]: Starting upload file %s to %s://%s', __class__.__name__, source, self.configuration['storage-type'], destination) response = None result = None if self.configuration['storage-type'] == 'mega': directory = f"{self.configuration['destination-directory']}/{destination}" - log.info('[class.%s] Uploader: trying found mega folder %s...', __class__.__name__, directory) + log.info('[Uploader]: Trying found mega folder %s...', __class__.__name__, directory) mega_folder = self.storage.find(directory, exclude_deleted=True) if not mega_folder: self.storage.create_folder(directory) mega_folder = self.storage.find(directory, exclude_deleted=True) - log.info('[class.%s] Uploader: mega folder not found, created new folder %s', __class__.__name__, mega_folder) + log.info('[Uploader]: Mega folder not found, created new folder %s', __class__.__name__, mega_folder) else: - log.info('[class.%s] Uploader: mega folder %s was found', __class__.__name__, mega_folder) + log.info('[Uploader]: Mega folder %s was found', __class__.__name__, mega_folder) response = self.storage.upload(filename=source, dest=mega_folder[0]) result = "uploaded" @@ -205,5 +205,5 @@ def upload_to_cloud( 
response = self.storage.info(f"{self.configuration['destination-directory']}/{destination}/{source.split('/')[-1]}")['etag'] result = "uploaded" - log.info('[class.%s] Uploader: %s successful transferred', __class__.__name__, response) + log.info('[Uploader]: %s successful transferred', __class__.__name__, response) return result From 1406750e934072f6d161a46f474398c5a98ba33a Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 13 Sep 2024 22:42:08 +0400 Subject: [PATCH 134/148] updated README.md --- README.md | 109 ++++++++++++++++++++++++++----------------- scripts/psql-init.sh | 2 +- 2 files changed, 66 insertions(+), 45 deletions(-) diff --git a/README.md b/README.md index d85ca54e4..a957c5005 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ - [Target storage of the content](#target-storage-of-the-content) - [Bot configuration source and supported parameters](#bot-configuration-source-and-supported-parameters) - [Bot persistent data storage](#bot-persistent-data-storage) -- [How to run project](#-how-to-run-project) +- [How to run project locally](#-how-to-run-project-locally) ## About this project @@ -50,11 +50,20 @@ This project is a Telegram bot that allows you to upload posts from your Instagr
## Requirements -- Vault server - [a storage of secrets for bot with kv v2 engine](https://developer.hashicorp.com/vault/docs/secrets/kv/kv-v2) -- Dropbox [api token](https://dropbox.tech/developers/generate-an-access-token-for-your-own-account) or Mega.nz [account](https://mega.nz) or WebDav provider [url, username and password](https://docs.nextcloud.com/server/latest/user_manual/en/files/access_webdav.html) -- Telegram bot api token - [instructions for creating bot and getting a token of api](https://learn.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-telegram?view=azure-bot-service-4.0) -- Instagram username/password - [login and password from the instagram account, it is advisable to create a new account](https://www.instagram.com/accounts/emailsignup/) -- Postgresql - [a storage of project persistent data](https://www.postgresql.org/download/) +- **Vault server** + - [store the project configuration in kv2](https://developer.hashicorp.com/vault/docs/secrets/kv/kv-v2) + - [generate access credentials in the database](https://developer.hashicorp.com/vault/docs/secrets/databases) + - [prepare the vault server](scripts/vault-init.sh) +- **Cloud Storage** (choose one) + - dropbox: [api token](https://dropbox.tech/developers/generate-an-access-token-for-your-own-account) + - mega: [account](https://mega.nz) + - webdav: [url, username and password](https://docs.nextcloud.com/server/latest/user_manual/en/files/access_webdav.html) +- **Telegram** + - bot: [api token](https://learn.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-telegram?view=azure-bot-service-4.0) +- **Instagram** (choose one) + - account: [username/password](https://www.instagram.com/accounts/emailsignup/) or [a ready uploaded session from the browser](https://raw.githubusercontent.com/instaloader/instaloader/master/docs/codesnippets/615_import_firefox_session.py) +- **Postgresql** + - database: [empty database](scripts/psql-init.sh)
## Environment variables @@ -85,9 +94,12 @@ This project is a Telegram bot that allows you to upload posts from your Instagr
### Bot configuration source and supported parameters - All bot configuration is stored in the `Vault Secrets`
-_except for the part of the configuration that configures the connection to `Vault`_
-- `pyinstabot-downloader-database` - vault database engine mount point (returns the temporary username and password for the database) + All bot configuration is stored in the `Vault Secrets` (_except for the part of the configuration that configures the connection to `Vault`_) +
+ +- `pyinstabot-downloader-database` - vault database engine mount point, returns the temporary username and password for the database. More information about the database engine can be found [here](https://developer.hashicorp.com/vault/docs/secrets/databases/postgresql) and [here](https://developer.hashicorp.com/vault/tutorials/db-credentials/database-secrets) +
+ - `configuration/database`: database connection parameters ```json { @@ -98,7 +110,22 @@ _except for the part of the configuration that configures the connection to `Vau } ```
+ +- `configuration/telegram`: telegram bot configuration + ```json + { + "token": "123456:qwerty" + } + ``` +
+ - `configuration/downloader-api`: downloader module configuration (for downloading content from instagram) + + Clarification of non-obvious parameters + - `fatal-status-codes`: a list of status codes that are considered fatal and stop downloader module work + - `iphone-support`: if `True` the downloader module will use the iphone user-agent + - `login-method`: the method of logging into the instagram account (`session`, `password`, `anonymous`) + - `session-base64`: the session file content in base64 format (only for `session` login method) ```json { "enabled": "True", @@ -112,24 +139,20 @@ _except for the part of the configuration that configures the connection to `Vau "username": "username1" } ``` - Clarification of non-obvious parameters - - `fatal-status-codes`: a list of status codes that are considered fatal and stop downloader module work - - `iphone-support`: if `True`, the downloader module will use the iphone user-agent - - `login-method`: the method of logging into the instagram account (`session`, `password`, `anonymous`) - - `session-base64`: the session file content in base64 format -
-- `configuration/telegram`: telegram bot configuration - ```json - { - "token": "123456:qwerty" - } - ```
+ - `configuration/uploader-api`: uploader module configuration (for upload content to the target storage) + + Clarification of non-obvious parameters + - `destination-directory`: the directory in the target storage where the content will be uploaded + - `exclude-types`: a list of file extensions that will be excluded from the upload (for example, `.txt` - text from the post) + - `source-directory`: the directory where the content will be stored before uploading (temporary directory) + - `storage-type`: the type of storage where the content will be uploaded (`dropbox`, `mega`, `webdav`) + - `url`: the url of the target webdav directory (only for `webdav` storage) ```json { - "destination-directory": "cloud-directory/", "enabled": "True", + "destination-directory": "cloud-directory/", "exclude-types": "[\".txt\", \".tmp\"]", "password": "qwerty123", "source-directory": "data/", @@ -138,14 +161,14 @@ _except for the part of the configuration that configures the connection to `Vau "url": "https://webdav.example.com/directory" } ``` - Clarification of non-obvious parameters - - `destination-directory`: the directory in the target storage where the content will be uploaded - - `exclude-types`: a list of file extensions that will be excluded from the upload (for example, `.txt` - text from the post) - - `source-directory`: the directory where the content will be stored before uploading (temporary directory) - - `storage-type`: the type of storage where the content will be uploaded (`dropbox`, `mega`, `webdav`) - - `url`: the url of the target webdav directory (only for `webdav` storage)
-- `configuration/users/`: user permissions configuration + +- `configuration/users/`: users permissions and attributes + + Clarification of non-obvious parameters + - `requests`: the number of requests that the user can make per day and per hour, as well as the random shift in minutes (scheduling of message processing from the queue works on the basis of this parameter) + - `roles`: list of roles that allow to use the corresponding functionality ([available roles](src/configs/constants.py#L11-L15)). + - `status`: allowed or denied user access to the bot ```json { "requests": "{\"requests_per_day\": 10, \"requests_per_hour\": 1, \"random_shift_minutes\": 60}", @@ -153,10 +176,6 @@ _except for the part of the configuration that configures the connection to `Vau "status": "allowed" } ``` - Clarification of non-obvious parameters - - `requests`: the number of requests that the user can make per day and per hour, as well as the random shift in minutes (scheduling of message processing from the queue works on the basis of this parameter) - - `roles`: list of roles that allow to use the corresponding functionality ([available roles](src/configs/constants.py#L19-L23)). - - `status`: allowed or denied user access to the bot #### You can use an existing vault-server or launch a new one using docker-compose Scripts for configuring the vault-server are located in the [vault-init.sh](scripts/vault-init.sh) @@ -170,11 +189,11 @@ cd pyinstabot-downloader docker-compose -f docker-compose.yml up vault-server -d # Initialize and unseal new vault-server +export VAULT_ADDR=http://0.0.0.0:8200 vault operator init vault operator unseal # Run the script for configuring the vault-server for this bot project -export VAULT_ADDR=http://localhost:8200 export VAULT_TOKEN=hvs.123456qwerty ./scripts/vault-init.sh ``` @@ -193,15 +212,16 @@ export VAULT_TOKEN=hvs.123456qwerty
### Bot persistent data storage - Persistent data storage is implemented using `Postgresql`
-You can familiarize yourself with the + Persistent data storage is implemented using `Postgresql` - data structure, tables and assignment of tables [here](src/configs/databases.json) - migrations [here](src/migrations/) -The database structure is created automatically when the bot starts. Bot checks the database structure and creates missing tables if necessary. -After checking the database structure, the bot executes the migrations in the order of their numbering.
-All that is required is a database and the rights of the owner of this data database. -To quickly prepare an instance, you can execute the[psql-init.sh](scripts/psql-init.sh) script +The database structure is created automatically when the bot starts: + 1. bot checks the database structure and creates missing tables if necessary + 2. after checking the database structure, the bot executes the migrations in the order of their numbering + +To quickly prepare an instance, you can execute the [psql-init.sh](scripts/psql-init.sh) script + ```bash git clone https://github.com/obervinov/pyinstabot-downloader.git cd pyinstabot-downloader @@ -218,9 +238,10 @@ export PGDATABASE=postgres **What data is stored in tables:** - users requests queue - users metadata -- history of processed users requests -- migration history -- messages sent by the bot +- history of users requests +- history of processed messages +- migrations history +- service messages by the bot
## How to run project locally @@ -235,4 +256,4 @@ docker compose -f docker-compose.yml up -d ## GitHub Actions | Name | Version | | ------------------------ | ----------- | -| GitHub Actions Templates | [v1.2.8](https://github.com/obervinov/_templates/tree/v1.2.8) | +| GitHub Actions Templates | [v1.2.9](https://github.com/obervinov/_templates/tree/v1.2.9) | diff --git a/scripts/psql-init.sh b/scripts/psql-init.sh index 6eb8af406..15210e92a 100644 --- a/scripts/psql-init.sh +++ b/scripts/psql-init.sh @@ -3,7 +3,7 @@ NEW_USER_PASSWORD=$(pwgen 24 -c1) psql -c "CREATE DATABASE pyinstabot-downloader;" psql -c "CREATE USER pyinstabot-downloader WITH PASSWORD '$NEW_USER_PASSWORD';" -psql -c "GRANT ALL PRIVILEGES ON DATABASE pyinstabot-downloader TO pyinstabot-downloader;" +psql -c "ALTER DATABASE pyinstabot-downloader OWNER TO pyinstabot-downloader;" echo "New user: pyinstabot-downloader" echo "New password: $NEW_USER_PASSWORD" echo "Database: pyinstabot-downloader" From 8b693e9eb3c82e4fa819fc9f64b43bb34ab4a725 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 13 Sep 2024 22:44:51 +0400 Subject: [PATCH 135/148] updated docker-compose.yml --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 48dcd615e..a0ed473ed 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -46,7 +46,7 @@ services: args: PROJECT_NAME: pyinstabot-downloader PROJECT_DESCRIPTION: "This project is a Telegram bot that allows you to upload posts from your Instagram profile to clouds like Dropbox, Mega or any WebDav compatible cloud storage." - PROJECT_VERSION: 2.2.0 + PROJECT_VERSION: 2.3.0 container_name: pyinstabot-downloader restart: always environment: From 6b8d4ac96934933aac21e053aeb60d2f27e20ef9 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 13 Sep 2024 22:49:49 +0400 Subject: [PATCH 136/148] updated vault-init.sh --- scripts/vault-init.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/vault-init.sh b/scripts/vault-init.sh index 06dc0591c..2c06588ba 100644 --- a/scripts/vault-init.sh +++ b/scripts/vault-init.sh @@ -12,7 +12,7 @@ vault write auth/pyinstabot-downloader/role/pyinstabot-downloader \ token_type=service \ secret_id_num_uses=0 \ token_num_uses=0 \ - token_ttl=1h \ + token_ttl=24h \ bind_secret_id=true \ mount_point="pyinstabot-downloader" \ secret_id_ttl=0 @@ -29,6 +29,7 @@ vault write pyinstabot-downloader-database/config/postgresql \ vault write pyinstabot-downloader-database/roles/pyinstabot-downloader \ db_name=postgresql \ creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \ + revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \ default_ttl="1h" \ max_ttl="24h" -# End of snippet \ No newline at end of file +# End of snippet From 7c704f3560a27afeec967e4464edfa67471bd749 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 13 Sep 2024 22:51:57 +0400 Subject: [PATCH 137/148] modified: scripts/vault-init.sh --- scripts/vault-init.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/vault-init.sh 
b/scripts/vault-init.sh index 2c06588ba..45e26dee1 100644 --- a/scripts/vault-init.sh +++ b/scripts/vault-init.sh @@ -30,6 +30,6 @@ vault write pyinstabot-downloader-database/roles/pyinstabot-downloader \ db_name=postgresql \ creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \ revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \ - default_ttl="1h" \ - max_ttl="24h" + default_ttl="24h" \ + max_ttl="72h" # End of snippet From cbf154dd51627cf2063892c95cc95630e450a5c6 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 13 Sep 2024 23:13:08 +0400 Subject: [PATCH 138/148] small corrections in bot.py --- src/bot.py | 39 +++++++++++++-------------------------- 1 file changed, 13 insertions(+), 26 deletions(-) diff --git a/src/bot.py b/src/bot.py index 9db35c6f4..711e52572 100644 --- a/src/bot.py +++ b/src/bot.py @@ -1,6 +1,5 @@ """ -This module contains the main code for the bot -to work and contains the main logic linking the additional modules. +This module contains the main code for the bot to work and contains the main logic linking the additional modules. """ from datetime import datetime, timedelta import re @@ -40,30 +39,30 @@ # Users module without rate limits option users = Users(vault=vault, storage={'db_role': VAULT_DB_ROLE_USERS}) -# Client for download content from supplier +# Client for download content from instagram # If API disabled, the mock object will be used downloader_api_enabled = vault.kv2engine.read_secret(path='configuration/downloader-api').get('enabled', False) if downloader_api_enabled == 'True': - log.info('[Bot]: downloader API is enabled: %s', downloader_api_enabled) + log.info('[Bot]: Downloader api is enabled: %s', downloader_api_enabled) downloader = Downloader(vault=vault) else: - log.warning('[Bot]: downloader API is disabled, using mock object, because enabled flag is %s', downloader_api_enabled) + log.warning('[Bot]: Downloader api is disabled, using mock object, because enabled flag is %s', downloader_api_enabled) downloader = MagicMock() downloader.get_post_content.return_value = { 'post': f"mock_{''.join(random.choices(string.ascii_letters + string.digits, k=10))}", - 'owner': 'undefined', + 'owner': 'mock', 'type': 'fake', 'status': 'completed' } -# Client for upload content to the cloud storage +# Client for upload content to the target storage # If API disabled, the mock object will be used uploader_api_enabled = vault.kv2engine.read_secret(path='configuration/uploader-api').get('enabled', False) if uploader_api_enabled == 'True': - log.info('[Bot]: uploader API is enabled: %s', uploader_api_enabled) + log.info('[Bot]: Uploader API is enabled: %s', uploader_api_enabled) uploader = Uploader(vault=vault) else: - log.warning('[Bot]: uploader API is disabled, using mock object, because enabled flag is %s', uploader_api_enabled) + log.warning('[Bot]: Uploader API is disabled, using mock object, because enabled flag is %s', uploader_api_enabled) uploader = MagicMock() uploader.run_transfers.return_value = 'completed' @@ -83,14 +82,11 @@ def start_command(message: telegram.telegram_types.Message = None) -> 
None: Args: message (telegram.telegram_types.Message): The message object containing information about the chat. - - Returns: - None """ requestor = {'user_id': message.chat.id, 'chat_id': message.chat.id, 'message_id': message.message_id} if users.user_access_check(**requestor).get('access', None) == users.user_status_allow: - log.info('[Bot]: Processing "start" command for user %s...', message.chat.id) - # Main message + log.info('[Bot]: Processing start command for user %s...', message.chat.id) + # Main pinned message reply_markup = telegram.create_inline_markup(ROLES_MAP.keys()) start_message = telegram.send_styled_message( chat_id=message.chat.id, @@ -122,11 +118,8 @@ def bot_callback_query_handler(call: telegram.callback_query = None) -> None: Args: call (telegram.callback_query): The callback query object. - - Returns: - None """ - log.info('[Bot]: Processing button "%s" for user %s...', call.data, call.message.chat.id) + log.info('[Bot]: Processing button %s for user %s...', call.data, call.message.chat.id) requestor = { 'user_id': call.message.chat.id, 'role_id': ROLES_MAP[call.data], 'chat_id': call.message.chat.id, 'message_id': call.message.message_id @@ -154,7 +147,7 @@ def bot_callback_query_handler(call: telegram.callback_query = None) -> None: bot.register_next_step_handler(call.message, reschedule_queue, help_message) else: - log.error('[Bot]: Handler for button "%s" not found', call.data) + log.error('[Bot]: Handler for button %s not found', call.data) else: telegram.send_styled_message( @@ -174,13 +167,10 @@ def unknown_command(message: telegram.telegram_types.Message = None) -> None: Args: message (telegram.telegram_types.Message): The message object containing the unrecognized command. - - Returns: - None """ requestor = {'user_id': message.chat.id, 'chat_id': message.chat.id, 'message_id': message.message_id} if users.user_access_check(**requestor).get('access', None) == users.user_status_allow: - log.error('[Bot]: Invalid command "%s" from user %s', message.text, message.chat.id) + log.error('[Bot]: Invalid command %s from user %s', message.text, message.chat.id) telegram.send_styled_message(chat_id=message.chat.id, messages_template={'alias': 'unknown_command'}) else: telegram.send_styled_message( @@ -200,9 +190,6 @@ def update_status_message(user_id: str = None) -> None: Args: user_id (str): The user id. - - Returns: - None """ try: diff_between_messages = False From 067c774239e9a5f8f3776c4bb009f1280d8205f7 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 14:45:30 +0200 Subject: [PATCH 139/148] corrected tests --- tests/test_init.py | 21 --------------------- tests/test_metrics.py | 1 + 2 files changed, 1 insertion(+), 21 deletions(-) delete mode 100644 tests/test_init.py diff --git a/tests/test_init.py b/tests/test_init.py deleted file mode 100644 index 49f3ddb6d..000000000 --- a/tests/test_init.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -A test for quick setup of the dev environment for testing the release. 
-""" -import subprocess -import pytest - - -@pytest.mark.order(1) -def test_init_dev_environment(vault_configuration_data, prepare_vault): - """ - Check the function for the user who is allow access to the bot - """ - _ = vault_configuration_data - command = ( - "export VAULT_ADDR=http://vault-server:8200 && " - f"export VAULT_APPROLE_ID={prepare_vault['id']} && " - f"export VAULT_APPROLE_SECRETID={prepare_vault['secret-id']} && " - "docker compose -f docker-compose.yml up -d --force-recreate --build pyinstabot-downloader" - ) - with subprocess.Popen(command, shell=True): - print("Running docker-compose.yml...") diff --git a/tests/test_metrics.py b/tests/test_metrics.py index f2866b9de..366001d19 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -41,6 +41,7 @@ def test_metrics_threads_status(metrics_class): """ response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10) assert "pytest_thread_status" in response.text + assert 'pytest_thread_status{thread_name="MainThread"} 1.0' in response.text @pytest.mark.order(16) From 4af18732c221c8ee6d5d271841d5be876e5e87db Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 14:45:50 +0200 Subject: [PATCH 140/148] updated CHANGELOG --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c436d81e7..49a3f0c04 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-13 +## v2.3.0 - 2024-09-14 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes From 69396683a8db76b756b777c848af13eb7a5f65ea Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 15:01:24 +0200 Subject: [PATCH 141/148] fixed linting issues --- src/modules/uploader.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/modules/uploader.py b/src/modules/uploader.py index 0836016c4..aeaec888c 100644 --- a/src/modules/uploader.py +++ b/src/modules/uploader.py @@ -106,17 +106,17 @@ def _check_incomplete_transfers(self) -> None: Returns: None """ - log.info('[Uploader]: Checking incomplete transfers...', __class__.__name__) + log.info('[Uploader]: Checking incomplete transfers...') for root, dirs, _ in os.walk(self.configuration['source-directory']): for dir_name in dirs: sub_directory = os.path.join(root, dir_name) # Check the subdirectory for files sub_files = [f for f in os.listdir(sub_directory) if os.path.isfile(os.path.join(sub_directory, f))] if sub_files: - log.warning('[Uploader]: An unloaded artifact was found: %s', __class__.__name__, sub_directory) + log.warning('[Uploader]: An unloaded artifact was found: %s', sub_directory) self.run_transfers(sub_directory=sub_directory) else: - log.info('[Uploader]: Remove of an empty directory %s', __class__.__name__, sub_directory) + log.info('[Uploader]: Remove of an empty directory %s', sub_directory) os.rmdir(sub_directory) def run_transfers( @@ -137,7 +137,7 @@ def run_transfers( """ transfers = {} result = "" - log.info('[Uploader]: Preparing media files for transfer to the %s...', __class__.__name__, self.configuration['storage-type']) + log.info('[Uploader]: Preparing media files for transfer to the 
%s...', self.configuration['storage-type']) for root, _, files in os.walk(f"{self.configuration['source-directory']}{sub_directory}"): for file in files: if file.split('.')[-1] in self.configuration.get('exclude-types', None): @@ -152,7 +152,7 @@ def run_transfers( result = 'completed' else: result = 'not_completed' - log.info('[Uploader]: List of all transfers %s', __class__.__name__, transfers) + log.info('[Uploader]: List of all transfers %s', transfers) return result def upload_to_cloud( @@ -172,20 +172,20 @@ def upload_to_cloud( or None """ - log.info('[Uploader]: Starting upload file %s to %s://%s', __class__.__name__, source, self.configuration['storage-type'], destination) + log.info('[Uploader]: Starting upload file %s to %s://%s', source, self.configuration['storage-type'], destination) response = None result = None if self.configuration['storage-type'] == 'mega': directory = f"{self.configuration['destination-directory']}/{destination}" - log.info('[Uploader]: Trying found mega folder %s...', __class__.__name__, directory) + log.info('[Uploader]: Trying found mega folder %s...', directory) mega_folder = self.storage.find(directory, exclude_deleted=True) if not mega_folder: self.storage.create_folder(directory) mega_folder = self.storage.find(directory, exclude_deleted=True) - log.info('[Uploader]: Mega folder not found, created new folder %s', __class__.__name__, mega_folder) + log.info('[Uploader]: Mega folder not found, created new folder %s', mega_folder) else: - log.info('[Uploader]: Mega folder %s was found', __class__.__name__, mega_folder) + log.info('[Uploader]: Mega folder %s was found', mega_folder) response = self.storage.upload(filename=source, dest=mega_folder[0]) result = "uploaded" @@ -205,5 +205,5 @@ def upload_to_cloud( response = self.storage.info(f"{self.configuration['destination-directory']}/{destination}/{source.split('/')[-1]}")['etag'] result = "uploaded" - log.info('[Uploader]: %s successful transferred', __class__.__name__, response) + log.info('[Uploader]: %s successful transferred', response) return result From 62f0725a87d588d8b8ee53ff7477777592cf6bc2 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 15:06:38 +0200 Subject: [PATCH 142/148] modified: src/modules/downloader.py --- src/modules/downloader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/modules/downloader.py b/src/modules/downloader.py index 26a6ab0b4..d7872c160 100644 --- a/src/modules/downloader.py +++ b/src/modules/downloader.py @@ -90,7 +90,7 @@ def __init__( ) auth_status = self._login() if auth_status == 'logged_in': - log.info('[Downloader]: Instance created successfully with account ', self.configuration['username']) + log.info('[Downloader]: Instance created successfully with account %s', self.configuration['username']) else: raise FailedAuthInstaloader("Failed to authenticate the Instaloader instance.") From 4f8cd1559f7956ac7d0e32a2762a9053f54b2336 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 15:09:15 +0200 Subject: [PATCH 143/148] modified: src/modules/downloader.py --- src/modules/downloader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/modules/downloader.py b/src/modules/downloader.py index d7872c160..4e6bf1082 100644 --- a/src/modules/downloader.py +++ b/src/modules/downloader.py @@ -124,7 +124,7 @@ def _login(self) -> Union[str, None]: self.configuration['password'] ) self.instaloader.save_session_to_file(self.configuration['session-file']) - log.info('[Downloader]: Login with password was 
successful. Save session in %s', self.configuration['sessionfile']) + log.info('[Downloader]: Login with password was successful. Saved session in %s', self.configuration['sessionfile']) return 'logged_in' if self.configuration['login-method'] == 'anonymous': From 3de9a777a566b01822a62de9b8ec1c39488fc250 Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 15:15:06 +0200 Subject: [PATCH 144/148] modified: src/modules/database.py --- src/modules/database.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/modules/database.py b/src/modules/database.py index c999c2b74..f88c484d1 100644 --- a/src/modules/database.py +++ b/src/modules/database.py @@ -403,7 +403,9 @@ def _reset_stale_records(self) -> None: values="state = 'updated'", condition=f"id = '{message[0]}'" ) - log.info('[Database]: Stale status messages have been reset') + log.info('[Database]: Stale status messages have been reset') + else: + log.info('[Database]: No stale status messages found') def add_message_to_queue( self, From cfc5913d3f221a8831b07b5a5f08dd232d6e5cfb Mon Sep 17 00:00:00 2001 From: obervinov Date: Sat, 14 Sep 2024 15:21:40 +0200 Subject: [PATCH 145/148] modified: scripts/vault-init.sh --- scripts/vault-init.sh | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/scripts/vault-init.sh b/scripts/vault-init.sh index 45e26dee1..0c7b36d28 100644 --- a/scripts/vault-init.sh +++ b/scripts/vault-init.sh @@ -26,7 +26,19 @@ vault write pyinstabot-downloader-database/config/postgresql \ connection_url="postgresql://{{username}}:{{password}}@localhost:5432/pyinstabot-downloader?sslmode=disable" \ username="postgres" \ password="changeme" -vault write pyinstabot-downloader-database/roles/pyinstabot-downloader \ +vault write pyinstabot-downloader-database/roles/pyinstabot-downloader-bot \ + db_name=postgresql \ + creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \ + revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \ + default_ttl="24h" \ + max_ttl="72h" +vault write pyinstabot-downloader-database/roles/pyinstabot-downloader-users \ + db_name=postgresql \ + creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \ + revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \ + default_ttl="24h" \ + max_ttl="72h" +vault write pyinstabot-downloader-database/roles/pyinstabot-downloader-users-rl \ db_name=postgresql \ creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL 
SEQUENCES IN SCHEMA public TO \"{{name}}\";" \ revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \ From 26b914386ab48a664f266b4043a89e568d617c33 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 4 Oct 2024 13:11:14 +0400 Subject: [PATCH 146/148] finalizing release v2.3.0 --- CHANGELOG.md | 3 +- poetry.lock | 128 ++++++++++++++--------------- pyproject.toml | 4 +- src/bot.py | 5 +- src/configs/constants.py | 2 +- src/migrations/0003_users_table.py | 17 +++- src/modules/tools.py | 14 ++++ 7 files changed, 98 insertions(+), 75 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 49a3f0c04..6528bff2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## v2.3.0 - 2024-09-14 +## v2.3.0 - 2024-10-04 ### What's Changed **Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95 #### 💥 Breaking Changes @@ -15,6 +15,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p * bump users-package to major version `3.0.3` * bump telegram-package to major version `2.0.1` * add tests for database and metrics modules +* add proxy support for all dependencies with `requests` library * [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33) #### 🐛 Bug Fixes * general bug fixes and improvements diff --git a/poetry.lock b/poetry.lock index ed98df0f0..9181b2512 100644 --- a/poetry.lock +++ b/poetry.lock @@ -262,18 +262,15 @@ stone = ">=2,<3.3.3" [[package]] name = "emoji" -version = "2.12.1" +version = "2.13.2" description = "Emoji for Python" optional = false python-versions = ">=3.7" files = [ - {file = "emoji-2.12.1-py3-none-any.whl", hash = "sha256:a00d62173bdadc2510967a381810101624a2f0986145b8da0cffa42e29430235"}, - {file = "emoji-2.12.1.tar.gz", hash = "sha256:4aa0488817691aa58d83764b6c209f8a27c0b3ab3f89d1b8dceca1a62e4973eb"}, + {file = "emoji-2.13.2-py3-none-any.whl", hash = "sha256:ef6f2ee63b245e934c763b1a9a0637713955aa3d9e322432e036bb60559de4d6"}, + {file = "emoji-2.13.2.tar.gz", hash = "sha256:f95d10d96c5f21299ed2c4b32511611ba890b8c07f5f2bf5b04d5d3eee91fd19"}, ] -[package.dependencies] -typing-extensions = ">=4.7.0" - [package.extras] dev = ["coverage", "pytest (>=7.4.4)"] @@ -296,33 +293,40 @@ parser = ["pyhcl (>=0.4.4,<0.5.0)"] [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff 
(>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "8.4.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, - {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "instaloader" @@ -651,13 +655,13 @@ files = [ [[package]] name = "prometheus-client" -version = "0.20.0" +version = "0.21.0" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, + {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, + {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, ] [package.extras] @@ -757,53 +761,54 @@ files = [ [[package]] name = "pycryptodome" -version = "3.20.0" +version = "3.21.0" description = "Cryptographic library for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = 
"sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, - {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, ] [[package]] name = "pytelegrambotapi" -version = "4.22.1" +version = "4.23.0" description = "Python Telegram bot api." 
optional = false python-versions = ">=3.8" files = [ - {file = "pytelegrambotapi-4.22.1-py3-none-any.whl", hash = "sha256:58a4bc11b054683ba4ef43452e125c80786bbeffd1ba08dfa45291f56e5d08c4"}, + {file = "pytelegrambotapi-4.23.0-py3-none-any.whl", hash = "sha256:4fd4a64f3d5ec389270cf4f1eacd68f6d25d199e1048b76a1caefcb17fbe214b"}, + {file = "pytelegrambotapi-4.23.0.tar.gz", hash = "sha256:ced74787cfaf59d959799786f12a401cdb3abeb58dcd25568fc91363ba1cccfa"}, ] [package.dependencies] @@ -946,17 +951,6 @@ six = ">=1.9.0" [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - [[package]] name = "urllib3" version = "1.26.20" @@ -1035,13 +1029,13 @@ requests = "*" [[package]] name = "zipp" -version = "3.20.1" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] diff --git a/pyproject.toml b/pyproject.toml index c4982f247..81b0be00e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,8 +2,8 @@ name = "pyinstabot-downloader" version = "2.3.0" description = "This project is a Telegram bot that allows you to upload posts from your Instagram profile to clouds like Dropbox, Mega or any WebDav compatible cloud storage." 
-authors = ["Bervinov Oleg "] -maintainers = ["Bervinov Oleg "] +authors = ["Bervinov Oleg "] +maintainers = ["Bervinov Oleg "] license = "MIT" readme = "README.md" homepage = "https://github.com/obervinov/pyinstabot-downloader" diff --git a/src/bot.py b/src/bot.py index 711e52572..c367f8aaf 100644 --- a/src/bot.py +++ b/src/bot.py @@ -21,7 +21,7 @@ ) from modules.database import DatabaseClient from modules.exceptions import FailedMessagesStatusUpdater -from modules.tools import get_hash +from modules.tools import get_hash, check_proxy from modules.downloader import Downloader from modules.uploader import Uploader from modules.metrics import Metrics @@ -39,6 +39,9 @@ # Users module without rate limits option users = Users(vault=vault, storage={'db_role': VAULT_DB_ROLE_USERS}) +# Detected connection type +check_proxy() + # Client for download content from instagram # If API disabled, the mock object will be used downloader_api_enabled = vault.kv2engine.read_secret(path='configuration/downloader-api').get('enabled', False) diff --git a/src/configs/constants.py b/src/configs/constants.py index 39ffe1b2e..f7bb436b6 100644 --- a/src/configs/constants.py +++ b/src/configs/constants.py @@ -14,7 +14,7 @@ 'Reschedule Queue': 'reschedule_queue', } -# Queue handler +# Other constants QUEUE_FREQUENCY = 60 STATUSES_MESSAGE_FREQUENCY = 15 METRICS_PORT = 8000 diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py index 60b169861..39aa11595 100644 --- a/src/migrations/0003_users_table.py +++ b/src/migrations/0003_users_table.py @@ -37,9 +37,6 @@ def execute(obj): if not table: print(f"{NAME}: The {table_name} table does not exist. Skip the migration.") - elif len(columns) < 1: - print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. Skip the migration.") - else: for column in add_columns: if column[0] in columns: @@ -56,3 +53,17 @@ def execute(obj): except obj.errors.FeatureNotSupported as error: print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}") conn.rollback() + + for column in update_columns: + if column[0] in columns: + try: + print(f"{NAME}: Alter column {column[0]} to {column[2]}...") + cursor.execute(f"ALTER TABLE {table_name} ALTER COLUMN {column[0]} SET NOT NULL;") + cursor.execute(f"ALTER TABLE {table_name} ADD CONSTRAINT {column[0]}_unique UNIQUE ({column[0]});") + conn.commit() + print(f"{NAME}: Column {column[0]} has been updated to {column[2]}.") + except obj.errors as error: + print(f"{NAME}: Failed to update column {column[0]}: {error}") + conn.rollback() + else: + print(f"{NAME}: The {table_name} table does not have the {column[0]} column. Skip updating.") diff --git a/src/modules/tools.py b/src/modules/tools.py index 751fc5877..549cd4898 100644 --- a/src/modules/tools.py +++ b/src/modules/tools.py @@ -1,6 +1,8 @@ """This module contains the tools for this python project""" +import os from typing import Union import hashlib +from logger import log def get_hash(data: Union[str, dict] = None) -> str: @@ -22,3 +24,15 @@ def get_hash(data: Union[str, dict] = None) -> str: data = str(data) hasher.update(data.encode('utf-8')) return hasher.hexdigest() + + +def check_proxy() -> None: + """ + Check if the proxy is set up. 
+ """ + http_proxy = os.environ.get('HTTP_PROXY', None) + https_proxy = os.environ.get('HTTPS_PROXY', None) + if http_proxy or https_proxy: + log.info('[Tools]: Proxy is set up http: %s, https: %s', http_proxy, https_proxy) + else: + log.info('[Tools]: Direct connection will be used because the proxy is not set up') From a084acc855b6b977b076ea3922b01673e9a31350 Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 4 Oct 2024 13:18:40 +0400 Subject: [PATCH 147/148] fixed exception --- src/migrations/0003_users_table.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py index 39aa11595..a9caa28b4 100644 --- a/src/migrations/0003_users_table.py +++ b/src/migrations/0003_users_table.py @@ -62,7 +62,8 @@ def execute(obj): cursor.execute(f"ALTER TABLE {table_name} ADD CONSTRAINT {column[0]}_unique UNIQUE ({column[0]});") conn.commit() print(f"{NAME}: Column {column[0]} has been updated to {column[2]}.") - except obj.errors as error: + # pylint: disable=broad-exception-caught + except Exception as error: print(f"{NAME}: Failed to update column {column[0]}: {error}") conn.rollback() else: From d52a00a3bb2a17701d5a2c1215a8c98330c026df Mon Sep 17 00:00:00 2001 From: obervinov Date: Fri, 4 Oct 2024 13:23:00 +0400 Subject: [PATCH 148/148] fixed typo in CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6528bff2d..e95705c67 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p #### 🚀 Features * bump workflow version to `1.2.9` * bump vault-package to major version `3.0.0` -* bump users-package to major version `3.0.3` +* bump users-package to major version `3.0.2` * bump telegram-package to major version `2.0.1` * add tests for database and metrics modules * add proxy support for all dependencies with `requests` library
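
A note on the proxy support mentioned above: `check_proxy()` only logs whether `HTTP_PROXY`/`HTTPS_PROXY` are defined; the `requests`-based dependencies pick these variables up on their own, because `requests` resolves proxies from the environment when `Session.trust_env` is enabled (the default). Below is a minimal sketch of how that resolution can be verified. It is not part of the patches above, and the proxy address is a hypothetical placeholder.

    # Illustrative sketch only; the proxy URL is a made-up example.
    import os
    import urllib.request

    import requests

    os.environ["HTTPS_PROXY"] = "http://proxy.example.com:3128"  # hypothetical value

    # Standard-library helper that reads proxy settings from the environment.
    print(urllib.request.getproxies())    # e.g. {'https': 'http://proxy.example.com:3128'}

    # requests sessions honour the same variables by default, so libraries that use
    # a default Session typically need no explicit proxies= argument.
    session = requests.Session()
    print(session.trust_env)              # True
    settings = session.merge_environment_settings(
        url="https://api.telegram.org", proxies={}, stream=None, verify=True, cert=None,
    )
    print(settings["proxies"])            # {'https': 'http://proxy.example.com:3128'}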