diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
index 80a3a2829..45bea32ae 100644
--- a/.github/workflows/pr.yaml
+++ b/.github/workflows/pr.yaml
@@ -11,20 +11,20 @@ on:
jobs:
changelog:
- uses: obervinov/_templates/.github/workflows/changelog.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/changelog.yaml@v1.2.9
pylint:
- uses: obervinov/_templates/.github/workflows/pylint.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/pylint.yaml@v1.2.9
pytest:
- uses: obervinov/_templates/.github/workflows/pytest-with-vault.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/pytest-with-vault.yaml@v1.2.9
pyproject:
- uses: obervinov/_templates/.github/workflows/pyproject.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/pyproject.yaml@v1.2.9
pr:
- uses: obervinov/_templates/.github/workflows/pr.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/pr.yaml@v1.2.9
build-pr-image:
- uses: obervinov/_templates/.github/workflows/docker.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/docker.yaml@v1.2.9
needs: [changelog, pylint, pytest, pyproject]
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 51825053e..96d7654b6 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -10,14 +10,14 @@ on:
jobs:
create-release:
- uses: obervinov/_templates/.github/workflows/release.yaml@v1.2.8
+ uses: obervinov/_templates/.github/workflows/release.yaml@v1.2.9
cleanup-untagged-images:
runs-on: ubuntu-latest
steps:
- name: Delete untagged images from GitHub Container Registry
continue-on-error: true
- uses: Chizkiyahu/delete-untagged-ghcr-action@v3
+ uses: Chizkiyahu/delete-untagged-ghcr-action@v4
with:
token: ${{ secrets.PAT_GHCR_CLEANUP }}
package_name: 'pyinstabot-downloader'
@@ -26,5 +26,5 @@ jobs:
owner_type: 'user'
# milestone:
- # uses: obervinov/_templates/.github/workflows/milestone.yaml@v1.2.8
+ # uses: obervinov/_templates/.github/workflows/milestone.yaml@v1.2.9
# needs: [create-release]
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2cf795786..e95705c67 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/).
+## v2.3.0 - 2024-10-04
+### What's Changed
+**Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.1...v2.3.0 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/95
+#### 💥 Breaking Changes
+* all user data is now stored in the database
+* PostgreSQL credentials are now issued via the Vault Database Engine
+#### 🚀 Features
+* bump workflow version to `1.2.9`
+* bump vault-package to major version `3.0.0`
+* bump users-package to major version `3.0.2`
+* bump telegram-package to major version `2.0.1`
+* add tests for database and metrics modules
+* add proxy support for all dependencies that use the `requests` library
+* [Switch reading of the database connection configuration to db engine](https://github.com/obervinov/pyinstabot-downloader/issues/33)
+#### 🐛 Bug Fixes
+* general bug fixes and improvements
+
+
## v2.2.1 - 2024-08-24
### What's Changed
**Full Changelog**: https://github.com/obervinov/pyinstabot-downloader/compare/v2.2.0...v2.2.1 by @obervinov in https://github.com/obervinov/pyinstabot-downloader/pull/94
diff --git a/Dockerfile b/Dockerfile
index c637eb7b6..1ac6b8b15 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.9.19-alpine3.20
+FROM python:3.9.20-alpine3.20
### External arguments ###
ARG PROJECT_DESCRIPTION
diff --git a/README.md b/README.md
index 9a3bf1666..a957c5005 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@
- [Target storage of the content](#target-storage-of-the-content)
- [Bot configuration source and supported parameters](#bot-configuration-source-and-supported-parameters)
- [Bot persistent data storage](#bot-persistent-data-storage)
-- [How to run project](#-how-to-run-project)
+- [How to run project locally](#-how-to-run-project-locally)
## About this project
@@ -50,22 +50,29 @@ This project is a Telegram bot that allows you to upload posts from your Instagr
## Requirements
-- Vault server - [a storage of secrets for bot with kv v2 engine](https://developer.hashicorp.com/vault/docs/secrets/kv/kv-v2)
-- Dropbox [api token](https://dropbox.tech/developers/generate-an-access-token-for-your-own-account) or Mega.nz [account](https://mega.nz) or WebDav provider [url, username and password](https://docs.nextcloud.com/server/latest/user_manual/en/files/access_webdav.html)
-- Telegram bot api token - [instructions for creating bot and getting a token of api](https://learn.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-telegram?view=azure-bot-service-4.0)
-- Instagram username/password - [login and password from the instagram account, it is advisable to create a new account](https://www.instagram.com/accounts/emailsignup/)
-- Postgresql - [a storage of project persistent data](https://www.postgresql.org/download/)
+- **Vault server**
+  - [store the project configuration in the kv2 engine](https://developer.hashicorp.com/vault/docs/secrets/kv/kv-v2)
+  - [generate database access credentials](https://developer.hashicorp.com/vault/docs/secrets/databases)
+ - [prepare the vault server](scripts/vault-init.sh)
+- **Cloud Storage** (choose one)
+ - dropbox: [api token](https://dropbox.tech/developers/generate-an-access-token-for-your-own-account)
+ - mega: [account](https://mega.nz)
+ - webdav: [url, username and password](https://docs.nextcloud.com/server/latest/user_manual/en/files/access_webdav.html)
+- **Telegram**
+ - bot: [api token](https://learn.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-telegram?view=azure-bot-service-4.0)
+- **Instagram** (choose one)
+  - account: [username/password](https://www.instagram.com/accounts/emailsignup/) or [a session imported from the browser](https://raw.githubusercontent.com/instaloader/instaloader/master/docs/codesnippets/615_import_firefox_session.py)
+- **Postgresql**
+ - database: [empty database](scripts/psql-init.sh)
## Environment variables
| Variable | Description | Default value |
| ------------- | ------------- | ------------- |
| `LOGGER_LEVEL` | [The logging level of the logging module](https://docs.python.org/3/library/logging.html#logging-levels) | `INFO` |
-| `BOT_NAME` | The name of the bot, used to determine the unique mount point in the vault | `pyinstabot-downloader` |
+| `TELEGRAM_BOT_NAME` | The name of the bot, used to determine the unique mount point in the vault | `pyinstabot-downloader` |
| `MESSAGES_CONFIG` | The path to the message template file | `src/configs/messages.json` |
-| `VAULT_ADDR` | The address at which the vault server will be available to the bot | `None` |
-| `VAULT_APPROLE_ID` | [Approle id created during vault setup](https://developer.hashicorp.com/vault/docs/auth/approle) | `None` |
-| `VAULT_APPROLE_SECRETID` | [Approle secret id created during vault setup](https://developer.hashicorp.com/vault/docs/auth/approle) | `None` |
+| `VAULT_*` | All supported vault environment variables can be found [here](https://github.com/obervinov/vault-package/tree/v3.0.0?tab=readme-ov-file#-supported-environment-variables) | - |
## Prepare and configure environment
@@ -87,21 +94,38 @@ This project is a Telegram bot that allows you to upload posts from your Instagr
### Bot configuration source and supported parameters
- All bot configuration is stored in the `Vault Secrets`
-_except for the part of the configuration that configures the connection to `Vault`_
+ All bot configuration is stored in `Vault Secrets` (_except for the parameters that configure the connection to `Vault` itself_)
+
+
+- `pyinstabot-downloader-database` - the vault database engine mount point; it returns a temporary username and password for the database. More information about the database engine can be found [here](https://developer.hashicorp.com/vault/docs/secrets/databases/postgresql) and [here](https://developer.hashicorp.com/vault/tutorials/db-credentials/database-secrets)
+
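+  A minimal sketch of reading short-lived credentials from this mount point, assuming the role created by [vault-init.sh](scripts/vault-init.sh) is also named `pyinstabot-downloader`:
+  ```bash
+  # Issue a temporary username/password pair from the Vault database engine.
+  # The role name is an assumption - substitute the role configured on your vault server.
+  vault read pyinstabot-downloader-database/creds/pyinstabot-downloader
+  ```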
+
- `configuration/database`: database connection parameters
```json
{
- "database": "pyinstabot-downloader",
+ "dbname": "pyinstabot-downloader",
"host": "postgresql.example.com",
- "password": "qwerty123",
"port": "5432",
- "user": "user1",
"connections": "10"
}
```
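+
+  This secret can be written with the Vault CLI; a sketch, assuming the kv2 mount point matches the default `TELEGRAM_BOT_NAME` (`pyinstabot-downloader`):
+  ```bash
+  # Write the database connection parameters to the kv2 engine.
+  vault kv put pyinstabot-downloader/configuration/database \
+    dbname="pyinstabot-downloader" host="postgresql.example.com" \
+    port="5432" connections="10"
+  ```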
+
+- `configuration/telegram`: telegram bot configuration
+ ```json
+ {
+ "token": "123456:qwerty"
+ }
+ ```
+
+
- `configuration/downloader-api`: downloader module configuration (for downloading content from instagram)
+
+ Clarification of non-obvious parameters
+  - `fatal-status-codes`: a list of status codes that are considered fatal and stop the downloader module
+  - `iphone-support`: if `True`, the downloader module will use the iPhone user-agent
+ - `login-method`: the method of logging into the instagram account (`session`, `password`, `anonymous`)
+  - `session-base64`: the session file content in base64 format (only for the `session` login method; see the sketch after the example below)
```json
{
"enabled": "True",
@@ -115,24 +139,20 @@ _except for the part of the configuration that configures the connection to `Vau
"username": "username1"
}
```
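+
+  A sketch of producing the `session-base64` value from an instaloader session file; the file path is hypothetical, and `-w0` (disable line wrapping) is GNU `base64` syntax:
+  ```bash
+  # Encode the saved instagram session as a single base64 line for the "session" login method.
+  base64 -w0 ~/.config/instaloader/session-username1
+  ```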
- Clarification of non-obvious parameters
- - `fatal-status-codes`: a list of status codes that are considered fatal and stop downloader module work
- - `iphone-support`: if `True`, the downloader module will use the iphone user-agent
- - `login-method`: the method of logging into the instagram account (`session`, `password`, `anonymous`)
- - `session-base64`: the session file content in base64 format
-
-- `configuration/telegram`: telegram bot configuration
- ```json
- {
- "token": "123456:qwerty"
- }
- ```
+
- `configuration/uploader-api`: uploader module configuration (for upload content to the target storage)
+
+ Clarification of non-obvious parameters
+ - `destination-directory`: the directory in the target storage where the content will be uploaded
+ - `exclude-types`: a list of file extensions that will be excluded from the upload (for example, `.txt` - text from the post)
+ - `source-directory`: the directory where the content will be stored before uploading (temporary directory)
+ - `storage-type`: the type of storage where the content will be uploaded (`dropbox`, `mega`, `webdav`)
+ - `url`: the url of the target webdav directory (only for `webdav` storage)
```json
{
- "destination-directory": "cloud-directory/",
"enabled": "True",
+ "destination-directory": "cloud-directory/",
"exclude-types": "[\".txt\", \".tmp\"]",
"password": "qwerty123",
"source-directory": "data/",
@@ -141,14 +161,14 @@ _except for the part of the configuration that configures the connection to `Vau
"url": "https://webdav.example.com/directory"
}
```
- Clarification of non-obvious parameters
- - `destination-directory`: the directory in the target storage where the content will be uploaded
- - `exclude-types`: a list of file extensions that will be excluded from the upload (for example, `.txt` - text from the post)
- - `source-directory`: the directory where the content will be stored before uploading (temporary directory)
- - `storage-type`: the type of storage where the content will be uploaded (`dropbox`, `mega`, `webdav`)
- - `url`: the url of the target webdav directory (only for `webdav` storage)
-- `configuration/users/`: user permissions configuration
+
+- `configuration/users/`: users permissions and attributes
+
+ Clarification of non-obvious parameters
+  - `requests`: the number of requests the user can make per day and per hour, plus a random shift in minutes (the scheduling of message processing from the queue is based on this parameter; see the decoding sketch after the example below)
+  - `roles`: a list of roles that grant access to the corresponding functionality ([available roles](src/configs/constants.py#L11-L15))
+  - `status`: the user's access status for the bot (`allowed` or `denied`)
```json
{
"requests": "{\"requests_per_day\": 10, \"requests_per_hour\": 1, \"random_shift_minutes\": 60}",
@@ -156,10 +176,6 @@ _except for the part of the configuration that configures the connection to `Vau
"status": "allowed"
}
```
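+
+  Because `requests` is itself a JSON object encoded as a string, it has to be decoded twice; a minimal sketch with `jq` (assuming `jq` is available):
+  ```bash
+  # First parse the secret, then parse the JSON string embedded in the "requests" field.
+  echo '{"requests": "{\"requests_per_day\": 10, \"requests_per_hour\": 1, \"random_shift_minutes\": 60}"}' \
+    | jq '.requests | fromjson'
+  ```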
- Clarification of non-obvious parameters
- - `requests`: the number of requests that the user can make per day and per hour, as well as the random shift in minutes (scheduling of message processing from the queue works on the basis of this parameter)
- - `roles`: list of roles that allow to use the corresponding functionality ([available roles](src/configs/constants.py#L19-L23)).
- - `status`: allowed or denied user access to the bot
#### You can use an existing vault-server or launch a new one using docker-compose
Scripts for configuring the vault-server are located in the [vault-init.sh](scripts/vault-init.sh)
@@ -173,11 +189,11 @@ cd pyinstabot-downloader
docker-compose -f docker-compose.yml up vault-server -d
# Initialize and unseal new vault-server
+export VAULT_ADDR=http://0.0.0.0:8200
vault operator init
vault operator unseal
# Run the script for configuring the vault-server for this bot project
-export VAULT_ADDR=http://localhost:8200
export VAULT_TOKEN=hvs.123456qwerty
./scripts/vault-init.sh
```
@@ -196,15 +212,16 @@ export VAULT_TOKEN=hvs.123456qwerty
### Bot persistent data storage
- Persistent data storage is implemented using `Postgresql`
-You can familiarize yourself with the
+ Persistent data storage is implemented using `Postgresql`
- data structure, tables and assignment of tables [here](src/configs/databases.json)
- migrations [here](src/migrations/)
-The database structure is created automatically when the bot starts. Bot checks the database structure and creates missing tables if necessary.
-After checking the database structure, the bot executes the migrations in the order of their numbering.
-All that is required is a database and the rights of the owner of this data database.
-To quickly prepare an instance, you can execute the[psql-init.sh](scripts/psql-init.sh) script
+The database structure is created automatically when the bot starts:
+  1. the bot checks the database structure and creates any missing tables
+  2. the bot then executes the migrations in the order of their numbering
+
+To quickly prepare an instance, you can execute the [psql-init.sh](scripts/psql-init.sh) script
+
```bash
git clone https://github.com/obervinov/pyinstabot-downloader.git
cd pyinstabot-downloader
@@ -221,15 +238,16 @@ export PGDATABASE=postgres
**What data is stored in tables:**
- users requests queue
- users metadata
-- history of processed users requests
-- migration history
-- messages sent by the bot
+- history of user requests
+- history of processed messages
+- migrations history
+- service messages sent by the bot
## How to run project locally
```sh
export VAULT_APPROLE_ID={change_me}
-export VAULT_APPROLE_SECRETID={change_me}
+export VAULT_APPROLE_SECRET_ID={change_me}
export VAULT_ADDR={change_me}
docker compose -f docker-compose.yml up -d
```
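+
+Once the stack is up, you can check that the bot started correctly, for example:
+```sh
+# Follow the bot logs; the container name is set in docker-compose.yml
+docker logs -f pyinstabot-downloader
+```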
@@ -238,4 +256,4 @@ docker compose -f docker-compose.yml up -d
## GitHub Actions
| Name | Version |
| ------------------------ | ----------- |
-| GitHub Actions Templates | [v1.2.8](https://github.com/obervinov/_templates/tree/v1.2.8) |
+| GitHub Actions Templates | [v1.2.9](https://github.com/obervinov/_templates/tree/v1.2.9) |
diff --git a/SECURITY.md b/SECURITY.md
index 22ac1020f..247703120 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -11,4 +11,4 @@ Versions supported to fix vulnerabilities
## Reporting a Vulnerability
-To report a vulnerability to me, just open the issue https://github.com/obervinov/pyinstabot-downloader/security/advisories/new
\ No newline at end of file
+To report a vulnerability to me, just open a new security advisory: https://github.com/obervinov/pyinstabot-downloader/security/advisories/new
diff --git a/docker-compose.yml b/docker-compose.yml
index 3ab81a3a1..a0ed473ed 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -46,14 +46,16 @@ services:
args:
PROJECT_NAME: pyinstabot-downloader
PROJECT_DESCRIPTION: "This project is a Telegram bot that allows you to upload posts from your Instagram profile to clouds like Dropbox, Mega or any WebDav compatible cloud storage."
- PROJECT_VERSION: 2.2.0
+ PROJECT_VERSION: 2.3.0
container_name: pyinstabot-downloader
restart: always
environment:
- TELEGRAM_BOT_NAME=pyinstabot-downloader
- VAULT_APPROLE_ID=${VAULT_APPROLE_ID}
- - VAULT_APPROLE_SECRETID=${VAULT_APPROLE_SECRETID}
+ - VAULT_APPROLE_SECRET_ID=${VAULT_APPROLE_SECRET_ID}
- VAULT_ADDR=${VAULT_ADDR}
+ - VAULT_AUTH_TYPE=approle
+ - VAULT_NAMESPACE=pyinstabot-downloader
- LOGGER_LEVEL=DEBUG
- MESSAGES_CONFIG=configs/messages.json
volumes:
diff --git a/poetry.lock b/poetry.lock
index 0727bf244..9181b2512 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,89 +2,89 @@
[[package]]
name = "certifi"
-version = "2024.7.4"
+version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
- {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
[[package]]
name = "cffi"
-version = "1.17.0"
+version = "1.17.1"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
files = [
- {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"},
- {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"},
- {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"},
- {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"},
- {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"},
- {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"},
- {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"},
- {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"},
- {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"},
- {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"},
- {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"},
- {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"},
- {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"},
- {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"},
- {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"},
- {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"},
- {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"},
- {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"},
- {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"},
- {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"},
- {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"},
- {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"},
- {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"},
- {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"},
- {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"},
- {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"},
- {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"},
- {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"},
- {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"},
- {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"},
- {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"},
- {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"},
- {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"},
- {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"},
- {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"},
- {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"},
- {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"},
- {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"},
- {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"},
- {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"},
- {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"},
- {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"},
- {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"},
- {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"},
- {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"},
- {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"},
- {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"},
- {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"},
- {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"},
- {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"},
- {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"},
- {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"},
- {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"},
- {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"},
- {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"},
- {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"},
- {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"},
- {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"},
- {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"},
- {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"},
- {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"},
- {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"},
- {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"},
- {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"},
- {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"},
- {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"},
- {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+ {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+ {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+ {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+ {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+ {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+ {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
+ {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
+ {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
+ {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
+ {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
+ {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
+ {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
+ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
+ {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
[package.dependencies]
@@ -189,51 +189,45 @@ files = [
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-files = [
- {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
- {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
[[package]]
name = "cryptography"
-version = "43.0.0"
+version = "42.0.8"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"},
- {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"},
- {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"},
- {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"},
- {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"},
- {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"},
- {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"},
- {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"},
- {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"},
- {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"},
- {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"},
- {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"},
- {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"},
- {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"},
- {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"},
- {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"},
- {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"},
- {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"},
- {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"},
- {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"},
- {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"},
- {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"},
- {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"},
- {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"},
- {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"},
- {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"},
- {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"},
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
+ {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
+ {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
+ {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
+ {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
+ {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
+ {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
]
[package.dependencies]
@@ -246,7 +240,7 @@ nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -268,35 +262,18 @@ stone = ">=2,<3.3.3"
[[package]]
name = "emoji"
-version = "2.12.1"
+version = "2.13.2"
description = "Emoji for Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "emoji-2.12.1-py3-none-any.whl", hash = "sha256:a00d62173bdadc2510967a381810101624a2f0986145b8da0cffa42e29430235"},
- {file = "emoji-2.12.1.tar.gz", hash = "sha256:4aa0488817691aa58d83764b6c209f8a27c0b3ab3f89d1b8dceca1a62e4973eb"},
+ {file = "emoji-2.13.2-py3-none-any.whl", hash = "sha256:ef6f2ee63b245e934c763b1a9a0637713955aa3d9e322432e036bb60559de4d6"},
+ {file = "emoji-2.13.2.tar.gz", hash = "sha256:f95d10d96c5f21299ed2c4b32511611ba890b8c07f5f2bf5b04d5d3eee91fd19"},
]
-[package.dependencies]
-typing-extensions = ">=4.7.0"
-
[package.extras]
dev = ["coverage", "pytest (>=7.4.4)"]
-[[package]]
-name = "exceptiongroup"
-version = "1.2.2"
-description = "Backport of PEP 654 (exception groups)"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
- {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
-]
-
-[package.extras]
-test = ["pytest (>=6)"]
-
[[package]]
name = "hvac"
version = "2.3.0"
@@ -316,53 +293,49 @@ parser = ["pyhcl (>=0.4.4,<0.5.0)"]
[[package]]
name = "idna"
-version = "3.7"
+version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
files = [
- {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
- {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+ {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]
+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
[[package]]
name = "importlib-metadata"
-version = "8.2.0"
+version = "8.5.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"},
- {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"},
+ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+ {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
]
[package.dependencies]
-zipp = ">=0.5"
+zipp = ">=3.20"
[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
-test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
-
-[[package]]
-name = "iniconfig"
-version = "2.0.0"
-description = "brain-dead simple config-ini parsing"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
- {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
-]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+type = ["pytest-mypy"]
[[package]]
name = "instaloader"
-version = "4.13"
+version = "4.13.1"
description = "Download pictures (or videos) along with their captions and other metadata from Instagram."
optional = false
python-versions = ">=3.8"
files = [
- {file = "instaloader-4.13.tar.gz", hash = "sha256:49b15c3c41ba9287ddecacb57c5fdd1ee706107117b4b8ddd9ccb56ab75c573c"},
+ {file = "instaloader-4.13.1.tar.gz", hash = "sha256:36774ea1076eeb236f8782d221e3737f71ddc023042f0b13761429ef137f1133"},
]
[package.dependencies]
@@ -649,24 +622,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "more-itertools"
-version = "10.4.0"
+version = "10.5.0"
description = "More routines for operating on iterables, beyond itertools"
optional = false
python-versions = ">=3.8"
files = [
- {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"},
- {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"},
-]
-
-[[package]]
-name = "packaging"
-version = "24.1"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
- {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+ {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"},
+ {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"},
]
[[package]]
@@ -680,21 +642,6 @@ files = [
{file = "pathlib-1.0.1.tar.gz", hash = "sha256:6940718dfc3eff4258203ad5021090933e5c04707d5ca8cc9e73c94a7894ea9f"},
]
-[[package]]
-name = "pluggy"
-version = "1.5.0"
-description = "plugin and hook calling mechanisms for python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
- {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
-]
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
[[package]]
name = "ply"
version = "3.11"
@@ -708,13 +655,13 @@ files = [
[[package]]
name = "prometheus-client"
-version = "0.20.0"
+version = "0.21.0"
description = "Python client for the Prometheus monitoring system."
optional = false
python-versions = ">=3.8"
files = [
- {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"},
- {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"},
+ {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"},
+ {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"},
]
[package.extras]
@@ -814,53 +761,54 @@ files = [
[[package]]
name = "pycryptodome"
-version = "3.20.0"
+version = "3.21.0"
description = "Cryptographic library for Python"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
- {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"},
- {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"},
- {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"},
- {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"},
- {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"},
- {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"},
- {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"},
- {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"},
- {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"},
- {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"},
- {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"},
- {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"},
- {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"},
- {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"},
- {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"},
- {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"},
- {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"},
- {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"},
- {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"},
- {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"},
- {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"},
- {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"},
- {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"},
- {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"},
- {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"},
- {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"},
- {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"},
- {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"},
- {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"},
- {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"},
- {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"},
- {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"},
+ {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"},
+ {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"},
+ {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"},
+ {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"},
+ {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"},
+ {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"},
+ {file = "pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"},
+ {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"},
+ {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"},
+ {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"},
+ {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"},
+ {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"},
+ {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"},
+ {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"},
+ {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"},
+ {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"},
+ {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"},
]
[[package]]
name = "pytelegrambotapi"
-version = "4.22.1"
+version = "4.23.0"
description = "Python Telegram bot api."
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytelegrambotapi-4.22.1-py3-none-any.whl", hash = "sha256:58a4bc11b054683ba4ef43452e125c80786bbeffd1ba08dfa45291f56e5d08c4"},
+ {file = "pytelegrambotapi-4.23.0-py3-none-any.whl", hash = "sha256:4fd4a64f3d5ec389270cf4f1eacd68f6d25d199e1048b76a1caefcb17fbe214b"},
+ {file = "pytelegrambotapi-4.23.0.tar.gz", hash = "sha256:ced74787cfaf59d959799786f12a401cdb3abeb58dcd25568fc91363ba1cccfa"},
]
[package.dependencies]
@@ -877,28 +825,6 @@ redis = ["redis (>=3.4.1)"]
uvicorn = ["uvicorn"]
watchdog = ["watchdog"]
-[[package]]
-name = "pytest"
-version = "8.3.2"
-description = "pytest: simple powerful testing with Python"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
- {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=1.5,<2"
-tomli = {version = ">=1", markers = "python_version < \"3.11\""}
-
-[package.extras]
-dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
-
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
@@ -926,20 +852,20 @@ files = [
[[package]]
name = "requests"
-version = "2.32.3"
+version = "2.29.0"
description = "Python HTTP for Humans."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
files = [
- {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
- {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+ {file = "requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b"},
+ {file = "requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<3"
+urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
@@ -989,7 +915,7 @@ six = ">=1.12.0"
[[package]]
name = "telegram"
-version = "1.2.0"
+version = "2.0.1"
description = "This is an additional implementation compared to the telebot module. This module is designed for quick initialization, authorization and rendering of various buttons/widgets for telegram bots."
optional = false
python-versions = "^3.9"
@@ -1000,14 +926,13 @@ develop = false
logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"}
messages = {git = "https://github.com/obervinov/messages-package.git", tag = "v1.0.4"}
pyTelegramBotAPI = "^4"
-pytest = "^8"
-vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v2.0.4"}
+vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0"}
[package.source]
type = "git"
url = "https://github.com/obervinov/telegram-package.git"
-reference = "v1.2.0"
-resolved_reference = "0a7d08cc3b215e6e7bfe13ccfe72f8190f5ae457"
+reference = "v2.0.1"
+resolved_reference = "5b3dc2f377095f59a1cd9645a865e466c9b40a4b"
[[package]]
name = "tenacity"
@@ -1026,49 +951,26 @@ six = ">=1.9.0"
[package.extras]
doc = ["reno", "sphinx", "tornado (>=4.5)"]
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.12.2"
-description = "Backported and Experimental Type Hints for Python 3.8+"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
- {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
-]
-
[[package]]
name = "urllib3"
-version = "2.2.2"
+version = "1.26.20"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=3.8"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
- {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
- {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
+ {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-h2 = ["h2 (>=4,<5)"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "users"
-version = "2.0.5"
-description = "This python module is a simple implementation of user management functionality for telegram bots, such as: authentication, authorization and request limiting."
+version = "3.0.2"
+description = "This python module is a simple implementation of user management functionality for telegram bots, such as: authentication, authorization and requests limiting."
optional = false
python-versions = "^3.9 || ^3.10 || ^3.11"
files = []
@@ -1076,35 +978,39 @@ develop = false
[package.dependencies]
logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"}
-vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v2.0.4"}
+psycopg2-binary = "^2"
+vault = {git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0"}
[package.source]
type = "git"
url = "https://github.com/obervinov/users-package.git"
-reference = "v2.0.5"
-resolved_reference = "f4f4b758d394fd1820c9ee3ce264b5d18a439823"
+reference = "v3.0.2"
+resolved_reference = "25cbde0ef24379f66557e0bb5dc27d9d43d8867e"
[[package]]
name = "vault"
-version = "2.0.4"
-description = "This is an additional implementation compared to the hvac module. The main purpose of which is to simplify the use and interaction with vault for my standard projects. This module contains a set of methods for working with secrets and quickly configuring Vault."
+version = "3.0.0"
+description = "This is an additional implementation compared to the hvac module. The main purpose of which is to simplify the use and interaction with vault for my standard projects. This module contains a set of methods for working with secrets and database engines in vault."
optional = false
python-versions = "^3.9 || ^3.10 || ^3.11"
files = []
develop = false
[package.dependencies]
+cryptography = "^42"
hvac = "^2"
+idna = "^3"
keyring = "^24"
logger = {git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6"}
python-dateutil = "^2"
+requests = ">=2.29.0,<2.30.0"
SecretStorage = "^3"
[package.source]
type = "git"
url = "https://github.com/obervinov/vault-package.git"
-reference = "v2.0.4"
-resolved_reference = "54a312b747ad84c391a837c5bddaed7a021c9d76"
+reference = "v3.0.0"
+resolved_reference = "73b8d0431415eae65e2271483e80353a62b30a28"
[[package]]
name = "webdavclient3"
@@ -1123,20 +1029,24 @@ requests = "*"
[[package]]
name = "zipp"
-version = "3.20.0"
+version = "3.20.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"},
- {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"},
+ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
+ {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
]
[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "d668b2255f52e2bffef853511746eaa0b4c5fca4b16a01736d04a99932ffeffa"
+content-hash = "60e533bec340da4685f73edf8483650533df2c4c5637a5d2b63589aa44bda0f9"
diff --git a/pyproject.toml b/pyproject.toml
index cc40eadbe..81b0be00e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,9 +1,9 @@
[tool.poetry]
name = "pyinstabot-downloader"
-version = "2.2.1"
+version = "2.3.0"
description = "This project is a Telegram bot that allows you to upload posts from your Instagram profile to clouds like Dropbox, Mega or any WebDav compatible cloud storage."
-authors = ["Bervinov Oleg "]
-maintainers = ["Bervinov Oleg "]
+authors = ["Bervinov Oleg "]
+maintainers = ["Bervinov Oleg "]
license = "MIT"
readme = "README.md"
homepage = "https://github.com/obervinov/pyinstabot-downloader"
@@ -26,9 +26,9 @@ mock = "^5"
webdavclient3 = "^3"
prometheus-client = "^0"
logger = { git = "https://github.com/obervinov/logger-package.git", tag = "v1.0.6" }
-vault = { git = "https://github.com/obervinov/vault-package.git", tag = "v2.0.4" }
-users = { git = "https://github.com/obervinov/users-package.git", tag = "v2.0.5" }
-telegram = { git = "https://github.com/obervinov/telegram-package.git", tag = "v1.2.0" }
+vault = { git = "https://github.com/obervinov/vault-package.git", tag = "v3.0.0" }
+users = { git = "https://github.com/obervinov/users-package.git", tag = "v3.0.2" }
+telegram = { git = "https://github.com/obervinov/telegram-package.git", tag = "v2.0.1" }
[build-system]
requires = ["poetry-core"]
diff --git a/scripts/psql-init.sh b/scripts/psql-init.sh
index 6eb8af406..15210e92a 100644
--- a/scripts/psql-init.sh
+++ b/scripts/psql-init.sh
@@ -3,7 +3,7 @@
NEW_USER_PASSWORD=$(pwgen 24 -c1)
psql -c "CREATE DATABASE pyinstabot-downloader;"
psql -c "CREATE USER pyinstabot-downloader WITH PASSWORD '$NEW_USER_PASSWORD';"
-psql -c "GRANT ALL PRIVILEGES ON DATABASE pyinstabot-downloader TO pyinstabot-downloader;"
+psql -c "ALTER DATABASE pyinstabot-downloader OWNER TO pyinstabot-downloader;"
echo "New user: pyinstabot-downloader"
echo "New password: $NEW_USER_PASSWORD"
echo "Database: pyinstabot-downloader"
diff --git a/scripts/vault-init.sh b/scripts/vault-init.sh
index ffb81565a..0c7b36d28 100644
--- a/scripts/vault-init.sh
+++ b/scripts/vault-init.sh
@@ -1,6 +1,10 @@
#!/bin/bash
# Description: Prepare vault for pyinstabot-downloader
+
+# Prepare kv2 engine
vault secrets enable -path=pyinstabot-downloader kv-v2
+
+# Prepare approle
vault policy write pyinstabot-downloader vault/policy.hcl
vault auth enable -path=pyinstabot-downloader approle
vault write auth/pyinstabot-downloader/role/pyinstabot-downloader \
@@ -8,8 +12,36 @@ vault write auth/pyinstabot-downloader/role/pyinstabot-downloader \
token_type=service \
secret_id_num_uses=0 \
token_num_uses=0 \
- token_ttl=1h \
+ token_ttl=24h \
bind_secret_id=true \
mount_point="pyinstabot-downloader" \
secret_id_ttl=0
-# End of snippet
\ No newline at end of file
+
+# Prepare db engine
+vault secrets enable -path=pyinstabot-downloader-database database
+vault write pyinstabot-downloader-database/config/postgresql \
+ plugin_name=postgresql-database-plugin \
+ allowed_roles="pyinstabot-downloader-bot,pyinstabot-downloader-users,pyinstabot-downloader-users-rl" \
+ verify_connection=false \
+ connection_url="postgresql://{{username}}:{{password}}@localhost:5432/pyinstabot-downloader?sslmode=disable" \
+ username="postgres" \
+ password="changeme"
+vault write pyinstabot-downloader-database/roles/pyinstabot-downloader-bot \
+ db_name=postgresql \
+ creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \
+ revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \
+ default_ttl="24h" \
+ max_ttl="72h"
+vault write pyinstabot-downloader-database/roles/pyinstabot-downloader-users \
+ db_name=postgresql \
+ creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \
+ revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \
+ default_ttl="24h" \
+ max_ttl="72h"
+vault write pyinstabot-downloader-database/roles/pyinstabot-downloader-users-rl \
+ db_name=postgresql \
+ creation_statements="CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}'; GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";" \
+ revocation_statements="REVOKE ALL PRIVILEGES ON SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM \"{{name}}\"; REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM \"{{name}}\"; DROP ROLE \"{{name}}\";" \
+ default_ttl="24h" \
+ max_ttl="72h"
+# End of snippet
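# Sketch (not part of the script): once the engine is configured, each component
# obtains short-lived credentials from the creds endpoint of one of the roles
# defined above, e.g. for the main bot role:
vault read pyinstabot-downloader-database/creds/pyinstabot-downloader-bot
# The response carries a generated username/password pair with a 24h lease
# (default_ttl), renewable up to the 72h max_ttl.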
diff --git a/src/bot.py b/src/bot.py
index 3af4e9504..c367f8aaf 100644
--- a/src/bot.py
+++ b/src/bot.py
@@ -1,6 +1,5 @@
"""
-This module contains the main code for the bot
-to work and contains the main logic linking the additional modules.
+This module contains the main code for the bot to work and contains the main logic linking the additional modules.
"""
from datetime import datetime, timedelta
import re
@@ -14,55 +13,64 @@
from telegram import TelegramBot, exceptions as TelegramExceptions
from users import Users
from vault import VaultClient
-from configs.constants import (TELEGRAM_BOT_NAME, ROLES_MAP, QUEUE_FREQUENCY, STATUSES_MESSAGE_FREQUENCY, METRICS_PORT, METRICS_INTERVAL)
+from configs.constants import (
+ TELEGRAM_BOT_NAME, ROLES_MAP,
+ QUEUE_FREQUENCY, STATUSES_MESSAGE_FREQUENCY,
+ METRICS_PORT, METRICS_INTERVAL,
+ VAULT_DBENGINE_MOUNT_POINT, VAULT_DB_ROLE_MAIN, VAULT_DB_ROLE_USERS, VAULT_DB_ROLE_USERS_RL
+)
from modules.database import DatabaseClient
from modules.exceptions import FailedMessagesStatusUpdater
-from modules.tools import get_hash
+from modules.tools import get_hash, check_proxy
from modules.downloader import Downloader
from modules.uploader import Uploader
from modules.metrics import Metrics
# Vault client
-vault = VaultClient(name=TELEGRAM_BOT_NAME)
+# Explicitly specifying the mount point will no longer be necessary once https://github.com/obervinov/vault-package/issues/49 is resolved
+vault = VaultClient(dbengine={"mount_point": VAULT_DBENGINE_MOUNT_POINT})
# Telegram instance
telegram = TelegramBot(vault=vault)
# Telegram bot for decorators
bot = telegram.telegram_bot
# Users module with rate limits option
-users_rl = Users(vault=vault)
+users_rl = Users(vault=vault, rate_limits=True, storage={'db_role': VAULT_DB_ROLE_USERS_RL})
# Users module without rate limits option
-users = Users(vault=vault, rate_limits=False)
+users = Users(vault=vault, storage={'db_role': VAULT_DB_ROLE_USERS})
-# Client for download content from supplier
+# Detect the connection type
+check_proxy()
+
+# Client for downloading content from Instagram
# If API disabled, the mock object will be used
-downloader_api_enabled = vault.read_secret(path='configuration/downloader-api').get('enabled', False)
+downloader_api_enabled = vault.kv2engine.read_secret(path='configuration/downloader-api').get('enabled', False)
if downloader_api_enabled == 'True':
- log.info('[Bot]: downloader API is enabled: %s', downloader_api_enabled)
+ log.info('[Bot]: Downloader API is enabled: %s', downloader_api_enabled)
downloader = Downloader(vault=vault)
else:
- log.warning('[Bot]: downloader API is disabled, using mock object, because enabled flag is %s', downloader_api_enabled)
+ log.warning('[Bot]: Downloader API is disabled, using mock object, because enabled flag is %s', downloader_api_enabled)
downloader = MagicMock()
downloader.get_post_content.return_value = {
'post': f"mock_{''.join(random.choices(string.ascii_letters + string.digits, k=10))}",
- 'owner': 'undefined',
+ 'owner': 'mock',
'type': 'fake',
'status': 'completed'
}
-# Client for upload content to the cloud storage
+# Client for uploading content to the target storage
# If API disabled, the mock object will be used
-uploader_api_enabled = vault.read_secret(path='configuration/uploader-api').get('enabled', False)
+uploader_api_enabled = vault.kv2engine.read_secret(path='configuration/uploader-api').get('enabled', False)
if uploader_api_enabled == 'True':
- log.info('[Bot]: uploader API is enabled: %s', uploader_api_enabled)
+ log.info('[Bot]: Uploader API is enabled: %s', uploader_api_enabled)
uploader = Uploader(vault=vault)
else:
- log.warning('[Bot]: uploader API is disabled, using mock object, because enabled flag is %s', uploader_api_enabled)
+ log.warning('[Bot]: Uploader API is disabled, using mock object, because enabled flag is %s', uploader_api_enabled)
uploader = MagicMock()
uploader.run_transfers.return_value = 'completed'
# Client for communication with the database
-database = DatabaseClient(vault=vault)
+database = DatabaseClient(vault=vault, db_role=VAULT_DB_ROLE_MAIN)
# Metrics exporter
metrics = Metrics(port=METRICS_PORT, interval=METRICS_INTERVAL, metrics_prefix=TELEGRAM_BOT_NAME, vault=vault, database=database)
@@ -77,18 +85,11 @@ def start_command(message: telegram.telegram_types.Message = None) -> None:
Args:
message (telegram.telegram_types.Message): The message object containing information about the chat.
-
- Returns:
- None
"""
- if users.user_access_check(message.chat.id).get('access', None) == users.user_status_allow:
- log.info('[Bot]: Processing "start" command for user %s...', message.chat.id)
-
- # Add user to the database
- response = database.add_user(user_id=message.chat.id, chat_id=message.chat.id)
- log.info('[Bot]: user %s added to the database: %s', message.chat.id, response)
-
- # Main message
+ requestor = {'user_id': message.chat.id, 'chat_id': message.chat.id, 'message_id': message.message_id}
+ if users.user_access_check(**requestor).get('access', None) == users.user_status_allow:
+ log.info('[Bot]: Processing start command for user %s...', message.chat.id)
+ # Main pinned message
reply_markup = telegram.create_inline_markup(ROLES_MAP.keys())
start_message = telegram.send_styled_message(
chat_id=message.chat.id,
@@ -120,12 +121,13 @@ def bot_callback_query_handler(call: telegram.callback_query = None) -> None:
Args:
call (telegram.callback_query): The callback query object.
-
- Returns:
- None
"""
- log.info('[Bot]: Processing button "%s" for user %s...', call.data, call.message.chat.id)
- if users.user_access_check(call.message.chat.id, ROLES_MAP[call.data]).get('permissions', None) == users.user_status_allow:
+ log.info('[Bot]: Processing button %s for user %s...', call.data, call.message.chat.id)
+ requestor = {
+ 'user_id': call.message.chat.id, 'role_id': ROLES_MAP[call.data],
+ 'chat_id': call.message.chat.id, 'message_id': call.message.message_id
+ }
+ if users.user_access_check(**requestor).get('permissions', None) == users.user_status_allow:
if call.data == "Post":
help_message = telegram.send_styled_message(
chat_id=call.message.chat.id,
@@ -148,7 +150,7 @@ def bot_callback_query_handler(call: telegram.callback_query = None) -> None:
bot.register_next_step_handler(call.message, reschedule_queue, help_message)
else:
- log.error('[Bot]: Handler for button "%s" not found', call.data)
+ log.error('[Bot]: Handler for button %s not found', call.data)
else:
telegram.send_styled_message(
@@ -168,12 +170,10 @@ def unknown_command(message: telegram.telegram_types.Message = None) -> None:
Args:
message (telegram.telegram_types.Message): The message object containing the unrecognized command.
-
- Returns:
- None
"""
- if users.user_access_check(message.chat.id).get('access', None) == users.user_status_allow:
- log.error('[Bot]: Invalid command "%s" from user %s', message.text, message.chat.id)
+ requestor = {'user_id': message.chat.id, 'chat_id': message.chat.id, 'message_id': message.message_id}
+ if users.user_access_check(**requestor).get('access', None) == users.user_status_allow:
+ log.error('[Bot]: Invalid command %s from user %s', message.text, message.chat.id)
telegram.send_styled_message(chat_id=message.chat.id, messages_template={'alias': 'unknown_command'})
else:
telegram.send_styled_message(
@@ -193,9 +193,6 @@ def update_status_message(user_id: str = None) -> None:
Args:
user_id (str): The user id.
-
- Returns:
- None
"""
try:
diff_between_messages = False
@@ -304,30 +301,24 @@ def get_user_messages(user_id: str = None) -> dict:
>>> get_user_messages(user_id='1234567890')
{'queue_list': 'queue is empty
', 'processed_list': 'processed is empty
', 'queue_count': 0, 'processed_count': 0}
"""
- queue_dict = database.get_user_queue(user_id=user_id)
- processed_dict = database.get_user_processed(user_id=user_id)
-
- last_ten_queue = queue_dict.get(user_id, [])[:10] if queue_dict else []
- last_ten_processed = processed_dict.get(user_id, [])[-10:] if processed_dict else []
-
- queue_count = len(queue_dict.get(user_id, [])) if queue_dict else 0
- processed_count = len(processed_dict.get(user_id, [])) if processed_dict else 0
+ queue = database.get_user_queue(user_id=user_id)
+ processed = database.get_user_processed(user_id=user_id)
queue_string = ''
- if last_ten_queue:
- for item in last_ten_queue:
+ if queue[:10]:
+ for item in queue[:10]:
queue_string += f"+ {item['post_id']}: scheduled for {item['scheduled_time']}
\n"
else:
queue_string = 'queue is empty
'
processed_string = ''
- if last_ten_processed:
- for item in last_ten_processed:
+ if processed[-10:]:
+ for item in processed[-10:]:
processed_string += f"* {item['post_id']}: {item['state']} at {item['timestamp']}
\n"
else:
processed_string = 'processed is empty
'
- return {'queue_list': queue_string, 'processed_list': processed_string, 'queue_count': queue_count, 'processed_count': processed_count}
+ return {'queue_list': queue_string, 'processed_list': processed_string, 'queue_count': len(queue), 'processed_count': len(processed)}
def message_parser(message: telegram.telegram_types.Message = None) -> dict:
@@ -384,11 +375,14 @@ def process_one_post(
Returns:
None
"""
- # Check permissions
- user = users_rl.user_access_check(message.chat.id, ROLES_MAP['Post'])
+ requestor = {
+ 'user_id': message.chat.id, 'role_id': ROLES_MAP['Post'],
+ 'chat_id': message.chat.id, 'message_id': message.message_id
+ }
+ user = users_rl.user_access_check(**requestor)
if user.get('permissions', None) == users_rl.user_status_allow:
data = message_parser(message)
- rate_limit = user.get('rate_limits', {}).get('end_time', None)
+ rate_limit = user.get('rate_limits', None)
# Define time to process the message in queue
if rate_limit:
@@ -424,7 +418,11 @@ def process_list_posts(
Returns:
None
"""
- user = users.user_access_check(message.chat.id, ROLES_MAP['Posts List'])
+ requestor = {
+ 'user_id': message.chat.id, 'role_id': ROLES_MAP['Posts List'],
+ 'chat_id': message.chat.id, 'message_id': message.message_id
+ }
+ user = users.user_access_check(**requestor)
if user.get('permissions', None) == users.user_status_allow:
for link in message.text.split('\n'):
message.text = link
@@ -452,7 +450,11 @@ def reschedule_queue(
Returns:
None
"""
- user = users.user_access_check(message.chat.id, ROLES_MAP['Reschedule Queue'])
+ requestor = {
+ 'user_id': message.chat.id, 'role_id': ROLES_MAP['Reschedule Queue'],
+ 'chat_id': message.chat.id, 'message_id': message.message_id
+ }
+ user = users.user_access_check(**requestor)
can_be_deleted = True
if user.get('permissions', None) == users.user_status_allow:
for item in message.text.split('\n'):
@@ -496,16 +498,16 @@ def status_message_updater_thread() -> None:
while True:
time.sleep(STATUSES_MESSAGE_FREQUENCY)
try:
- if database.get_users():
- for user in database.get_users():
- user_id = user[0]
- update_status_message(user_id=user_id)
+ users_dict = []
+ users_dict = database.get_users()
+ for user in users_dict:
+ update_status_message(user_id=user['user_id'])
# pylint: disable=broad-exception-caught
except Exception as exception:
exception_context = {
'call': threading.current_thread().name,
'message': 'Failed to update the message with the status of received messages',
- 'users': database.get_users(),
+ 'users': users_dict,
'user': user,
'exception': exception
}
diff --git a/src/configs/constants.py b/src/configs/constants.py
index f16bee075..f7bb436b6 100644
--- a/src/configs/constants.py
+++ b/src/configs/constants.py
@@ -14,8 +14,15 @@
'Reschedule Queue': 'reschedule_queue',
}
-# Queue handler
+# Other constants
QUEUE_FREQUENCY = 60
STATUSES_MESSAGE_FREQUENCY = 15
METRICS_PORT = 8000
METRICS_INTERVAL = 30
+
+# Vault Database Engine constants
+VAULT_DBENGINE_MOUNT_POINT = f"{TELEGRAM_BOT_NAME}-database"
+# Will be removed after https://github.com/obervinov/users-package/issues/47
+VAULT_DB_ROLE_MAIN = f"{TELEGRAM_BOT_NAME}-bot"
+VAULT_DB_ROLE_USERS = f"{TELEGRAM_BOT_NAME}-users"
+VAULT_DB_ROLE_USERS_RL = f"{TELEGRAM_BOT_NAME}-users-rl"
diff --git a/src/configs/databases.json b/src/configs/databases.json
index 4ec57a45c..d9f6e4a04 100644
--- a/src/configs/databases.json
+++ b/src/configs/databases.json
@@ -65,10 +65,25 @@
{
"name": "users",
"description": "The table stores the user ID and the chat ID for communication with the bot",
+ "columns": [
+ "id SERIAL PRIMARY KEY, ",
+ "user_id VARCHAR(255) UNIQUE NOT NULL, ",
+ "chat_id VARCHAR(255) NOT NULL, ",
+ "status VARCHAR(255) NOT NULL DEFAULT 'denied'"
+ ]
+ },
+ {
+ "name": "users_requests",
+ "description": "The table stores the metadata of the user requests",
"columns": [
"id SERIAL PRIMARY KEY, ",
"user_id VARCHAR(255) NOT NULL, ",
- "chat_id VARCHAR(255) NOT NULL"
+ "message_id VARCHAR(255) NOT NULL, ",
+ "chat_id VARCHAR(255) NOT NULL, ",
+ "authentication VARCHAR(255) NOT NULL, ",
+ "\"authorization\" VARCHAR(255) NOT NULL, ",
+ "timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, ",
+ "rate_limits TIMESTAMP"
]
}
]
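# Sketch (not part of the config): the columns array above is concatenated into a
# single DDL statement by the database module's CREATE TABLE IF NOT EXISTS wrapper,
# roughly equivalent to running:
psql -c "CREATE TABLE IF NOT EXISTS users_requests (id SERIAL PRIMARY KEY, user_id VARCHAR(255) NOT NULL, message_id VARCHAR(255) NOT NULL, chat_id VARCHAR(255) NOT NULL, authentication VARCHAR(255) NOT NULL, \"authorization\" VARCHAR(255) NOT NULL, timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, rate_limits TIMESTAMP);"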
diff --git a/src/configs/messages.json b/src/configs/messages.json
index 9ceb4324f..63d1b47a1 100644
--- a/src/configs/messages.json
+++ b/src/configs/messages.json
@@ -29,7 +29,7 @@
"args": [":information:"]
},
"wrong_reschedule_queue": {
- "text": "{0} Incorrect format for rescheduling messages in the queue. Please check this conditions:\n1. Post-id is a string and its length is equal to 11 characters.\n2. Date-time format is correct and the date is in the future.\n3. The message format is correct \nq1wRty12345: scheduled for 2021-12-31 23:59:59
\n4. Each new message is on a new line.\n\nCurrent time: {1}",
+ "text": "{0} Incorrect format for rescheduling messages in the queue. Please check this conditions:\n1. Post-id is a string and its length is equal to 11 characters.\n2. Date-time format is correct and the date is in the future.\n3. The message format is correct: q1wRty12345: scheduled for 2021-12-31 23:59:59
\n4. Each new message is on a new line.\n5. Timestamp is not in the past: current time {1}
",
"args": [":warning:", "current_time"]
},
"unknown_command": {
diff --git a/src/migrations/0001_vault_historical_data.py b/src/migrations/0001_vault_historical_data.py
index afb65a8de..1e33d1fb1 100644
--- a/src/migrations/0001_vault_historical_data.py
+++ b/src/migrations/0001_vault_historical_data.py
@@ -1,4 +1,4 @@
-# pylint: disable=C0103,R0914
+# pylint: disable=C0103,R0914,R0801
"""
Migrates historical data from the Vault to the processed table in the database.
https://github.com/obervinov/pyinstabot-downloader/issues/30
@@ -23,25 +23,25 @@ def execute(obj):
# information about owners
try:
- owners = obj.vault.list_secrets(path='history/')
+ owners = obj.vault.kv2engine.list_secrets(path='history/')
owners_counter = len(owners)
print(f"Founded {owners_counter} owners in history")
# reade history form Vault
for owner in owners:
# information about owner posts
- posts = obj.vault.read_secret(path=f"history/{owner}")
+ posts = obj.vault.kv2engine.read_secret(path=f"history/{owner}")
posts_counter = len(posts)
print(f"{NAME}: Founded {posts_counter} posts in history/{owner}")
for post in posts:
- user_id = next(iter(obj.vault.read_secret(path='configuration/users').keys()))
+ user_id = next(iter(obj.vault.kv2engine.read_secret(path='configuration/users').keys()))
post_id = post
post_url = f"https://www.instagram.com/p/{post}"
post_owner = owner
link_type = 'post'
message_id = 'unknown'
- chat_id = next(iter(obj.vault.read_secret(path='configuration/users').keys()))
+ chat_id = next(iter(obj.vault.kv2engine.read_secret(path='configuration/users').keys()))
download_status = 'completed'
upload_status = 'completed'
state = 'processed'
@@ -60,9 +60,12 @@ def execute(obj):
)
print(f"{NAME}: Migrating {post_id} from history/{owner}")
- obj.cursor.execute(f"INSERT INTO {table_name} ({columns}) VALUES ({values})")
- obj.database_connection.commit()
+ conn = obj.get_connection()
+ with conn.cursor() as cursor:
+ cursor.execute(f"INSERT INTO {table_name} ({columns}) VALUES ({values})")
+ conn.commit()
print(f"{NAME}: Post {post_id} from history/{owner} has been added to processed table")
+ obj.close_connection(conn)
print(f"{NAME}: Migration has been completed")
# Will be fixed after the issue https://github.com/obervinov/vault-package/issues/46 is resolved
# pylint: disable=broad-exception-caught
diff --git a/src/migrations/0002_messages_table.py b/src/migrations/0002_messages_table.py
index cd49bcf86..52b5ade86 100644
--- a/src/migrations/0002_messages_table.py
+++ b/src/migrations/0002_messages_table.py
@@ -1,4 +1,4 @@
-# pylint: disable=C0103,R0914
+# pylint: disable=C0103,R0914,R0801
"""
Add additional column 'created_at' and replace column 'timestamp' with 'updated_at' in the messages table.
https://github.com/obervinov/pyinstabot-downloader/issues/62
@@ -23,44 +23,53 @@ def execute(obj):
add_columns = [('created_at', 'TIMESTAMP', 'CURRENT_TIMESTAMP'), ('state', 'VARCHAR(255)', "'added'")]
print(f"{NAME}: Start migration for the {table_name} table: Rename columns {rename_columns}, Add columns {add_columns}...")
- # check if the table exists and has the necessary schema for execute the migration
- # check table
- obj.cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
- table = obj.cursor.fetchone()
+ conn = obj.get_connection()
+ with conn.cursor() as cursor:
+ # check if the table exists and has the necessary schema to execute the migration
+ # check table
+ cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
+ table = cursor.fetchone()
- # check columns in the table
- obj.cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
- columns = [row[0] for row in obj.cursor.fetchall()]
+ # check columns in the table
+ cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
+ columns = [row[0] for row in cursor.fetchall()]
- if not table:
- print(f"{NAME}: The {table_name} table does not exist. Skip the migration.")
+ if not table:
+ print(f"{NAME}: The {table_name} table does not exist. Skip the migration.")
- elif len(columns) < 1:
- print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. Skip the migration.")
+ elif len(columns) < 1:
+ print(f"{NAME}: The {table_name} table does not have the necessary columns to execute the migration. Skip the migration.")
- else:
- for column in rename_columns:
- try:
- print(f"{NAME}: Rename column {column[0]} to {column[1]} in the {table_name} table...")
- obj.cursor.execute(f"ALTER TABLE {table_name} RENAME COLUMN {column[0]} TO {column[1]}")
- obj.database_connection.commit()
- print(f"{NAME}: Column {column[0]} has been renamed to {column[1]} in the {table_name} table.")
- except obj.errors.DuplicateColumn as error:
- print(f"{NAME}: Columns in the {table_name} table have already been renamed. Skip renaming: {error}")
- obj.database_connection.rollback()
- except obj.errors.UndefinedColumn as error:
- print(f"{NAME}: Columns in the {table_name} table have not been renamed. Skip renaming: {error}")
- obj.database_connection.rollback()
+ elif not all(rc[0] in columns for rc in rename_columns):
+ print(f"{NAME}: The {table_name} table does not have the necessary columns to rename. Skip renaming.")
- for column in add_columns:
- try:
- print(f"{NAME}: Add column {column[0]} to the {table_name} table...")
- obj.cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}")
- obj.database_connection.commit()
- print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.")
- except obj.errors.DuplicateColumn as error:
- print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}")
- obj.database_connection.rollback()
- except obj.errors.FeatureNotSupported as error:
- print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}")
- obj.database_connection.rollback()
+ else:
+ for column in rename_columns:
+ try:
+ print(f"{NAME}: Rename column {column[0]} to {column[1]} in the {table_name} table...")
+ cursor.execute(f"ALTER TABLE {table_name} RENAME COLUMN {column[0]} TO {column[1]}")
+ conn.commit()
+ print(f"{NAME}: Column {column[0]} has been renamed to {column[1]} in the {table_name} table.")
+ except obj.errors.DuplicateColumn as error:
+ print(f"{NAME}: Columns in the {table_name} table have already been renamed. Skip renaming: {error}")
+ conn.rollback()
+ except obj.errors.UndefinedColumn as error:
+ print(f"{NAME}: Columns in the {table_name} table have not been renamed. Skip renaming: {error}")
+ conn.rollback()
+
+ for column in add_columns:
+ if column[0] in columns:
+ print(f"{NAME}: The {table_name} table already has the {column[0]} column. Skip adding.")
+ else:
+ try:
+ print(f"{NAME}: Add column {column[0]} to the {table_name} table...")
+ cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}")
+ conn.commit()
+ print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.")
+ except obj.errors.DuplicateColumn as error:
+ print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}")
+ conn.rollback()
+ except obj.errors.FeatureNotSupported as error:
+ print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}")
+ conn.rollback()
+ obj.close_connection(conn)
diff --git a/src/migrations/0003_users_table.py b/src/migrations/0003_users_table.py
new file mode 100644
index 000000000..a9caa28b4
--- /dev/null
+++ b/src/migrations/0003_users_table.py
@@ -0,0 +1,70 @@
+# pylint: disable=C0103,R0914,R0801
+"""
+Add additional column 'status' in the users table.
+https://github.com/obervinov/users-package/blob/v3.0.0/tests/postgres/tables.sql
+"""
+VERSION = '1.0'
+NAME = '0003_users_table'
+
+
+def execute(obj):
+ """
+ Add additional column 'status' in the users table.
+
+ Args:
+ obj: An obj containing the database connection and cursor, as well as the Vault instance.
+
+ Returns:
+ None
+ """
+ # database settings
+ table_name = 'users'
+ add_columns = [('status', 'VARCHAR(255)', "'denied'")]
+ update_columns = [('user_id', 'VARCHAR(255)', 'UNIQUE NOT NULL')]
+ print(f"{NAME}: Start migration for the {table_name} table: Add columns {add_columns} and update columns {update_columns}...")
+
+ # check if the table exists and has the necessary schema to execute the migration
+ conn = obj.get_connection()
+ with conn.cursor() as cursor:
+ # check table
+ cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
+ table = cursor.fetchone()
+
+ # check columns in the table
+ cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
+ columns = [row[0] for row in cursor.fetchall()]
+
+ if not table:
+ print(f"{NAME}: The {table_name} table does not exist. Skip the migration.")
+
+ else:
+ for column in add_columns:
+ if column[0] in columns:
+ print(f"{NAME}: The {table_name} table already has the {column[0]} column. Skip adding.")
+ else:
+ try:
+ print(f"{NAME}: Add column {column[0]} to the {table_name} table...")
+ cursor.execute(f"ALTER TABLE {table_name} ADD COLUMN {column[0]} {column[1]} DEFAULT {column[2]}")
+ conn.commit()
+ print(f"{NAME}: Column {column[0]} has been added to the {table_name} table.")
+ except obj.errors.DuplicateColumn as error:
+ print(f"{NAME}: Columns in the {table_name} table have already been added. Skip adding: {error}")
+ conn.rollback()
+ except obj.errors.FeatureNotSupported as error:
+ print(f"{NAME}: Columns in the {table_name} table have not been added. Skip adding: {error}")
+ conn.rollback()
+
+ for column in update_columns:
+ if column[0] in columns:
+ try:
+ print(f"{NAME}: Alter column {column[0]} to {column[2]}...")
+ cursor.execute(f"ALTER TABLE {table_name} ALTER COLUMN {column[0]} SET NOT NULL;")
+ cursor.execute(f"ALTER TABLE {table_name} ADD CONSTRAINT {column[0]}_unique UNIQUE ({column[0]});")
+ conn.commit()
+ print(f"{NAME}: Column {column[0]} has been updated to {column[2]}.")
+ # pylint: disable=broad-exception-caught
+ except Exception as error:
+ print(f"{NAME}: Failed to update column {column[0]}: {error}")
+ conn.rollback()
+ else:
+ print(f"{NAME}: The {table_name} table does not have the {column[0]} column. Skip updating.")
diff --git a/src/migrations/0004_vault_users_data.py b/src/migrations/0004_vault_users_data.py
new file mode 100644
index 000000000..9e3379d0e
--- /dev/null
+++ b/src/migrations/0004_vault_users_data.py
@@ -0,0 +1,60 @@
+# pylint: disable=C0103,R0914,R0801
+"""
+Migration for the vault users data to the users table in the database.
+https://github.com/obervinov/users-package/blob/v3.0.0/tests/postgres/tables.sql
+"""
+VERSION = '1.0'
+NAME = '0004_vault_users_data'
+
+
+def execute(obj):
+ """
+ Migration for the vault users data to the users table in the database.
+
+ Args:
+ obj: An obj containing the database connection and cursor, as well as the Vault instance.
+
+ Returns:
+ None
+ """
+ # database settings
+ table_name = 'users'
+ print(f"{NAME}: Start migration from the vault to the {table_name} table...")
+
+ # check if the table exists to execute the migration
+ conn = obj.get_connection()
+ with conn.cursor() as cursor:
+ cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_name = %s;", (table_name,))
+ table = cursor.fetchone()
+
+ if not table:
+ print(f"{NAME}: The {table_name} table does not exist. Skip the migration.")
+
+ else:
+ try:
+ users = obj.vault.kv2engine.list_secrets(path='data/users')
+ users_counter = len(users)
+ print(f"{NAME}: Founded {users_counter} users in users data")
+
+ for user in users:
+ user_last_state = obj.json.loads(obj.vault.kv2engine.read_secret(path=f"data/users/{user}", key='authentication'))
+
+ user_id = user
+ chat_id = 'unknown'
+ status = user_last_state.get('status', 'unknown')
+
+ values = f"'{user_id}', '{chat_id}', '{status}'"
+
+ print(f"{NAME}: Migrating user {user_id} to the {table_name} table...")
+ with conn.cursor() as cursor:
+ cursor.execute(f"INSERT INTO {table_name} (user_id, chat_id, status) VALUES ({values})")
+ conn.commit()
+ print(f"{NAME}: User {user_id} has been added to the {table_name} table")
+ print(f"{NAME}: Migration has been completed")
+ # pylint: disable=broad-exception-caught
+ except Exception as migration_error:
+ print(
+ f"{NAME}: Migration cannot be completed due to an error: {migration_error}. "
+ "It's not a critical error, so the migration will be skipped."
+ )
+ obj.close_connection(conn)
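# Sketch (not part of the migration): the per-user record read above can be inspected
# manually; the path and field name come from the code, the user id is a placeholder:
vault kv get -field=authentication pyinstabot-downloader/data/users/123456789
# The field holds a JSON document whose `status` key is copied into the new
# `status` column of the users table.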
diff --git a/src/modules/database.py b/src/modules/database.py
index c22dfaf30..f88c484d1 100644
--- a/src/modules/database.py
+++ b/src/modules/database.py
@@ -38,12 +38,14 @@ class DatabaseClient:
Attributes:
database_connections (psycopg2.extensions.connection): A connection to the PostgreSQL database.
vault (object): An object representing a HashiCorp Vault client for retrieving secrets.
+ db_role (str): The role to use for generating database credentials.
errors (psycopg2.errors): A collection of error classes for exceptions raised by the psycopg2 module.
+ json (json): A JSON encoder and decoder for working with JSON data to execute database migrations.
Methods:
- _create_connection_pool(): Create a connection pool for the PostgreSQL database.
- _get_connection(): Get a connection from the connection pool.
- _close_connection(connection): Close the connection and return it to the connection pool.
+ create_connection_pool(): Create a connection pool for the PostgreSQL database.
+ get_connection(): Get a connection from the connection pool.
+ close_connection(connection): Close the connection and return it to the connection pool.
_prepare_db(): Prepare the database by creating and initializing the necessary tables.
_migrations(): Execute database migrations to update the database schema or data.
_is_migration_executed(migration_name): Check if a migration has already been executed.
@@ -63,8 +65,7 @@ class DatabaseClient:
get_user_processed(user_id): Get last ten messages from the processed table for the specified user.
check_message_uniqueness(post_id, user_id): Check if a message with the given post ID and chat ID already exists in the queue.
keep_message(message_id, chat_id, message_content, **kwargs): Add a message to the messages table in the database.
- add_user(user_id, chat_id): Add a user to the users table in the database.
- get_users(): Get a list of all users in the database.
+ get_users(only_allowed): Get a list of users from the users table in the database.
get_considered_message(message_type, chat_id): Get a message with specified type and
Raises:
@@ -72,13 +73,15 @@ class DatabaseClient:
"""
def __init__(
self,
- vault: object = None
+ vault: object = None,
+ db_role: str = None
) -> None:
"""
Initializes a new instance of the Database client.
Args:
vault (object): An object representing a HashiCorp Vault client for retrieving secrets with the database configuration.
+ db_role (str): The role to use for generating database credentials.
Examples:
To create a new instance of the Database class:
@@ -87,7 +90,9 @@ def __init__(
>>> vault = Vault()
>>> db = Database(vault=vault)
"""
+ self.json = json
self.vault = vault
+ self.db_role = db_role
self.errors = psycopg2.errors
self.database_connections = self.create_connection_pool()
@@ -102,22 +107,33 @@ def create_connection_pool(self) -> pool.SimpleConnectionPool:
Returns:
pool.SimpleConnectionPool: A connection pool for the PostgreSQL database.
"""
- db_configuration = self.vault.read_secret(path='configuration/database')
+ required_keys_configuration = {"host", "port", "dbname", "connections"}
+ required_keys_credentials = {"username", "password"}
+ db_configuration = self.vault.kv2engine.read_secret(path='configuration/database')
+ db_credentials = self.vault.dbengine.generate_credentials(role=self.db_role)
+
+ if not db_configuration or not db_credentials:
+ raise ValueError('Database configuration or credentials are missing')
+
+ missing_keys = (required_keys_configuration - set(db_configuration.keys())) | (required_keys_credentials - set(db_credentials.keys()))
+ if missing_keys:
+ raise KeyError("Missing keys in the database configuration or credentials: {missing_keys}")
+
log.info(
'[Database]: Creating a connection pool for the %s:%s/%s',
- db_configuration['host'], db_configuration['port'], db_configuration['database']
+ db_configuration['host'], db_configuration['port'], db_configuration['dbname']
)
return pool.SimpleConnectionPool(
minconn=1,
maxconn=db_configuration['connections'],
host=db_configuration['host'],
port=db_configuration['port'],
- user=db_configuration['user'],
- password=db_configuration['password'],
- database=db_configuration['database']
+ user=db_credentials['username'],
+ password=db_credentials['password'],
+ database=db_configuration['dbname']
)
- def _get_connection(self) -> psycopg2.extensions.connection:
+ def get_connection(self) -> psycopg2.extensions.connection:
"""
Get a connection from the connection pool.
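# Sketch (not part of the module): the kv2 secret read above now carries only the
# connection settings, since credentials come from the database engine. Seeding it
# could look like this (key names from required_keys_configuration; the mount path
# and values are assumptions/placeholders):
vault kv put pyinstabot-downloader/configuration/database host=127.0.0.1 port=5432 dbname=pyinstabot-downloader connections=10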
@@ -126,7 +142,7 @@ def _get_connection(self) -> psycopg2.extensions.connection:
"""
return self.database_connections.getconn()
- def _close_connection(self, connection: psycopg2.extensions.connection) -> None:
+ def close_connection(self, connection: psycopg2.extensions.connection) -> None:
"""
Close the cursor and return it to the connection pool.
@@ -172,8 +188,10 @@ def _migrations(self) -> None:
# Migrations directory
migrations_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../migrations'))
sys.path.append(migrations_dir)
+ migration_files = [f for f in os.listdir(migrations_dir) if f.endswith('.py')]
+ migration_files.sort()
- for migration_file in os.listdir(migrations_dir):
+ for migration_file in migration_files:
if migration_file.endswith('.py'):
migration_module_name = migration_file[:-3]
@@ -185,6 +203,8 @@ def _migrations(self) -> None:
self._mark_migration_as_executed(migration_name=migration_module_name, version=version)
else:
log.info('[Database] Migrations: the %s has already been executed and was skipped', migration_module_name)
+ else:
+ log.error('[Database]: Migrations: the %s is not a valid migration file', migration_file)
def _is_migration_executed(
self,
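Since the runner now sorts migration file names lexicographically, a zero-padded numeric prefix keeps execution order stable. A hypothetical migration module compatible with this loader might look as follows (NAME and VERSION are read via getattr() by the runner and the tests; the execute() entry point is a placeholder, as the real hook sits outside this hunk):

    # Hypothetical file: src/migrations/001_example.py (sketch only)
    NAME = '001_example'   # falls back to the module name if the attribute is missing
    VERSION = '1.0'

    def execute(database):  # placeholder entry point, assumed for illustration
        conn = database.get_connection()
        with conn.cursor() as cursor:
            cursor.execute("ALTER TABLE users ADD COLUMN IF NOT EXISTS note VARCHAR(255)")
        conn.commit()
        database.close_connection(conn)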
@@ -230,11 +250,11 @@ def _create_table(
To create a new table called 'users' with columns 'id' and 'name', you can call the method like this:
>>> _create_table('users', 'id INTEGER PRIMARY KEY, name TEXT')
"""
- conn = self._get_connection()
+ conn = self.get_connection()
with conn.cursor() as cursor:
cursor.execute(f"CREATE TABLE IF NOT EXISTS {table_name} ({columns})")
conn.commit()
- self._close_connection(conn)
+ self.close_connection(conn)
@reconnect_on_exception
def _insert(
@@ -260,11 +280,11 @@ def _insert(
"""
try:
sql_query = f"INSERT INTO {table_name} ({', '.join(columns)}) VALUES ({', '.join(['%s'] * len(columns))})"
- conn = self._get_connection()
+ conn = self.get_connection()
with conn.cursor() as cursor:
cursor.execute(sql_query, values)
conn.commit()
- self._close_connection(conn)
+ self.close_connection(conn)
except (psycopg2.Error, IndexError) as error:
log.error(
'[Database]: An error occurred while inserting a row into the table %s: %s\nColumns: %s\nValues: %s\nQuery: %s',
@@ -309,11 +329,11 @@ def _select(
if kwargs.get('limit', None):
sql_query += f" LIMIT {kwargs.get('limit')}"
- conn = self._get_connection()
+ conn = self.get_connection()
with conn.cursor() as cursor:
cursor.execute(sql_query)
response = cursor.fetchall()
- self._close_connection(conn)
+ self.close_connection(conn)
return response if response else None
@reconnect_on_exception
@@ -334,11 +354,11 @@ def _update(
Examples:
>>> _update('users', "username='new_username', password='new_password'", "id=1")
"""
- conn = self._get_connection()
+ conn = self.get_connection()
with conn.cursor() as cursor:
cursor.execute(f"UPDATE {table_name} SET {values} WHERE {condition}")
conn.commit()
- self._close_connection(conn)
+ self.close_connection(conn)
@reconnect_on_exception
def _delete(
@@ -357,11 +377,11 @@ def _delete(
To delete all rows from the 'users' table where the 'username' column is 'john':
>>> db._delete('users', "username='john'")
"""
- conn = self._get_connection()
+ conn = self.get_connection()
with conn.cursor() as cursor:
cursor.execute(f"DELETE FROM {table_name} WHERE {condition}")
conn.commit()
- self._close_connection(conn)
+ self.close_connection(conn)
def _reset_stale_records(self) -> None:
"""
@@ -375,14 +395,17 @@ def _reset_stale_records(self) -> None:
columns=("id", "state"),
condition="message_type = 'status_message'",
)
- for message in status_messages:
- if message[1] != 'updated':
- self._update(
- table_name='messages',
- values="state = 'updated'",
- condition=f"id = '{message[0]}'"
- )
- log.info('[Database]: Stale status messages have been reset')
+ if status_messages:
+ for message in status_messages:
+ if message[1] != 'updated':
+ self._update(
+ table_name='messages',
+ values="state = 'updated'",
+ condition=f"id = '{message[0]}'"
+ )
+ log.info('[Database]: Stale status messages have been reset')
+ else:
+ log.info('[Database]: No stale status messages found')
def add_message_to_queue(
self,
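The per-row loop above could also be collapsed into one conditional update via the existing helper; a sketch of the equivalent statement as it would read inside _reset_stale_records():

    # Equivalent single-statement form (sketch), trading per-row work for one UPDATE:
    self._update(
        table_name='messages',
        values="state = 'updated'",
        condition="message_type = 'status_message' AND state != 'updated'"
    )

The loop form presumably stays for symmetry with the rest of the module.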
@@ -413,12 +436,12 @@ def add_message_to_queue(
>>> data = {
... 'user_id': '12345',
... 'post_id': '67890',
- ... 'post_url': 'https://www.instagram.com/p/67890/',
+ ... 'post_url': 'https://www.example.com/p/67890/',
... 'post_owner': 'johndoe',
... 'link_type': 'profile',
... 'message_id': 'abcde',
... 'chat_id': 'xyz',
- ... 'scheduled_time': '2022-01-01 12:00:00'
+ ... 'scheduled_time': '2022-01-01 12:00:00',
... 'download_status': 'not started',
... 'upload_status': 'not started'
... }
@@ -470,7 +493,7 @@ def get_message_from_queue(
Examples:
>>> database.get_message_from_queue('2022-01-01 12:00:00')
- (1, '123456789', 'vahj5AN8aek', 'https://www.instagram.com/p/vahj5AN8aek', 'johndoe', 'post', '12345', '12346', '123456789',
+ (1, '123456789', 'vahj5AN8aek', 'https://www.example.com/p/vahj5AN8aek', 'johndoe', 'post', '12345', '12346', '123456789',
datetime.datetime(2023, 11, 14, 21, 21, 22, 603440), 'None', 'None', datetime.datetime(2023, 11, 14, 21, 14, 26, 680024), 'waiting')
"""
message = self._select(
@@ -600,7 +623,7 @@ def update_schedule_time_in_queue(
def get_user_queue(
self,
user_id: str = None
- ) -> Union[dict, None]:
+ ) -> list:
"""
Get messages from the queue table for the specified user.
@@ -608,30 +631,29 @@ def get_user_queue(
user_id (str): The ID of the user.
Returns:
- dict: A dictionary containing messages from the queue for the specified user.
+ list: A list of dictionaries containing the messages from the queue table for the specified user.
Examples:
>>> get_user_queue(user_id='12345')
- {'12345': [{'post_id': '123456789', 'scheduled_time': '2022-01-01 12:00:00'}]}
+ [{'post_id': '123456789', 'scheduled_time': '2022-01-01 12:00:00'}]
"""
- result = {}
+ result = []
queue = self._select(
table_name='queue',
columns=("post_id", "scheduled_time"),
condition=f"user_id = '{user_id}'",
order_by='scheduled_time ASC',
- limit=1000
+ limit=10000
)
- for message in queue:
- if user_id not in result:
- result[user_id] = []
- result[user_id].append({'post_id': message[0], 'scheduled_time': message[1]})
- return result if result else None
+ if queue:
+ for message in queue:
+ result.append({'post_id': message[0], 'scheduled_time': message[1]})
+ return result
def get_user_processed(
self,
user_id: str = None
- ) -> Union[dict, None]:
+ ) -> list:
"""
Get processed messages from the processed table for the specified user.
It is used to display the last messages sent by the bot to the user.
@@ -640,25 +662,24 @@ def get_user_processed(
user_id (str): The ID of the user.
Returns:
- dict: A dictionary containing the last five messages from the processed table for the specified user.
+ list: A list of dictionaries containing processed messages for the specified user.
Examples:
>>> get_user_processed(user_id='12345')
- {'12345': [{'post_id': '123456789', 'processed_time': '2022-01-01 12:00:00', 'state': 'completed'}]}
+ [{'post_id': '123456789', 'timestamp': '2022-01-01 12:00:00', 'state': 'processed'}]
"""
- result = {}
+ result = []
processed = self._select(
table_name='processed',
columns=("post_id", "timestamp", "state"),
condition=f"user_id = '{user_id}'",
order_by='timestamp ASC',
- limit=5000
+ limit=10000
)
- for message in processed:
- if user_id not in result:
- result[user_id] = []
- result[user_id].append({'post_id': message[0], 'timestamp': message[1], 'state': message[2]})
- return result if result else None
+ if processed:
+ for message in processed:
+ result.append({'post_id': message[0], 'timestamp': message[1], 'state': message[2]})
+ return result
def check_message_uniqueness(
self,
@@ -760,6 +781,7 @@ def keep_message(
condition=f"id = '{check_exist_message_type[0][0]}'"
)
response = f"{message_id} updated"
+
elif not check_exist_message_type:
self._insert(
table_name='messages',
@@ -767,63 +789,51 @@ def keep_message(
values=(message_id, chat_id, message_type, message_content_hash, 'bot')
)
response = f"{message_id} kept"
+
else:
log.warning('[Database]: Message with ID %s already exists in the messages table and cannot be updated', message_id)
response = f"{message_id} already exists"
+
return response
- def add_user(
+ def get_users(
self,
- user_id: str = None,
- chat_id: str = None
- ) -> str:
+ only_allowed: bool = True
+ ) -> list:
"""
- Add a user to the users table in the database.
- It is used to store the user ID and chat ID for sending messages to the user.
+ This method will be deprecated after https://github.com/obervinov/users-package/issues/44 (users-package:v3.1.0).
+ Get a dictionary of all users with their metadata from the users table in the database.
+ By default, the method returns only allowed users.
Args:
- user_id (str): The ID of the user.
- chat_id (str): The ID of the chat.
+ only_allowed (bool): A flag indicating whether to return only allowed users. Default is True.
Returns:
- str: A message indicating that the user was added to the users table or that the user already exists.
+ list: A list of dictionaries containing all users in the database and their metadata.
Examples:
- >>> add_user(user_id='12345', chat_id='67890')
- '12345 added'
- or
- '12345 already exists'
+ >>> get_users()
+ [{'user_id': '12345', 'chat_id': '67890', 'status': 'denied'}, {'user_id': '12346', 'chat_id': '67891', 'status': 'allowed'}]
"""
- exist_user = self._select(table_name='users', columns=("user_id",), condition=f"user_id = '{user_id}'")
- if exist_user:
- result = f"{user_id} already exists"
+ users_dict = []
+ if only_allowed:
+ users = self._select(
+ table_name='users',
+ columns=("user_id", "chat_id", "status"),
+ condition="status = 'allowed'",
+ limit=1000
+ )
else:
- self._insert(
+ users = self._select(
table_name='users',
- columns=("chat_id", "user_id"),
- values=(chat_id, user_id)
+ columns=("user_id", "chat_id", "status"),
+ limit=1000
)
- result = f"{user_id} added"
- return result
-
- def get_users(self) -> list:
- """
- Get a list of all users in the database.
-
- Returns:
- list: A list of all users from the messages table.
- Examples:
- >>> get_users()
- # [('{user_id}', '{chat_id}')]
- [('12345', '67890')]
- """
- users = self._select(
- table_name='users',
- columns=("user_id", "chat_id"),
- limit=1000
- )
- return users if users else None
+ if users:
+ for user in users:
+ users_dict.append({'user_id': user[0], 'chat_id': user[1], 'status': user[2]})
+ return users_dict
def get_considered_message(
self,
diff --git a/src/modules/downloader.py b/src/modules/downloader.py
index 717408b17..4e6bf1082 100644
--- a/src/modules/downloader.py
+++ b/src/modules/downloader.py
@@ -63,13 +63,13 @@ def __init__(
if configuration:
self.configuration = configuration
elif not configuration:
- self.configuration = vault.read_secret(path='configuration/downloader-api')
+ self.configuration = vault.kv2engine.read_secret(path='configuration/downloader-api')
else:
raise FailedCreateDownloaderInstance(
"Failed to initialize the Downloader instance."
"Please check the configuration in class argument or the secret with the configuration in the Vault."
)
- log.info('[Downloader]: creating a new instance of the Downloader...')
+ log.info('[Downloader]: Creating a new instance...')
self.instaloader = instaloader.Instaloader(
quiet=True,
user_agent=self.configuration.get('user-agent', None),
@@ -89,10 +89,10 @@ def __init__(
fatal_status_codes=literal_eval(self.configuration.get('fatal-status-codes', '[]'))
)
auth_status = self._login()
- log.info(
- '[Downloader]: downloader instance created successfully: %s in %s',
- auth_status, self.configuration['username']
- )
+ if auth_status == 'logged_in':
+ log.info('[Downloader]: Instance created successfully with account %s', self.configuration['username'])
+ else:
+ raise FailedAuthInstaloader("Failed to authenticate the Instaloader instance.")
def _login(self) -> Union[str, None]:
"""
@@ -115,7 +115,7 @@ def _login(self) -> Union[str, None]:
self.configuration['username'],
self.configuration['session-file']
)
- log.info('[Downloader]: session file %s was load success', self.configuration['session-file'])
+ log.info('[Downloader]: Session file %s was loaded successfully', self.configuration['session-file'])
return 'logged_in'
if self.configuration['login-method'] == 'password':
@@ -124,19 +124,14 @@ def _login(self) -> Union[str, None]:
self.configuration['password']
)
self.instaloader.save_session_to_file(self.configuration['session-file'])
- log.info(
- '[Downloader]: login with password was successful. Save session in %s',
- self.configuration['sessionfile']
- )
+ log.info('[Downloader]: Login with password was successful. Saved session in %s', self.configuration['session-file'])
return 'logged_in'
if self.configuration['login-method'] == 'anonymous':
- log.warning('[Downloader]: initialization without authentication into an account (anonymous)')
+ log.warning('[Downloader]: Initialization without authentication into an account (anonymous)')
return None
- raise FailedAuthInstaloader(
- "Failed to authenticate the Instaloader instance. Please check the configuration in the Vault or the class argument."
- )
+ raise FailedAuthInstaloader("Failed to authenticate the Instaloader instance.")
def get_post_content(
self,
@@ -156,21 +151,21 @@ def get_post_content(
'status': 'completed'
}
"""
- log.info('[Downloader]: downloading the contents of the post %s...', shortcode)
+ log.info('[Downloader]: Downloading the contents of the post %s...', shortcode)
try:
post = instaloader.Post.from_shortcode(self.instaloader.context, shortcode)
self.instaloader.download_post(post, '')
- log.info('[Downloader]: the contents of the post %s have been successfully downloaded', shortcode)
+ log.info('[Downloader]: The contents of the post %s have been successfully downloaded', shortcode)
status = 'completed'
owner = post.owner_username
typename = post.typename
except instaloader.exceptions.BadResponseException as error:
- log.error('[Downloader]: error downloading post content: %s', error)
+ log.error('[Downloader]: Error downloading post content: %s', error)
if "Fetching Post metadata failed" in str(error):
status = 'source_not_found'
owner = 'undefined'
typename = 'undefined'
- log.warning('[Downloader]: post %s not found, perhaps it was deleted. Message will be marked as processed.', shortcode)
+ log.warning('[Downloader]: Post %s not found, perhaps it was deleted. Message will be marked as processed.', shortcode)
else:
raise instaloader.exceptions.BadResponseException(error)
diff --git a/src/modules/metrics.py b/src/modules/metrics.py
index c60a4e047..b1d6bdad7 100644
--- a/src/modules/metrics.py
+++ b/src/modules/metrics.py
@@ -1,6 +1,5 @@
"""This module provides a way to expose metrics to Prometheus for monitoring the application."""
import time
-import json
from prometheus_client import start_http_server, Gauge
from logger import log
@@ -10,6 +9,22 @@
class Metrics():
"""
This class provides a way to expose metrics to Prometheus for monitoring the application.
+
+ Attributes:
+ :attribute port (int): port for the metrics server.
+ :attribute interval (int): interval for collecting metrics.
+ :attribute database (Database): instance of the Database class.
+ :attribute running (bool): the status of the metrics server.
+ :attribute thread_status_gauge (Gauge): gauge for the thread status.
+ :attribute access_granted_counter (Gauge): gauge for the access granted counter.
+ :attribute access_denied_counter (Gauge): gauge for the access denied counter.
+ :attribute processed_messages_counter (Gauge): gauge for the processed messages counter.
+ :attribute queue_length_gauge (Gauge): gauge for the queue length.
+
+ Examples:
+ >>> metrics = Metrics(port=8000, interval=1, metrics_prefix='pytest')
+ >>> metrics.run(threads=[thread1, thread2])
+ >>> metrics.stop()
"""
def __init__(
self,
@@ -27,23 +42,18 @@ def __init__(
:param metrics_prefix (str): prefix for the metrics.
Keyword Args:
- :param vault (Vault): instance of the Vault class.
:param database (Database): instance of the Database class.
-
- Returns:
- None
"""
metrics_prefix = metrics_prefix.replace('-', '_')
self.port = port
self.interval = interval
- self.vault = kwargs.get('vault', None)
self.database = kwargs.get('database', None)
+ self.running = True
self.thread_status_gauge = Gauge(f'{metrics_prefix}_thread_status', 'Thread status (1 = running, 0 = not running)', ['thread_name'])
- if self.vault:
+ if self.database:
self.access_granted_counter = Gauge(f'{metrics_prefix}_access_granted_total', 'Total number of users granted access')
self.access_denied_counter = Gauge(f'{metrics_prefix}_access_denied_total', 'Total number of users denied access')
- if self.database:
self.processed_messages_counter = Gauge(f'{metrics_prefix}_processed_messages_total', 'Total number of processed messages')
self.queue_length_gauge = Gauge(f'{metrics_prefix}_queue_length', 'Queue length')
@@ -51,15 +61,13 @@ def collect_users_stats(self) -> None:
"""
The method collects information about users access status and updates the gauge.
"""
- users = self.vault.list_secrets(path='data/users')
+ users_dict = self.database.get_users(only_allowed=False)
access_granted_count = 0
access_denied_count = 0
-
- for user in users:
- user_status = json.loads(self.vault.read_secret(path=f'data/users/{user}')['authentication'])
- if user_status.get('status') == 'denied':
+ for user in users_dict:
+ if user.get('status') == 'denied':
access_denied_count += 1
- elif user_status.get('status') == 'allowed':
+ elif user.get('status') == 'allowed':
access_granted_count += 1
self.access_granted_counter.set(access_granted_count)
@@ -77,12 +85,15 @@ def collect_messages_stats(self) -> None:
"""
processed_messages_count = 0
queue_messages_count = 0
- for user in self.database.get_users():
- user_id = user[0]
- processed_messages = self.database.get_user_processed(user_id=user_id)
- queue_messages = self.database.get_user_queue(user_id=user_id)
- processed_messages_count += len(processed_messages.get(user_id, []))
- queue_messages_count = len(queue_messages.get(user_id, []))
+ users_dict = self.database.get_users(only_allowed=False)
+
+ for user in users_dict:
+ processed_messages = self.database.get_user_processed(user_id=user['user_id'])
+ queue_messages = self.database.get_user_queue(user_id=user['user_id'])
+ if processed_messages:
+ processed_messages_count += len(processed_messages)
+ if queue_messages:
+ queue_messages_count += len(queue_messages)
self.processed_messages_counter.set(processed_messages_count)
self.queue_length_gauge.set(queue_messages_count)
@@ -92,11 +103,17 @@ def run(self, threads: list) -> None:
"""
start_http_server(self.port)
log.info('[Metrics]: Metrics server started on port %s', self.port)
- while True:
- if self.vault:
- self.collect_users_stats()
+ while self.running:
if self.database:
+ self.collect_users_stats()
self.collect_messages_stats()
- time.sleep(self.interval)
for thread in threads:
self.update_thread_status(thread.name, thread.is_alive())
+ time.sleep(self.interval)
+
+ def stop(self) -> None:
+ """
+ The method stops the metrics server.
+ """
+ self.running = False
+ log.info('[Metrics]: Metrics server stopped')
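Because run() now blocks on the while self.running loop, the intended lifecycle is to host it in a thread and call stop() for a graceful exit, which is exactly what the new test fixture does later in this patch; a condensed sketch (database is assumed to be a DatabaseClient instance):

    # Sketch: metrics loop in a background thread with graceful shutdown.
    import threading

    metrics = Metrics(port=8000, interval=5, metrics_prefix='pytest', database=database)
    thread = threading.Thread(target=metrics.run, args=(threading.enumerate(),))
    thread.start()
    # ... application runs ...
    metrics.stop()  # the loop exits after the current sleep(interval)
    thread.join()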
diff --git a/src/modules/tools.py b/src/modules/tools.py
index 751fc5877..549cd4898 100644
--- a/src/modules/tools.py
+++ b/src/modules/tools.py
@@ -1,6 +1,8 @@
"""This module contains the tools for this python project"""
+import os
from typing import Union
import hashlib
+from logger import log
def get_hash(data: Union[str, dict] = None) -> str:
@@ -22,3 +24,15 @@ def get_hash(data: Union[str, dict] = None) -> str:
data = str(data)
hasher.update(data.encode('utf-8'))
return hasher.hexdigest()
+
+
+def check_proxy() -> None:
+ """
+ Check if the proxy is set up.
+ """
+ http_proxy = os.environ.get('HTTP_PROXY', None)
+ https_proxy = os.environ.get('HTTPS_PROXY', None)
+ if http_proxy or https_proxy:
+ log.info('[Tools]: Proxy is set up. http: %s, https: %s', http_proxy, https_proxy)
+ else:
+ log.info('[Tools]: Direct connection will be used because the proxy is not set up')
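check_proxy() only reports the proxy state; the routing itself is handled by the requests library, which reads these variables from the environment on each call (trust_env is enabled by default). A sketch with a hypothetical proxy address:

    import os
    import requests

    os.environ['HTTPS_PROXY'] = 'http://proxy.internal:3128'  # hypothetical address
    response = requests.get('https://example.com', timeout=10)  # routed through the proxy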
diff --git a/src/modules/uploader.py b/src/modules/uploader.py
index 011b76da5..aeaec888c 100644
--- a/src/modules/uploader.py
+++ b/src/modules/uploader.py
@@ -57,7 +57,7 @@ def __init__(
if configuration:
self.configuration = configuration
elif not configuration:
- self.configuration = vault.read_secret(path='configuration/uploader-api')
+ self.configuration = vault.kv2engine.read_secret(path='configuration/uploader-api')
else:
raise FailedInitUploaderInstance(
"Failed to initialize the Uploader instance."
@@ -106,17 +106,17 @@ def _check_incomplete_transfers(self) -> None:
Returns:
None
"""
- log.info('[class.%s] Uploader: checking incomplete transfers in the temporary directory...', __class__.__name__)
+ log.info('[Uploader]: Checking incomplete transfers...')
for root, dirs, _ in os.walk(self.configuration['source-directory']):
for dir_name in dirs:
sub_directory = os.path.join(root, dir_name)
# Check the subdirectory for files
sub_files = [f for f in os.listdir(sub_directory) if os.path.isfile(os.path.join(sub_directory, f))]
if sub_files:
- log.warning('[class.%s] Uploader: an unloaded artifact was found: %s', __class__.__name__, sub_directory)
+ log.warning('[Uploader]: An unloaded artifact was found: %s', sub_directory)
self.run_transfers(sub_directory=sub_directory)
else:
- log.info('[class.%s] Uploader: remove of an empty directory %s', __class__.__name__, sub_directory)
+ log.info('[Uploader]: Removing an empty directory %s', sub_directory)
os.rmdir(sub_directory)
def run_transfers(
@@ -137,7 +137,7 @@ def run_transfers(
"""
transfers = {}
result = ""
- log.info('[class.%s] Uploader: preparing media files for transfer to the %s cloud...', __class__.__name__, self.configuration['storage-type'])
+ log.info('[Uploader]: Preparing media files for transfer to the %s...', self.configuration['storage-type'])
for root, _, files in os.walk(f"{self.configuration['source-directory']}{sub_directory}"):
for file in files:
if file.split('.')[-1] in self.configuration.get('exclude-types', None):
@@ -152,7 +152,7 @@ def run_transfers(
result = 'completed'
else:
result = 'not_completed'
- log.info('[class.%s] Uploader: list of all transfers %s', __class__.__name__, transfers)
+ log.info('[Uploader]: List of all transfers %s', transfers)
return result
def upload_to_cloud(
@@ -172,20 +172,20 @@ def upload_to_cloud(
or
None
"""
- log.info('[class.%s] starting upload file %s to %s://%s', __class__.__name__, source, self.configuration['storage-type'], destination)
+ log.info('[Uploader]: Starting upload of file %s to %s://%s', source, self.configuration['storage-type'], destination)
response = None
result = None
if self.configuration['storage-type'] == 'mega':
directory = f"{self.configuration['destination-directory']}/{destination}"
- log.info('[class.%s] Uploader: trying found mega folder %s...', __class__.__name__, directory)
+ log.info('[Uploader]: Trying to find mega folder %s...', directory)
mega_folder = self.storage.find(directory, exclude_deleted=True)
if not mega_folder:
self.storage.create_folder(directory)
mega_folder = self.storage.find(directory, exclude_deleted=True)
- log.info('[class.%s] Uploader: mega folder not found, created new folder %s', __class__.__name__, mega_folder)
+ log.info('[Uploader]: Mega folder not found, created new folder %s', mega_folder)
else:
- log.info('[class.%s] Uploader: mega folder %s was found', __class__.__name__, mega_folder)
+ log.info('[Uploader]: Mega folder %s was found', mega_folder)
response = self.storage.upload(filename=source, dest=mega_folder[0])
result = "uploaded"
@@ -205,5 +205,5 @@ def upload_to_cloud(
response = self.storage.info(f"{self.configuration['destination-directory']}/{destination}/{source.split('/')[-1]}")['etag']
result = "uploaded"
- log.info('[class.%s] Uploader: %s successful transferred', __class__.__name__, response)
+ log.info('[Uploader]: %s successfully transferred', response)
return result
diff --git a/tests/conftest.py b/tests/conftest.py
index d2930de7a..5492d44f2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -2,13 +2,17 @@
This module stores fixtures for performing tests.
"""
import os
-import sys
-import subprocess
import time
+import threading
import requests
import pytest
+import hvac
+import psycopg2
+from psycopg2 import sql
# pylint: disable=E0401
from vault import VaultClient
+from src.modules.database import DatabaseClient
+from src.modules.metrics import Metrics
def pytest_configure(config):
@@ -30,41 +34,16 @@ def test_example():
config.addinivalue_line("markers", "order: Set the execution order of tests")
-@pytest.fixture(name="prepare_dev_environment", scope='session')
-def fixture_prepare_dev_environment():
+@pytest.fixture(name="vault_url", scope='session')
+def fixture_vault_url():
"""
Check the availability of the Vault server and return its URL
- """
- if not os.getenv("CI"):
- if not os.getenv("TG_USERID"):
- print("You need to set the TG_USER_ID environment variable to run the tests (telegram user-id)")
- sys.exit(1)
- if not os.getenv("TG_TOKEN"):
- print("You need to set the TG_TOKEN environment variable to run the tests (telegram token)")
- sys.exit(1)
- command = (
- "vault=$(docker ps -a | grep vault | awk '{print $1}') && "
- "bot=$(docker ps -a | grep pyinstabot-downloader | awk '{print $1}') && "
- "[ -n '$vault' ] && docker container rm -f $vault && "
- "[ -n '$bot' ] && docker container rm -f $bot && "
- "docker compose -f docker-compose.dev.yml up -d"
- )
- with subprocess.Popen(command, shell=True):
- print("Running dev environment...")
- return 'ready'
- return None
+ Returns:
+ str: The URL of the Vault server.
+ """
-@pytest.fixture(name="vault_url", scope='session')
-def fixture_vault_url(prepare_dev_environment):
- """Prepare a local environment or ci environment and return the URL of the Vault server"""
- _ = prepare_dev_environment
- # prepare vault for local environment
- if not os.getenv("CI"):
- url = "http://0.0.0.0:8200"
- # prepare vault for ci environment
- else:
- url = "http://localhost:8200"
+ url = "http://0.0.0.0:8200"
# checking the availability of the vault server
while True:
try:
@@ -77,76 +56,244 @@ def fixture_vault_url(prepare_dev_environment):
return url
-@pytest.fixture(name="name", scope='session')
-def fixture_name():
- """Returns the project name"""
- return "pyinstabot-downloader"
+@pytest.fixture(name="namespace", scope='session')
+def fixture_namespace():
+ """
+ Returns the namespace for the tests
+
+ Returns:
+ str: The namespace for the tests.
+ """
+ return "pytest"
@pytest.fixture(name="policy_path", scope='session')
def fixture_policy_path():
- """Returns the policy path"""
+ """
+ Returns the policy path for the tests
+
+ Returns:
+ str: The policy path for the tests.
+ """
return "tests/vault/policy.hcl"
-@pytest.fixture(name="vault_approle", scope='session')
-def fixture_vault_approle(vault_url, name, policy_path):
- """Prepare a temporary Vault instance and return the Vault client"""
- configurator = VaultClient(
- url=vault_url,
- name=name,
- new=True
+@pytest.fixture(name="psql_tables_path", scope='session')
+def fixture_psql_tables_path():
+ """
+ Returns the path to the postgres sql file with tables
+
+ Returns:
+ str: The path to the postgres sql file with tables.
+ """
+ return "tests/postgres/tables.sql"
+
+
+@pytest.fixture(name="postgres_url", scope='session')
+def fixture_postgres_url(namespace):
+ """
+ Returns the postgres url for the tests
+
+ Returns:
+ str: The postgres url.
+ """
+ database_name = namespace
+ return f"postgresql://{{{{username}}}}:{{{{password}}}}@postgres:5432/{database_name}?sslmode=disable"
+
+
+@pytest.fixture(name="postgres_instance", scope='session')
+def fixture_postgres_instance(psql_tables_path, namespace):
+ """
+ Prepare the postgres database for tests, return the connection and cursor.
+
+ Returns:
+ tuple: The connection and cursor objects for the postgres database.
+ """
+ pytest_db_name = namespace
+ original_db_name = "postgres"
+
+ # Connect to the default 'postgres' database to create a new test database
+ connection = psycopg2.connect(
+ host='0.0.0.0',
+ port=5432,
+ user='postgres',
+ password='postgres',
+ dbname=original_db_name
)
- namespace = configurator.create_namespace(
- name=name
+ connection.autocommit = True
+ cursor = connection.cursor()
+
+ try:
+ # Create a new pytest database
+ cursor.execute(sql.SQL("CREATE DATABASE {}").format(
+ sql.Identifier(pytest_db_name)
+ ))
+ except Exception as error:
+ print(f"Failed to create database {pytest_db_name}: {error}")
+ raise
+ finally:
+ cursor.close()
+ connection.close()
+
+ # Connect to the newly created test database
+ pytest_connection = psycopg2.connect(
+ host='0.0.0.0',
+ port=5432,
+ user='postgres',
+ password='postgres',
+ dbname=pytest_db_name
)
- policy = configurator.create_policy(
- name=name,
- path=policy_path
+ pytest_cursor = pytest_connection.cursor()
+
+ # Execute the SQL script to create tables
+ with open(psql_tables_path, 'r', encoding='utf-8') as sql_file:
+ sql_script = sql_file.read()
+ pytest_cursor.execute(sql_script)
+ pytest_connection.commit()
+
+ yield pytest_connection, pytest_cursor
+
+ pytest_cursor.close()
+ pytest_connection.close()
+
+
+@pytest.fixture(name="prepare_vault", scope='session')
+def fixture_prepare_vault(vault_url, namespace, policy_path, postgres_url, postgres_instance):
+ """
+ Prepares the Vault server for the tests and returns the AppRole credentials
+
+ Returns:
+ dict: A dictionary with the AppRole 'id' and 'secret-id'.
+ """
+ # Wait for the postgres database to be ready
+ _ = postgres_instance
+
+ # Initialize the vault
+ client = hvac.Client(url=vault_url)
+ init_data = client.sys.initialize()
+
+ # Unseal the vault
+ if client.sys.is_sealed():
+ client.sys.submit_unseal_keys(keys=[init_data['keys'][0], init_data['keys'][1], init_data['keys'][2]])
+ # Authenticate in the vault server using the root token
+ client = hvac.Client(url=vault_url, token=init_data['root_token'])
+
+ # Create policy
+ with open(policy_path, 'rb') as policyfile:
+ _ = client.sys.create_or_update_policy(
+ name=namespace,
+ policy=policyfile.read().decode("utf-8"),
)
- return configurator.create_approle(
- name=name,
+
+ # Create Namespace
+ _ = client.sys.enable_secrets_engine(
+ backend_type='kv',
path=namespace,
- policy=policy
+ options={'version': 2}
)
+ # Prepare AppRole for the namespace
+ client.sys.enable_auth_method(
+ method_type='approle',
+ path=namespace
+ )
+ _ = client.auth.approle.create_or_update_approle(
+ role_name=namespace,
+ token_policies=[namespace],
+ token_type='service',
+ secret_id_num_uses=0,
+ token_num_uses=0,
+ token_ttl='360s',
+ bind_secret_id=True,
+ token_no_default_policy=True,
+ mount_point=namespace
+ )
+ approle_adapter = hvac.api.auth_methods.AppRole(client.adapter)
+
+ # Prepare database engine configuration
+ client.sys.enable_secrets_engine(
+ backend_type='database',
+ path='database'
+ )
+
+ # Configure database engine
+ configuration = client.secrets.database.configure(
+ name="postgresql",
+ plugin_name="postgresql-database-plugin",
+ verify_connection=False,
+ allowed_roles=["pytest"],
+ username="postgres",
+ password="postgres",
+ connection_url=postgres_url
+ )
+ print(f"Configured database engine: {configuration}")
+
+ # Create role for the database
+ statement = [
+ "CREATE ROLE \"{{name}}\" WITH LOGIN PASSWORD '{{password}}' VALID UNTIL '{{expiration}}';",
+ "GRANT ALL PRIVILEGES ON SCHEMA public TO \"{{name}}\";",
+ "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"{{name}}\";",
+ "GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"{{name}}\";"
+ ]
+ role = client.secrets.database.create_role(
+ name="pytest",
+ db_name="postgresql",
+ creation_statements=statement,
+ default_ttl="1h",
+ max_ttl="24h"
+ )
+ print(f"Created role: {role}")
+
+ # Return the role_id, secret_id and db_role
+ return {
+ 'id': approle_adapter.read_role_id(role_name=namespace, mount_point=namespace)["data"]["role_id"],
+ 'secret-id': approle_adapter.generate_secret_id(role_name=namespace, mount_point=namespace)["data"]["secret_id"]
+ }
+
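To sanity-check the engine configuration above, dynamic credentials can also be requested directly through hvac; the returned username and password are what Vault substitutes into the templated connection_url and creation_statements ({{name}}, {{password}}, {{expiration}}). A sketch:

    # Sketch: requesting short-lived PostgreSQL credentials for the 'pytest' role.
    creds = client.secrets.database.generate_credentials(name='pytest')
    print(creds['data']['username'], creds['data']['password'])  # ephemeral login pair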
@pytest.fixture(name="vault_instance", scope='session')
-def fixture_vault_instance(vault_url, vault_approle, name):
- """Returns an initialized vault instance"""
+def fixture_vault_instance(vault_url, namespace, prepare_vault):
+ """
+ Returns a configured Vault client for the tests
+
+ Returns:
+ object: The vault client.
+ """
return VaultClient(
url=vault_url,
- name=name,
- approle=vault_approle
+ namespace=namespace,
+ auth={
+ 'type': 'approle',
+ 'approle': {
+ 'id': prepare_vault['id'],
+ 'secret-id': prepare_vault['secret-id']
+ }
+ }
)
@pytest.fixture(name="vault_configuration_data", scope='session')
-def fixture_vault_configuration_data(vault_instance):
+def fixture_vault_configuration_data(vault_instance, namespace):
"""
This function sets up a database configuration in the vault_instance object.
Args:
vault_instance: An instance of the Vault class.
-
- Returns:
- None
"""
database = {
- 'host': 'postgres',
+ 'host': '0.0.0.0',
'port': '5432',
- 'user': 'python',
- 'password': 'python',
- 'database': 'pyinstabot-downloader'
+ 'dbname': namespace,
+ 'connections': '10'
}
for key, value in database.items():
- _ = vault_instance.write_secret(
+ _ = vault_instance.kv2engine.write_secret(
path='configuration/database',
key=key,
value=value
)
- _ = vault_instance.write_secret(
+ _ = vault_instance.kv2engine.write_secret(
path='configuration/telegram',
key='token',
value=os.getenv("TG_TOKEN")
@@ -168,23 +315,242 @@ def fixture_vault_configuration_data(vault_instance):
}
user_id = os.getenv("TG_USERID")
for key, value in user_attributes.items():
- _ = vault_instance.write_secret(
+ _ = vault_instance.kv2engine.write_secret(
path=f'configuration/users/{user_id}',
key=key,
value=value
)
+ bot_configurations = [
+ {
+ 'path': 'configuration/downloader-api',
+ 'data': {
+ 'enabled': 'false',
+ }
+ },
+ {
+ 'path': 'configuration/uploader-api',
+ 'data': {
+ 'enabled': 'false',
+ }
+ }
+ ]
+ for configuration in bot_configurations:
+ for key, value in configuration['data'].items():
+ _ = vault_instance.kv2engine.write_secret(
+ path=configuration['path'],
+ key=key,
+ value=value
+ )
- test_owner = {
- "eiD5aech8Oh": "downloaded",
- "eiD5aech8Oa": "downloaded",
- "eiD5aech8Oq": "downloaded",
- "eiD5aech8Ol": "downloaded",
- "eiD5aech8Op": "downloaded",
- "eiD5aech8Oy": "downloaded"
- }
- for key, value in test_owner.items():
- _ = vault_instance.write_secret(
- path='history/testOwner',
- key=key,
- value=value
+
+@pytest.fixture(name="database_class", scope='session')
+def fixture_database_class(vault_instance, namespace):
+ """
+ Returns the database class
+
+ Returns:
+ object: The database class.
+ """
+ return DatabaseClient(vault=vault_instance, db_role=namespace)
+
+
+@pytest.fixture(name="metrics_class", scope='session')
+def fixture_metrics_class(database_class, postgres_users_test_data, postgres_queue_test_data, postgres_processed_test_data):
+ """
+ Returns the metrics class
+ """
+ _ = postgres_users_test_data
+ _ = postgres_queue_test_data
+ _ = postgres_processed_test_data
+
+ metrics = Metrics(port=8000, interval=5, metrics_prefix='pytest', database=database_class)
+ threads_list = threading.enumerate()
+ metrics_thread = threading.Thread(target=metrics.run, args=(threads_list,))
+ metrics_thread.start()
+ time.sleep(10)
+ yield metrics
+ metrics.stop()
+ metrics_thread.join()
+
+
+@pytest.fixture(name="postgres_messages_test_data", scope='session')
+def fixture_postgres_messages_test_data(postgres_instance):
+ """
+ This function sets up test data in the messages table in the postgres database.
+
+ Args:
+ postgres_instance: A tuple containing the connection and cursor objects for the postgres database.
+ """
+ conn, cursor = postgres_instance
+ cursor.execute(
+ "INSERT INTO messages (message_id, chat_id, created_at, updated_at, message_type, producer, message_content_hash, state) "
+ "VALUES ('123456', '123456', '2024-08-27 00:00:00', '2024-08-27 00:00:00', 'status_message', 'pytest', 'hash', 'updating')"
+ )
+ conn.commit()
+
+
+@pytest.fixture(name="postgres_users_test_data", scope='session')
+def fixture_postgres_users_test_data(postgres_instance):
+ """
+ This function sets up test data in the users table in the postgres database.
+
+ Args:
+ postgres_instance: A tuple containing the connection and cursor objects for the postgres database.
+ """
+ data = [
+ {
+ 'user_id': 'test_user_1',
+ 'chat_id': 'test_chat_1',
+ 'status': 'allowed'
+ },
+ {
+ 'user_id': 'test_user_2',
+ 'chat_id': 'test_chat_2',
+ 'status': 'denied'
+ },
+ {
+ 'user_id': 'test_user_3',
+ 'chat_id': 'test_chat_3',
+ 'status': 'allowed'
+ },
+ {
+ 'user_id': 'test_user_4',
+ 'chat_id': 'test_chat_4',
+ 'status': 'allowed'
+ },
+ {
+ 'user_id': 'test_user_5',
+ 'chat_id': 'test_chat_5',
+ 'status': 'allowed'
+ },
+ {
+ 'user_id': 'test_user_6',
+ 'chat_id': 'test_chat_6',
+ 'status': 'allowed'
+ },
+ ]
+ conn, cursor = postgres_instance
+ for user in data:
+ cursor.execute(
+ "INSERT INTO users (user_id, chat_id, status) VALUES (%s, %s, %s)",
+ (user['user_id'], user['chat_id'], user['status'])
+ )
+ conn.commit()
+
+
+@pytest.fixture(name="postgres_queue_test_data", scope='session')
+def fixture_postgres_queue_test_data(postgres_instance):
+ """
+ This function sets up test data in the queue table in the postgres database.
+ """
+ data = [
+ {
+ 'user_id': 'test_user_1',
+ 'post_id': 'test_post_1',
+ 'post_url': 'https://example.com/p/test_post_1',
+ 'post_owner': 'test_owner_1',
+ 'link_type': 'post',
+ 'message_id': 'test_message_1',
+ 'chat_id': 'test_chat_1',
+ 'scheduled_time': '2024-08-27 00:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started',
+ 'state': 'waiting'
+ },
+ {
+ 'user_id': 'test_user_2',
+ 'post_id': 'test_post_2',
+ 'post_url': 'https://example.com/p/test_post_2',
+ 'post_owner': 'test_owner_2',
+ 'link_type': 'post',
+ 'message_id': 'test_message_2',
+ 'chat_id': 'test_chat_2',
+ 'scheduled_time': '2024-08-27 00:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started',
+ 'state': 'waiting'
+ },
+ {
+ 'user_id': 'test_user_3',
+ 'post_id': 'test_post_3',
+ 'post_url': 'https://example.com/p/test_post_3',
+ 'post_owner': 'test_owner_3',
+ 'link_type': 'post',
+ 'message_id': 'test_message_3',
+ 'chat_id': 'test_chat_3',
+ 'scheduled_time': '2024-08-27 00:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started',
+ 'state': 'waiting'
+ }
+ ]
+ conn, cursor = postgres_instance
+ for message in data:
+ cursor.execute(
+ "INSERT INTO queue "
+ "(user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, scheduled_time, download_status, upload_status, state) "
+ "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
+ (
+ message['user_id'], message['post_id'], message['post_url'], message['post_owner'], message['link_type'],
+ message['message_id'], message['chat_id'], message['scheduled_time'], message['download_status'],
+ message['upload_status'], message['state']
+ )
+ )
+ conn.commit()
+
+
+@pytest.fixture(name="postgres_processed_test_data", scope='session')
+def fixture_postgres_processed_test_data(postgres_instance):
+ """
+ This function sets up test data in the processed table in the postgres database.
+ """
+ data = [
+ {
+ 'user_id': 'test_user_4',
+ 'post_id': 'test_post_4',
+ 'post_url': 'https://example.com/p/test_post_4',
+ 'post_owner': 'test_owner_4',
+ 'link_type': 'post',
+ 'message_id': 'test_message_4',
+ 'chat_id': 'test_chat_4',
+ 'download_status': 'completed',
+ 'upload_status': 'completed',
+ 'state': 'processed'
+ },
+ {
+ 'user_id': 'test_user_5',
+ 'post_id': 'test_post_5',
+ 'post_url': 'https://example.com/p/test_post_5',
+ 'post_owner': 'test_owner_5',
+ 'link_type': 'post',
+ 'message_id': 'test_message_5',
+ 'chat_id': 'test_chat_5',
+ 'download_status': 'completed',
+ 'upload_status': 'completed',
+ 'state': 'processed'
+ },
+ {
+ 'user_id': 'test_user_6',
+ 'post_id': 'test_post_6',
+ 'post_url': 'https://example.com/p/test_post_6',
+ 'post_owner': 'test_owner_6',
+ 'link_type': 'post',
+ 'message_id': 'test_message_6',
+ 'chat_id': 'test_chat_6',
+ 'download_status': 'completed',
+ 'upload_status': 'completed',
+ 'state': 'processed'
+ }
+ ]
+ conn, cursor = postgres_instance
+ for message in data:
+ cursor.execute(
+ "INSERT INTO processed (user_id, post_id, post_url, post_owner, link_type, message_id, chat_id, download_status, upload_status, state) "
+ "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
+ (
+ message['user_id'], message['post_id'], message['post_url'], message['post_owner'], message['link_type'],
+ message['message_id'], message['chat_id'], message['download_status'], message['upload_status'],
+ message['state']
+ )
)
+ conn.commit()
diff --git a/tests/postgres/tables.sql b/tests/postgres/tables.sql
new file mode 100644
index 000000000..d76463748
--- /dev/null
+++ b/tests/postgres/tables.sql
@@ -0,0 +1,73 @@
+-- Schema for the users table
+CREATE TABLE users (
+ id serial PRIMARY KEY,
+ user_id VARCHAR (50) UNIQUE NOT NULL,
+ chat_id VARCHAR (50) NOT NULL,
+ status VARCHAR (50) NOT NULL DEFAULT 'denied'
+);
+
+-- Schema for the users_requests table
+CREATE TABLE users_requests (
+ id serial PRIMARY KEY,
+ user_id VARCHAR (50) NOT NULL,
+ message_id VARCHAR (50),
+ chat_id VARCHAR (50),
+ authentication VARCHAR (50) NOT NULL,
+ "authorization" VARCHAR (255) NOT NULL,
+ timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ rate_limits TIMESTAMP
+);
+
+-- Schema for queue table
+CREATE TABLE queue (
+ id serial PRIMARY KEY,
+ user_id VARCHAR (50) NOT NULL,
+ post_id VARCHAR (50) NOT NULL,
+ post_url VARCHAR (255) NOT NULL,
+ post_owner VARCHAR (50) NOT NULL,
+ link_type VARCHAR (50) NOT NULL DEFAULT 'post',
+ message_id VARCHAR (50) NOT NULL,
+ chat_id VARCHAR (50) NOT NULL,
+ scheduled_time TIMESTAMP NOT NULL,
+ download_status VARCHAR (50) NOT NULL DEFAULT 'not started',
+ upload_status VARCHAR (50) NOT NULL DEFAULT 'not started',
+ timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ state VARCHAR (50) NOT NULL DEFAULT 'waiting'
+);
+
+-- Schema for the processed table
+CREATE TABLE processed (
+ id serial PRIMARY KEY,
+ user_id VARCHAR (50) NOT NULL,
+ post_id VARCHAR (50) NOT NULL,
+ post_url VARCHAR (255) NOT NULL,
+ post_owner VARCHAR (50) NOT NULL,
+ link_type VARCHAR (50) NOT NULL DEFAULT 'post',
+ message_id VARCHAR (50) NOT NULL,
+ chat_id VARCHAR (50) NOT NULL,
+ download_status VARCHAR (50) NOT NULL,
+ upload_status VARCHAR (50) NOT NULL,
+ timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ state VARCHAR (50) NOT NULL DEFAULT 'processed'
+);
+
+-- Schema for the migrations table
+CREATE TABLE migrations (
+ id serial PRIMARY KEY,
+ name VARCHAR (255) NOT NULL,
+ version VARCHAR (255) NOT NULL,
+ timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Schema for the messages table
+CREATE TABLE messages (
+ id serial PRIMARY KEY,
+ message_id VARCHAR (50) NOT NULL,
+ chat_id VARCHAR (50) NOT NULL,
+ created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ message_type VARCHAR (50) NOT NULL,
+ producer VARCHAR (50) NOT NULL,
+ message_content_hash VARCHAR (64) NOT NULL,
+ state VARCHAR (50) NOT NULL DEFAULT 'added'
+);
diff --git a/tests/test_database.py b/tests/test_database.py
new file mode 100644
index 000000000..e365af23b
--- /dev/null
+++ b/tests/test_database.py
@@ -0,0 +1,427 @@
+"""
+This module contains tests for the database module.
+"""
+import os
+import sys
+import json
+import importlib
+from datetime import datetime, timedelta
+import pytest
+import psycopg2
+from psycopg2 import pool
+from src.modules.tools import get_hash
+from src.modules.database import DatabaseClient
+
+
+# pylint: disable=too-many-locals
+@pytest.mark.order(2)
+def test_init_database_client(vault_configuration_data, postgres_instance, database_class):
+ """
+ Checking an initialized database client
+ """
+ _ = vault_configuration_data
+ _, cursor = postgres_instance
+
+ # Check general attributes
+ assert isinstance(database_class.vault, object)
+ assert isinstance(database_class.db_role, str)
+ assert isinstance(database_class.database_connections, pool.SimpleConnectionPool)
+
+ # Check tables creation in the database
+ cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema = 'public'")
+ tables_list = cursor.fetchall()
+ tables_configuration_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../src/configs/databases.json'))
+ with open(tables_configuration_path, encoding='UTF-8') as config_file:
+ database_init_configuration = json.load(config_file)
+ for table in database_init_configuration.get('Tables', None):
+ if table['name'] not in [t[2] for t in tables_list]:
+ assert False
+
+ # Check migrations execution in the database
+ cursor.execute("SELECT name, version FROM migrations")
+ migrations_list = cursor.fetchall()
+ assert len(migrations_list) > 0
+
+ migrations_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../src/migrations'))
+ sys.path.append(migrations_dir)
+ migration_files = [f for f in os.listdir(migrations_dir) if f.endswith('.py')]
+ migration_files.sort()
+ for migration_file in migration_files:
+ if not migration_file.endswith('.py'):
+ assert False
+ else:
+ migration_module_name = migration_file[:-3]
+ migration_module = importlib.import_module(name=migration_module_name)
+ version = getattr(migration_module, 'VERSION', migration_module_name)
+ name = getattr(migration_module, 'NAME', migration_module_name)
+ if (name, version) not in migrations_list:
+ print(f"Not found migration {name}:{version} in {migrations_list}")
+ assert False
+
+
+@pytest.mark.order(4)
+def test_reset_stale_messages(postgres_instance, postgres_messages_test_data, vault_instance, namespace):
+ """
+ Checking the reset of stale messages when the database client is initialized
+ """
+ _, cursor = postgres_instance
+ _ = postgres_messages_test_data
+ # Reinitialize the database class for triggering the reset of stale messages
+ # Create new instance of the DatabaseClient class because private method _reset_stale_records() is launched only when the class is initialized
+ _ = DatabaseClient(vault=vault_instance, db_role=namespace)
+
+ # Check the reset of stale messages
+ cursor.execute("SELECT state FROM messages")
+ messages_list = cursor.fetchall()
+ assert len(messages_list) > 0
+ for message in messages_list:
+ assert message[0] == 'updated'
+
+
+@pytest.mark.order(5)
+def test_database_connection(postgres_instance, database_class):
+ """
+ Checking the database connection
+ """
+ _ = postgres_instance
+ connection = database_class.get_connection()
+ assert isinstance(connection, psycopg2.extensions.connection)
+ assert not connection.closed
+ database_class.close_connection(connection)
+
+
+@pytest.mark.order(6)
+def test_messages_queue(database_class):
+ """
+ Checking the addition of a message to the queue and extraction of a message from the queue
+ """
+ data = {
+ 'user_id': 'test_case_6',
+ 'post_id': 'test_case_6',
+ 'post_url': 'https://example.com/p/test_case_6',
+ 'post_owner': 'test_case_6',
+ 'link_type': 'post',
+ 'message_id': 'test_case_6',
+ 'chat_id': 'test_case_6',
+ 'scheduled_time': '2022-01-01 12:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ }
+ status = database_class.add_message_to_queue(data=data)
+
+ # Check the addition of a message to the queue
+ queue_message = database_class.get_message_from_queue(scheduled_time=data['scheduled_time'])
+ queue_item = {}
+ queue_item['user_id'] = queue_message[1]
+ queue_item['post_id'] = queue_message[2]
+ queue_item['post_url'] = queue_message[3]
+ queue_item['post_owner'] = queue_message[4]
+ queue_item['link_type'] = queue_message[5]
+ queue_item['message_id'] = queue_message[6]
+ queue_item['chat_id'] = queue_message[7]
+ queue_item['scheduled_time'] = datetime.strftime(queue_message[8], '%Y-%m-%d %H:%M:%S')
+ queue_item['download_status'] = queue_message[9]
+ queue_item['upload_status'] = queue_message[10]
+ assert status == f"{data['message_id']}: added to queue"
+ assert queue_item == data
+
+
+@pytest.mark.order(7)
+def test_change_message_state_in_queue(database_class, postgres_instance):
+ """
+ Checking the change of the message state in the queue
+ """
+ _, cursor = postgres_instance
+ data = {
+ 'user_id': 'test_case_7',
+ 'post_id': 'test_case_7',
+ 'post_url': 'https://example.com/p/test_case_7',
+ 'post_owner': 'test_case_7',
+ 'link_type': 'post',
+ 'message_id': 'test_case_7',
+ 'chat_id': 'test_case_7',
+ 'scheduled_time': '2022-01-01 12:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ }
+ status = database_class.add_message_to_queue(data=data)
+ assert status == f"{data['message_id']}: added to queue"
+
+ # Check the change of the message state in the queue
+ updated_status = database_class.update_message_state_in_queue(
+ post_id=data['post_id'],
+ state='processed',
+ download_status='completed',
+ upload_status='completed',
+ post_owner=data['post_owner']
+ )
+ assert updated_status == f"{data['message_id']}: processed"
+
+ # Check records in database
+ cursor.execute(f"SELECT post_id FROM queue WHERE post_id = '{data['post_id']}'")
+ record_queue = cursor.fetchall()
+ assert record_queue == []
+ cursor.execute(f"SELECT post_id, state, upload_status, download_status FROM processed WHERE post_id = '{data['post_id']}'")
+ record_processed = cursor.fetchall()
+ assert record_processed != []
+ assert record_processed[0][0] == data['post_id']
+ assert record_processed[0][1] == 'processed'
+ assert record_processed[0][2] == 'completed'
+ assert record_processed[0][3] == 'completed'
+
+
+@pytest.mark.order(8)
+def test_change_message_schedule_time_in_queue(database_class, postgres_instance):
+ """
+ Checking the change of the message schedule time in the queue
+ """
+ _, cursor = postgres_instance
+ data = {
+ 'user_id': 'test_case_8',
+ 'post_id': 'test_case_8',
+ 'post_url': 'https://example.com/p/test_case_8',
+ 'post_owner': 'test_case_8',
+ 'link_type': 'post',
+ 'message_id': 'test_case_8',
+ 'chat_id': 'test_case_8',
+ 'scheduled_time': '2022-01-01 12:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ }
+ status = database_class.add_message_to_queue(data=data)
+ assert status == f"{data['message_id']}: added to queue"
+
+ # Check the change of the message schedule time in the queue
+ status = database_class.update_schedule_time_in_queue(
+ post_id=data['post_id'],
+ user_id=data['user_id'],
+ scheduled_time='2022-01-02 13:00:00'
+ )
+ assert status == f"{data['post_id']}: scheduled time updated"
+
+ # Check records in database
+ cursor.execute(f"SELECT scheduled_time FROM queue WHERE post_id = '{data['post_id']}'")
+ record_queue = cursor.fetchall()
+ assert record_queue is not None
+ assert record_queue[0][0] == datetime.strptime('2022-01-02 13:00:00', '%Y-%m-%d %H:%M:%S')
+
+
+@pytest.mark.order(9)
+def test_get_user_queue(database_class):
+ """
+ Checking the extraction of the user queue
+ """
+ user_id = 'test_case_9'
+ timestamp = datetime.now()
+ data = [
+ {
+ 'user_id': user_id,
+ 'post_id': 'test_case_9_1',
+ 'post_url': 'https://example.com/p/test_case_9_1',
+ 'post_owner': 'test_case_9',
+ 'link_type': 'post',
+ 'message_id': 'test_case_9_1',
+ 'chat_id': 'test_case_9',
+ 'scheduled_time': timestamp + timedelta(hours=1),
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ },
+ {
+ 'user_id': user_id,
+ 'post_id': 'test_case_9_2',
+ 'post_url': 'https://example.com/p/test_case_9_2',
+ 'post_owner': 'test_case_9',
+ 'link_type': 'post',
+ 'message_id': 'test_case_9_2',
+ 'chat_id': 'test_case_9',
+ 'scheduled_time': timestamp - timedelta(hours=2),
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ },
+ {
+ 'user_id': user_id,
+ 'post_id': 'test_case_9_3',
+ 'post_url': 'https://example.com/p/test_case_9_3',
+ 'post_owner': 'test_case_9',
+ 'link_type': 'post',
+ 'message_id': 'test_case_9_3',
+ 'chat_id': 'test_case_9',
+ 'scheduled_time': timestamp + timedelta(hours=3),
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ }
+ ]
+ for message in data:
+ status = database_class.add_message_to_queue(data=message)
+ assert status == f"{message['message_id']}: added to queue"
+
+ # Validate the extraction of the user queue (now directly a list)
+ user_queue = database_class.get_user_queue(user_id=user_id)
+ expected_response = sorted([
+ {
+ 'post_id': entry['post_id'],
+ 'scheduled_time': entry['scheduled_time']
+ }
+ for entry in data
+ ], key=lambda x: x['scheduled_time'])
+
+ assert user_queue is not None
+ assert len(user_queue) == len(data)
+ assert user_queue == expected_response
+
+
+@pytest.mark.order(10)
+def test_get_user_processed_data(database_class, postgres_instance):
+ """
+ Checking the extraction of the user processed data
+ """
+ _, cursor = postgres_instance
+ user_id = 'test_case_10'
+ timestamp = datetime.now()
+ data = [
+ {
+ 'user_id': user_id,
+ 'post_id': 'test_case_10_1',
+ 'post_url': 'https://example.com/p/test_case_10_1',
+ 'post_owner': 'test_case_10',
+ 'link_type': 'post',
+ 'message_id': 'test_case_10_1',
+ 'chat_id': 'test_case_10',
+ 'scheduled_time': timestamp + timedelta(hours=1),
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ },
+ {
+ 'user_id': user_id,
+ 'post_id': 'test_case_10_2',
+ 'post_url': 'https://example.com/p/test_case_10_2',
+ 'post_owner': 'test_case_10',
+ 'link_type': 'post',
+ 'message_id': 'test_case_10_2',
+ 'chat_id': 'test_case_10',
+ 'scheduled_time': timestamp - timedelta(hours=2),
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ },
+ {
+ 'user_id': user_id,
+ 'post_id': 'test_case_10_3',
+ 'post_url': 'https://example.com/p/test_case_10_3',
+ 'post_owner': 'test_case_10',
+ 'link_type': 'post',
+ 'message_id': 'test_case_10_3',
+ 'chat_id': 'test_case_10',
+ 'scheduled_time': timestamp + timedelta(hours=3),
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ }
+ ]
+ for message in data:
+ status = database_class.add_message_to_queue(data=message)
+ assert status == f"{message['message_id']}: added to queue"
+ status = database_class.update_message_state_in_queue(
+ post_id=message['post_id'],
+ state='processed',
+ download_status='completed',
+ upload_status='completed',
+ post_owner=message['post_owner']
+ )
+ assert status == f"{message['post_id']}: processed"
+
+ user_processed = database_class.get_user_processed(user_id=user_id)
+ user_queue = database_class.get_user_queue(user_id=user_id)
+
+ for message in data:
+ if user_queue:
+ for q_message in user_queue:
+ assert message['post_id'] != q_message['post_id']
+
+ if user_processed:
+ found = False
+ assert len(user_processed) == len(data)
+ for p_message in user_processed:
+ if message['post_id'] == p_message['post_id']:
+ found = True
+ if not found:
+ print(f"Message {message['post_id']} not found in processed: {user_processed}")
+ assert False
+ else:
+ assert True
+ else:
+ cursor.execute("SELECT * FROM processed")
+ print(cursor.fetchall())
+ assert False
+
+
+@pytest.mark.order(11)
+def test_check_message_uniqueness(database_class):
+ """
+ Checking the uniqueness of the message
+ """
+ data = {
+ 'user_id': 'test_case_11',
+ 'post_id': 'test_case_11',
+ 'post_url': 'https://example.com/p/test_case_11',
+ 'post_owner': 'test_case_11',
+ 'link_type': 'post',
+ 'message_id': 'test_case_11',
+ 'chat_id': 'test_case_11',
+ 'scheduled_time': '2022-01-02 13:00:00',
+ 'download_status': 'not started',
+ 'upload_status': 'not started'
+ }
+ uniqueness = database_class.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id'])
+ assert uniqueness is True
+
+ status = database_class.add_message_to_queue(data=data)
+ assert status == f"{data['message_id']}: added to queue"
+ uniqueness = database_class.check_message_uniqueness(post_id=data['post_id'], user_id=data['user_id'])
+ assert uniqueness is False
+
+
+@pytest.mark.order(12)
+def test_service_messages(database_class):
+ """
+ Checking the registration of service messages
+ """
+ data = {
+ 'message_id': 'test_case_12',
+ 'chat_id': 'test_case_12',
+ 'message_content': 'Test case 12',
+ 'message_type': 'status_message',
+ 'state': 'updated'
+ }
+
+ # Keep new status_message
+ status = database_class.keep_message(**data)
+ assert status == f"{data['message_id']} kept"
+ new_message = database_class.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id'])
+ assert new_message[0] == data['message_id']
+ assert new_message[1] == data['chat_id']
+ assert new_message[4] == get_hash(data['message_content'])
+ assert new_message[5] == 'added'
+
+ # Update exist message
+ data['message_content'] = 'Updated message'
+ status = database_class.keep_message(**data)
+ assert status == f"{data['message_id']} updated"
+ updated_message = database_class.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id'])
+ assert updated_message[0] == data['message_id']
+ assert updated_message[1] == data['chat_id']
+ assert updated_message[2] != updated_message[3]
+ assert updated_message[3] != new_message[3]
+ assert updated_message[4] == get_hash(data['message_content'])
+ assert updated_message[5] == 'updated'
+
+    # Recreate existing message
+ data['message_content'] = 'Recreated message'
+ status = database_class.keep_message(**data, recreated=True)
+ assert status == f"{data['message_id']} recreated"
+ recreated_message = database_class.get_considered_message(message_type=data['message_type'], chat_id=data['chat_id'])
+ assert recreated_message[0] == data['message_id']
+ assert recreated_message[1] == data['chat_id']
+ assert recreated_message[2] == recreated_message[3]
+ assert recreated_message[2] != updated_message[2]
+ assert recreated_message[3] != updated_message[3]
+ assert recreated_message[4] == get_hash(data['message_content'])
+ assert recreated_message[5] == 'updated'
diff --git a/tests/test_init.py b/tests/test_init.py
deleted file mode 100644
index ab49ceeb7..000000000
--- a/tests/test_init.py
+++ /dev/null
@@ -1,21 +0,0 @@
-"""
-A test for quick setup of the dev environment for testing the release.
-"""
-import subprocess
-import pytest
-
-
-@pytest.mark.order(1)
-def test_init_dev_environment(vault_configuration_data, vault_approle):
- """
- Check the function for the user who is allow access to the bot
- """
- _ = vault_configuration_data
- command = (
- "export VAULT_ADDR=http://vault-server:8200 && "
- f"export VAULT_APPROLE_ID={vault_approle['id']} && "
- f"export VAULT_APPROLE_SECRETID={vault_approle['secret-id']} && "
- "docker compose -f docker-compose.yml up -d --force-recreate --build pyinstabot-downloader"
- )
- with subprocess.Popen(command, shell=True):
- print("Running docker-compose.yml...")
diff --git a/tests/test_metrics.py b/tests/test_metrics.py
new file mode 100644
index 000000000..366001d19
--- /dev/null
+++ b/tests/test_metrics.py
@@ -0,0 +1,59 @@
+"""
+This module contains tests for the metrics module.
+"""
+import requests
+import pytest
+
+
+@pytest.mark.order(13)
+def test_metrics_instance(metrics_class, database_class):
+ """
+ Checking the creation of a metrics instance.
+ """
+ assert metrics_class.port == 8000
+ assert metrics_class.interval == 5
+ assert metrics_class.database == database_class
+ assert metrics_class.thread_status_gauge is not None
+ assert metrics_class.access_granted_counter is not None
+ assert metrics_class.access_denied_counter is not None
+ assert metrics_class.processed_messages_counter is not None
+ assert metrics_class.queue_length_gauge is not None
+
+
+@pytest.mark.order(14)
+def test_metrics_users_stats(metrics_class, postgres_users_test_data):
+ """
+ Checking the collection of user statistics.
+ """
+ _ = postgres_users_test_data
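+    # Scrape the Prometheus exposition endpoint served by the metrics instance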
+ response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10)
+ print(response.text)
+ assert "pytest_access_granted_total" in response.text
+ assert "pytest_access_denied_total" in response.text
+ assert "pytest_access_granted_total 5.0" in response.text
+ assert "pytest_access_denied_total 1.0" in response.text
+
+
+@pytest.mark.order(15)
+def test_metrics_threads_status(metrics_class):
+ """
+ Checking the collection of thread statistics.
+ """
+ response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10)
+ assert "pytest_thread_status" in response.text
+ assert 'pytest_thread_status{thread_name="MainThread"} 1.0' in response.text
+
+
+@pytest.mark.order(16)
+def test_metrics_messages(metrics_class, postgres_queue_test_data, postgres_processed_test_data):
+ """
+ Checking the collection of processed and queued messages statistics.
+ """
+ _ = postgres_queue_test_data
+ _ = postgres_processed_test_data
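+    # The queue and processed fixtures are expected to seed three rows each, matching the values asserted below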
+ response = requests.get(f"http://0.0.0.0:{metrics_class.port}/", timeout=10)
+ print(response.text)
+ assert "pytest_processed_messages_total" in response.text
+ assert "pytest_queue_length" in response.text
+ assert "pytest_processed_messages_total 3.0" in response.text
+ assert "pytest_queue_length 3.0" in response.text
diff --git a/tests/vault/policy.hcl b/tests/vault/policy.hcl
index 2a97205a6..0627220e9 100644
--- a/tests/vault/policy.hcl
+++ b/tests/vault/policy.hcl
@@ -19,10 +19,32 @@ path "auth/token/lookup-self" {
# Operations for pytest
# Allow read, create or update operations on the pytest path
-path "sys/mounts/pyinstabot-downloader" {
+path "sys/mounts/pytest" {
capabilities = ["read", "create", "update"]
}
+# Operations for pytest
+# Allow reading database credentials for a role
+path "database/creds/pytest" {
+ capabilities = ["read"]
+}
+
+# Operations for pytest
+# Allow reading and updating the pytest KV mount configuration
+path "pytest/config" {
+ capabilities = ["read", "list", "update"]
+}
+
+# Operations for pytest
+# Allow managing configuration data under the pytest KV mount
+path "pytest/data/configuration/*" {
+ capabilities = ["create", "read", "update", "list"]
+}
+
+###############################################################
+
# Operations for the module
# Read and update namespace configuration
path "pyinstabot-downloader/config" {
@@ -55,3 +77,8 @@ path "pyinstabot-downloader/data/history/*" {
path "pyinstabot-downloader/metadata/configuration/users" {
capabilities = ["read", "list"]
}
+
+# Allow reading database credentials for a role
+path "database/creds/pyinstabot-downloader"{
+ capabilities = ["read"]
+}
diff --git a/vault/policy.hcl b/vault/policy.hcl
index d93dd38df..c2844032e 100644
--- a/vault/policy.hcl
+++ b/vault/policy.hcl
@@ -28,6 +28,11 @@ path "pyinstabot-downloader/data/configuration/*" {
capabilities = ["read", "list"]
}
+# Allow reading and generating credentials in the database engine
+path "pyinstabot-downloader-database/creds/*" {
+ capabilities = ["read", "list", "update"]
+}
+
# Allowed read and write of bot data (!!! deprecated after https://github.com/obervinov/users-package/issues/41)
path "pyinstabot-downloader/data/data/*" {
capabilities = ["read", "list", "create", "update"]