diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 6cc391dc..00000000 --- a/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -[flake8] -ignore = - # whitespace before ':' - E203, - # too many leading ### in a block comment - E266, - # line too long (managed by black) - E501, - # Line break occurred before a binary operator (this is not PEP8 compatible) - W503, - # Missing docstring in public module - D100, - # Missing docstring in public class - D101, - # Missing docstring in public method - D102, - # Missing docstring in public function - D103, - # Missing docstring in public package - D104, - # Missing docstring in magic method - D105, - # Missing docstring in public package - D106, - # Missing docstring in __init__ - D107, - # needed because of https://github.com/ambv/black/issues/144 - D202, - # other string does contain unindexed parameters - P103 -max-line-length = 80 -exclude = migrations snapshots -max-complexity = 10 diff --git a/.isort.cfg b/.isort.cfg deleted file mode 100644 index 6f8d4f1f..00000000 --- a/.isort.cfg +++ /dev/null @@ -1,7 +0,0 @@ -[settings] -known_first_party=nowplaying -multi_line_output=3 -include_trailing_comma=True -force_grid_wrap=0 -combine_as_imports=True -line_length=88 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 69302044..0e9b598d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,35 +1,14 @@ repos: - - repo: local + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.4 hooks: - - id: black - name: black - language: system - entry: black - types: [python] - - id: isort - name: isort - language: system - entry: isort - types: [python] - - id: flake8 - name: flake8 - language: system - entry: flake8 - types: [python] - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.8.0 - hooks: - - id: mypy - additional_dependencies: - - types-openpyxl - - types-pytz - - types-python-dateutil - - types-tqdm + - id: ruff + - id: ruff-format - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - exclude: ^src/api/client.js$ + exclude: ^(src/api/client.js|tests/__snapshots__/.*ambr)$ - id: end-of-file-fixer exclude: ^src/api/client.js$ - id: check-symlinks diff --git a/README.md b/README.md index 646c9b73..0e0d23be 100644 --- a/README.md +++ b/README.md @@ -161,6 +161,13 @@ As documented in [Usage](#Usage), you can also pass in options on the command li ./suisa_sendemeldung.py --bearer-token=abcdefghijklmnopqrstuvwxyzabcdef --stream_id=a-bcdefgh --stdout ``` +## Development + +Snapshot testing is used to test the help output, you can update the snapshots like so: +``` +poetry run pytest -- --snapshot-update +``` + ## Release Management The CI/CD setup uses semantic commit messages following the [conventional commits standard](https://www.conventionalcommits.org/en/v1.0.0/). 
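For context on the snapshot-testing workflow documented in the README change above: the new dev dependency syrupy provides a `snapshot` pytest fixture that records the asserted value into `tests/__snapshots__/*.ambr` on the first run (or whenever `--snapshot-update` is passed) and compares against the stored value on subsequent runs. Below is a minimal sketch with a hypothetical test name; the real test added by this diff is `test_cli_help` in `tests/test_suisa_sendemeldung.py` further down.

```python
"""Minimal sketch of a syrupy snapshot test (hypothetical example only).

The `snapshot` fixture comes from syrupy; the recorded value lives under
tests/__snapshots__/ and is refreshed with `pytest --snapshot-update`.
"""


def test_help_text(snapshot):
    """Compare some generated text against the recorded snapshot."""
    help_text = "usage: sketch [-h]"  # stands in for real CLI output
    assert help_text == snapshot
```

Running `poetry run pytest -- --snapshot-update` (as documented above) rewrites the `.ambr` file when the output legitimately changes; a plain `pytest` run then fails on any unintended drift.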
diff --git a/docs/gen_ref_pages.py b/docs/gen_ref_pages.py index b8fb937e..0acf35e9 100644 --- a/docs/gen_ref_pages.py +++ b/docs/gen_ref_pages.py @@ -28,6 +28,7 @@ with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file: nav_file.writelines(nav.build_literate_nav()) -readme = Path("README.md").open("r") -with mkdocs_gen_files.open("index.md", "w") as index_file: +with Path("README.md").open("r") as readme, mkdocs_gen_files.open( + "index.md", "w" +) as index_file: index_file.writelines(readme.read()) diff --git a/poetry.lock b/poetry.lock index 49f0498b..fb2baeec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,17 +2,17 @@ [[package]] name = "acrclient" -version = "0.4.2" +version = "0.4.4" description = "API wrapper for the v2 ACRCloud API" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.11" files = [ - {file = "acrclient-0.4.2-py3-none-any.whl", hash = "sha256:c0dab317929a5b6cdac7ccfeff6a3ec31c7410b87f8c6c095960a233dd84d3bb"}, - {file = "acrclient-0.4.2.tar.gz", hash = "sha256:ce6ae20e6d9189f27eb78df48ba824c2520d1942b56ceb58a8808b952a6f0dcf"}, + {file = "acrclient-0.4.4-py3-none-any.whl", hash = "sha256:10800272e39efb001c0c9a83f52d6ade33f6c774c232705f49f6d338076f74e0"}, + {file = "acrclient-0.4.4.tar.gz", hash = "sha256:ad2e90367309557dab9bcda1d425f5a4ae6cf0a133535417363595e458ca9205"}, ] [package.dependencies] -cachecontrol = ">=0.13.0,<0.14.0" +cachecontrol = ">=0.13,<0.15" requests = ">=2.28.2" [[package]] @@ -59,67 +59,23 @@ files = [ [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] -[[package]] -name = "black" -version = "24.3.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = 
"black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "cachecontrol" -version = "0.13.1" +version = "0.14.0" description = "httplib2 caching for requests" optional = false python-versions = ">=3.7" files = [ - {file = "cachecontrol-0.13.1-py3-none-any.whl", hash = "sha256:95dedbec849f46dda3137866dc28b9d133fc9af55f5b805ab1291833e4457aa4"}, - {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"}, + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, ] [package.dependencies] -msgpack = ">=0.5.2" +msgpack = ">=0.5.2,<2.0.0" requests = ">=2.16.0" [package.extras] -dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "mypy", "pytest", "pytest-cov", "sphinx", "tox", "types-redis", "types-requests"] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] filecache = ["filelock (>=3.8.0)"] redis = ["redis (>=2.10.5)"] @@ -394,99 +350,6 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] -[[package]] -name = "flake8" -version = "7.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = 
"sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.2.0,<3.3.0" - -[[package]] -name = "flake8-debugger" -version = "4.1.2" -description = "ipdb/pdb statement checker plugin for flake8" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, - {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, -] - -[package.dependencies] -flake8 = ">=3.0" -pycodestyle = "*" - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-isort" -version = "6.1.1" -description = "flake8 plugin that integrates isort" -optional = false -python-versions = ">=3.8" -files = [ - {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, - {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, -] - -[package.dependencies] -flake8 = "*" -isort = ">=5.0.0,<6" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -description = "string format checker, plugin for flake8" -optional = false -python-versions = "*" -files = [ - {file = "flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2"}, - {file = "flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-tuple" -version = "0.4.1" -description = "Check code for 1 element tuple." 
-optional = false -python-versions = "*" -files = [ - {file = "flake8_tuple-0.4.1-py2.py3-none-any.whl", hash = "sha256:d828cc8e461c50cacca116e9abb0c9e3be565e8451d3f5c00578c63670aae680"}, - {file = "flake8_tuple-0.4.1.tar.gz", hash = "sha256:8a1b42aab134ef4c3fef13c6a8f383363f158b19fbc165bd91aed9c51851a61d"}, -] - -[package.dependencies] -flake8 = "*" -six = "*" - [[package]] name = "freezegun" version = "1.4.0" @@ -1103,45 +966,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "pycodestyle" -version = "2.11.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, -] - -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - -[[package]] -name = "pyflakes" -version = "3.2.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, -] - [[package]] name = "pygments" version = "2.17.2" @@ -1289,6 +1113,21 @@ files = [ pylint = ">=2.15.0" pytest = ">=7.0" +[[package]] +name = "pytest-ruff" +version = "0.3.1" +description = "pytest plugin to check ruff requirements." +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest_ruff-0.3.1-py3-none-any.whl", hash = "sha256:008556576fb1bda93a432ad381432bfd5575cc94627d22bfdece46561b8e4f7f"}, + {file = "pytest_ruff-0.3.1.tar.gz", hash = "sha256:c9f7392a3384af73a6a72741a4035a605480a7a8e7a4bd8da05a98e6664cffb5"}, +] + +[package.dependencies] +pytest = ">=5" +ruff = ">=0.0.242" + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1563,6 +1402,32 @@ requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] +[[package]] +name = "ruff" +version = "0.3.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"}, + {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"}, + {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"}, + {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"}, + {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"}, + {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"}, +] + [[package]] name = "six" version = "1.16.0" @@ -1575,16 +1440,19 @@ files = [ ] [[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+name = "syrupy" +version = "4.6.1" +description = "Pytest Snapshot Test Utility" optional = false -python-versions = "*" +python-versions = ">=3.8.1,<4" files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] +[package.dependencies] +pytest = ">=7.0.0,<9.0.0" + [[package]] name = "text-unidecode" version = "1.3" @@ -1782,4 +1650,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "312ea05afc2946efd87f4853d9882c6de17ba227beccd7a4bda819f885432c1f" +content-hash = "67ddeec641f326fee912f8f6920a46f17743e214941d581b66470fa97cc9c167" diff --git a/pyproject.toml b/pyproject.toml index 396a0c7e..4a42b25f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ suisa_sendemeldung = 'suisa_sendemeldung.suisa_sendemeldung:main' [tool.poetry.dependencies] python = "^3.11" -acrclient = "^0.4.1" +acrclient = "^0.4.4" ConfigArgParse = "^1.5.5" iso3901 = "^0.3.0.post1" openpyxl = "^3.1.2" @@ -25,23 +25,7 @@ tqdm = "^4.65.0" Babel = "^2.12.1" [tool.poetry.group.dev.dependencies] -black = ">=23.3,<25.0" -flake8 = ">=6,<8" -flake8-debugger = "^4.1.2" -flake8-docstrings = "^1.7.0" -flake8-isort = "^6.0.0" -flake8-string-format = "^0.3.0" -flake8-tuple = "^0.4.1" freezegun = "^1.2.2" -isort = "^5.12.0" -mock = "^5.0.2" -pytest = ">=7.4,<9.0" -pytest-cov = ">=4.1,<6.0" -pytest-env = ">=0.8.2,<1.2.0" -pytest-pylint = ">=0.19,<0.22" -requests-mock = "^1.11.0" -types-requests = "^2.31.0.1" -wheel = ">=0.40,<0.44" mkdocs = "^1.5.3" mkdocs-material = "^9.5.14" mkdocs-gen-files = "^0.5.0" @@ -49,21 +33,26 @@ mkdocs-literate-nav = "^0.6.1" mkdocs-section-index = "^0.3.8" mkdocs-autorefs = "^1.0.1" mkdocstrings = {extras = ["python"], version = "^0.24.1"} +mock = "^5.0.2" +pytest = ">=7.4,<9.0" +pytest-cov = ">=4.1,<6.0" +pytest-env = ">=0.8.2,<1.2.0" pytest-mypy = "^0.10.3" +pytest-pylint = ">=0.19,<0.22" +pytest-ruff = "^0.3.1" +requests-mock = "^1.11.0" +ruff = "^0.3.4" +syrupy = "^4.6.1" types-openpyxl = "^3.1.0.20240311" types-python-dateutil = "^2.9.0.20240316" types-pytz = "^2024.1.0.20240203" +types-requests = "^2.31.0.1" types-tqdm = "^4.66.0.20240106" - -[tool.isort] -profile = "black" +wheel = ">=0.40,<0.44" [tool.pytest.ini_options] -minversion = "7.4" -addopts = "--doctest-modules --cov=suisa_sendemeldung --pylint --cov-fail-under=100 --ignore=docs/ --mypy" -filterwarnings = [ - "ignore::DeprecationWarning:pylint", -] +minversion = "8.1" +addopts = "--doctest-modules --cov=suisa_sendemeldung --cov-fail-under=100 --ignore=docs/ --mypy --ruff" [build-system] requires = ["poetry-core"] diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 00000000..a79589f2 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,78 @@ +# [ruff](https://docs.astral.sh/ruff/) config +# +# templated with https://github.com/radiorabe/backstage-software-templates + +[lint] +select = [ + "F", # pyflakes + "E", # pycodestyle errors + "I", # isort + "C90", # mccabe + "N", # pep8-naming + "D", # pydocstyle + "UP", # pyupgrade + "ANN", # flake8-annotations + "ASYNC", # flake8-async + "S", # flake8-bandit 
+ "BLE", # flake8-blind-exception + "FBT", # flake8-boolean-trap + "B", # flake8-bugbear + "A", # flake8-builtins + "COM", # flake8-commas + "C4", # flake8-comprehensions + "DTZ", # flake8-datetimez + "T10", # flake8-debugger + "EM", # flake8-errmsg + "EXE", # flake8-executable + "FA", # flake8-future-annotations + "ISC", # flake8-implicit-str-concat + "ICN", # flake8-import-conventions + "G", # flake8-logging-format + "INP", # flake8-no-pep420 + "PIE", # flake8-pie + "T20", # flake8-print + "PYI", # flake8-pyi + "PT", # flake8-pytest-style + "Q", # flake8-quotes + "RSE", # flake8-raise + "RET", # flake8-return + "SLF", # flake8-self + "SLOT", # flake8-slots + "SIM", # flake8-simplify + "TID", # flake8-tidy-imports + "TCH", # flake8-type-checking + "INT", # flake8-gettext + "ARG", # flake8-unused-arguments + "PTH", # flake8-use-pathlib + "TD", # flake8-todos + "ERA", # eradicate + "PGH", # pygrep-hooks + "PL", # Pylint + "TRY", # tryceratops + "PERF", # Perflint + "RUF", # ruff specific rules +] +ignore = [ + "D203", # we prefer blank-line-before-class (D211) for black compat + "D213", # we prefer multi-line-summary-first-line (D212) + "COM812", # ignore due to conflict with formatter + "ISC001", # ignore due to conflict with formatter +] + +[lint.per-file-ignores] +"tests/**/*.py" = [ + "D", # pydocstyle is optional for tests + "ANN", # flake8-annotations are optional for tests + "S101", # assert is allow in tests + "S105", # tests may have hardcoded secrets + "S106", # tests may have hardcoded passwords + "S108", # /tmp is allowed in tests since it's expected to be mocked + "DTZ00", # tests often run in UTC + "INP001", # tests do not need a dunder init +] +"**/__init__.py" = [ + "D104", # dunder init does not need a docstring because it might be empty +] +"docs/gen_ref_pages.py" = [ + "INP001", # mkdocs does not need a dunder init +] diff --git a/suisa_sendemeldung/acrclient.py b/suisa_sendemeldung/acrclient.py index af390990..8bc92185 100644 --- a/suisa_sendemeldung/acrclient.py +++ b/suisa_sendemeldung/acrclient.py @@ -1,9 +1,13 @@ """module containing the ACRCloud client.""" +from __future__ import annotations + from datetime import date, datetime, timedelta +from typing import Any, Self import pytz from acrclient import Client +from acrclient.models import GetBmCsProjectsResultsParams from tqdm import tqdm @@ -21,25 +25,28 @@ class ACRClient(Client): # timezone of ACRCloud ACR_TIMEZONE = "UTC" - def __init__(self, bearer_token, base_url="https://eu-api-v2.acrcloud.com"): + def __init__( + self: Self, bearer_token: str, base_url: str = "https://eu-api-v2.acrcloud.com" + ) -> None: + """Init subclass with default_date.""" super().__init__(bearer_token=bearer_token, base_url=base_url) - self.default_date = date.today() - timedelta(days=1) + self.default_date: date = date.today() - timedelta(days=1) # noqa: DTZ011 def get_data( - self, - project_id, - stream_id, - requested_date=None, - timezone=ACR_TIMEZONE, - ): + self: Self, + project_id: int, + stream_id: str, + requested_date: date | None = None, + timezone: str = ACR_TIMEZONE, + ) -> Any: # noqa: ANN401 """Fetch metadata from ACRCloud for `stream_id`. Arguments: --------- project_id: The Project ID of the stream. stream_id: The ID of the stream. - requested_date (optional): The date of the entries you want (default: yesterday). - timezone (optional): The timezone to use for localization. + requested_date: The date of the entries you want (default: yesterday). + timezone: The timezone to use for localization. 
Returns: ------- @@ -51,27 +58,28 @@ def get_data( data = self.get_bm_cs_projects_results( project_id=project_id, stream_id=stream_id, - params={ - "date": requested_date.strftime("%Y%m%d"), - }, + params=GetBmCsProjectsResultsParams( + type="day", + date=requested_date.strftime("%Y%m%d"), + ), ) for entry in data: metadata = entry.get("metadata") ts_utc = pytz.utc.localize( - datetime.strptime(metadata.get("timestamp_utc"), ACRClient.TS_FMT), + datetime.strptime(metadata.get("timestamp_utc"), ACRClient.TS_FMT), # noqa: DTZ007 ) ts_local = ts_utc.astimezone(pytz.timezone(timezone)) metadata.update({"timestamp_local": ts_local.strftime(ACRClient.TS_FMT)}) return data - def get_interval_data( - self, - project_id, - stream_id, - start, - end, - timezone=ACR_TIMEZONE, + def get_interval_data( # noqa: PLR0913, ANN201 + self: Self, + project_id: int, + stream_id: str, + start: date, + end: date, + timezone: str = ACR_TIMEZONE, ): # pylint: disable-msg=too-many-locals,too-many-arguments """Get data specified by interval from start to end. @@ -92,7 +100,7 @@ def get_interval_data( # if we have to localize the timestamps we may need more data if timezone != ACRClient.ACR_TIMEZONE: # compute utc offset - offset = pytz.timezone(timezone).utcoffset(datetime.now()) + offset = pytz.timezone(timezone).utcoffset(datetime.now()) # noqa: DTZ005 # decrease start by 1 day if we're ahead of utc if offset > timedelta(seconds=1): computed_start = start - timedelta(days=1) @@ -113,7 +121,8 @@ def get_interval_data( dates.append(ptr) ptr += timedelta(days=1) data = [] - # make the prefix longer by this amount so tqdm lines up with the one in the main code + # make the prefix longer by this amount so tqdm lines up with + # the one in the main code ljust_amount: int = 27 for ptr in tqdm(dates, desc="load ACRCloud data".ljust(ljust_amount)): data += self.get_data( @@ -128,7 +137,7 @@ def get_interval_data( for entry in reversed(data): metadata = entry.get("metadata") timestamp = metadata.get("timestamp_local") - timestamp_date = datetime.strptime(timestamp, ACRClient.TS_FMT).date() + timestamp_date = datetime.strptime(timestamp, ACRClient.TS_FMT).date() # noqa: DTZ007 if timestamp_date < start or timestamp_date > end: data.remove(entry) diff --git a/suisa_sendemeldung/suisa_sendemeldung.py b/suisa_sendemeldung/suisa_sendemeldung.py index 8afd7ade..71d66d23 100644 --- a/suisa_sendemeldung/suisa_sendemeldung.py +++ b/suisa_sendemeldung/suisa_sendemeldung.py @@ -1,13 +1,13 @@ """SUISA Sendemeldung bugs SUISA with email once per month. Fetches data on our playout history and formats them in a CSV file format -containing the data (like Track, Title and ISRC) requested by SUISA. Also takes care of sending -the report to SUISA via email for hands-off operations. +containing the data (like Track, Title and ISRC) requested by SUISA. Also +takes care of sending the report to SUISA via email for hands-off operations. 
""" from __future__ import annotations -from argparse import Namespace as ArgparseNamespace +import sys from csv import reader, writer from datetime import date, datetime, timedelta from email.encoders import encode_base64 @@ -16,10 +16,10 @@ from email.mime.text import MIMEText from email.utils import formatdate from io import BytesIO, StringIO -from os.path import basename, expanduser +from pathlib import Path from smtplib import SMTP from string import Template -from typing import Any +from typing import TYPE_CHECKING, Any import cridlib import pytz @@ -29,11 +29,15 @@ from iso3901 import ISRC from openpyxl import Workbook from openpyxl.styles import Border, Font, PatternFill, Side -from openpyxl.worksheet.worksheet import Worksheet from tqdm import tqdm from .acrclient import ACRClient +if TYPE_CHECKING: # pragma: no cover + from argparse import Namespace as ArgparseNamespace + + from openpyxl.worksheet.worksheet import Worksheet + _EMAIL_TEMPLATE = """ Hallo SUISA @@ -80,7 +84,9 @@ -- $email_footer -""" +""" # noqa: E501 + +_ACRTOKEN_MAXLEN = 32 def validate_arguments(parser: ArgumentParser, args: ArgparseNamespace) -> None: @@ -96,19 +102,23 @@ def validate_arguments(parser: ArgumentParser, args: ArgparseNamespace) -> None: """ msgs = [] # check length of bearer_token - if not len(args.bearer_token) >= 32: + if not len(args.bearer_token) >= _ACRTOKEN_MAXLEN: msgs.append( "".join( ( - "wrong format on bearer_token, ", - f"expected larger than 32 characters but got {len(args.bearer_token)}", + "wrong format on bearer_token, " + "expected larger than 32 characters " + f"but got {len(args.bearer_token)}" ), ), ) # check length of stream_id if len(args.stream_id) not in [9, 10]: msgs.append( - f"wrong format on stream_id, expected 9 or 10 characters but got {len(args.stream_id)}", + ( + "wrong format on stream_id, " + f"expected 9 or 10 characters but got {len(args.stream_id)}" + ), ) # one output option has to be set if not (args.file or args.email or args.stdout): @@ -126,12 +136,13 @@ def validate_arguments(parser: ArgumentParser, args: ArgparseNamespace) -> None: parser.error("\n- " + "\n- ".join(msgs)) -def get_arguments(parser: ArgumentParser) -> ArgparseNamespace: # pragma: no cover +def get_arguments(parser: ArgumentParser, sysargs: list[str]) -> ArgparseNamespace: """Create :class:`ArgumentParser` with arguments. Arguments: --------- parser: the parser to add arguments + sysargs: sys.arg[1:] or something else for testing Returns: ------- @@ -225,14 +236,21 @@ def get_arguments(parser: ArgumentParser) -> ArgparseNamespace: # pragma: no co parser.add_argument( "--email-subject", env_var="EMAIL_SUBJECT", - help="The subject of the email, placeholders are $station_name, $year and $month", + help=""" + Template for subject of the email. + + Placeholders are $station_name, $year and $month. + """, default="SUISA Sendemeldung von $station_name für $year-$month", ) parser.add_argument( "--email-text", env_var="EMAIL_TEXT", help=""" - A template for the Email text, placeholders are $station_name, $month, $year, $previous_year, $responsible_email, and $email_footer. + Template for email text. + + Placeholders are $station_name, $month, $year, $previous_year, + $responsible_email, and $email_footer. """, default=_EMAIL_TEMPLATE, ) @@ -281,7 +299,11 @@ def get_arguments(parser: ArgumentParser) -> ArgparseNamespace: # pragma: no co "--filename", env_var="FILENAME", help=""" - file to write to (default: __.csv when reporting last month, _.csv else) + Output filename. 
+ + Default: + - __.csv when reporting last month + - _.csv else """, ) parser.add_argument( @@ -290,9 +312,9 @@ def get_arguments(parser: ArgumentParser) -> ArgparseNamespace: # pragma: no co help="also print to stdout", action="store_true", ) - args = parser.parse_args() - validate_arguments(parser, args) - return args + args = parser.parse_args(sysargs) + validate_arguments(parser, args) # pragma: no cover + return args # pragma: no cover def parse_date(args: ArgparseNamespace) -> tuple[date, date]: @@ -310,7 +332,7 @@ def parse_date(args: ArgparseNamespace) -> tuple[date, date]: """ # date parsing logic if args.last_month: - today = date.today() + today = date.today() # noqa: DTZ011 # get first of this month this_month = today.replace(day=1) # last day of last month = first day of this month - 1 day @@ -318,12 +340,12 @@ def parse_date(args: ArgparseNamespace) -> tuple[date, date]: start_date = end_date.replace(day=1) else: if args.end_date: - end_date = datetime.strptime(args.end_date, "%Y-%m-%d").date() + end_date = datetime.strptime(args.end_date, "%Y-%m-%d").date() # noqa: DTZ007 else: # if no end_date was set, default to today - end_date = date.today() + end_date = date.today() # noqa: DTZ011 if args.start_date: - start_date = datetime.strptime(args.start_date, "%Y-%m-%d").date() + start_date = datetime.strptime(args.start_date, "%Y-%m-%d").date() # noqa: DTZ007 else: # if no start_date was set, default to 30 days before end_date start_date = end_date - timedelta(days=30) @@ -350,11 +372,14 @@ def parse_filename(args: ArgparseNamespace, start_date: date) -> str: date_part = f"{start_date.strftime('%Y')}_{start_date.strftime('%m')}" filename = f"{args.station_name_short}_{date_part}.{args.filetype}" else: - filename = f"{args.station_name_short}_{start_date.strftime('%Y-%m-%d')}.{args.filetype}" + filename = ( + f"{args.station_name_short}_" + f"{start_date.strftime('%Y-%m-%d')}.{args.filetype}" + ) return filename -def check_duplicate(entry_a: Any, entry_b: Any) -> bool: +def check_duplicate(entry_a: Any, entry_b: Any) -> bool: # noqa: ANN401 """Check if two entries are duplicates by checking their acrid in all music items. Arguments: @@ -382,7 +407,7 @@ def check_duplicate(entry_a: Any, entry_b: Any) -> bool: return False -def merge_duplicates(data: Any) -> Any: +def merge_duplicates(data: Any) -> Any: # noqa: ANN401 """Merge consecutive entries into one if they are duplicates. Arguments: @@ -414,10 +439,11 @@ def merge_duplicates(data: Any) -> Any: def funge_release_date(release_date: str = "") -> str: """Make a release_date from ACR conform to what seems to be the spec.""" - if len(release_date) == 10: - # we can make it look like what suisa has in their examples if it's the right length + if len(release_date) == 10: # noqa: PLR2004 + # we can make it look like what suisa has in their examples if it's the + # right length try: - return datetime.strptime(release_date, "%Y-%m-%d").strftime("%Y%m%d") + return datetime.strptime(release_date, "%Y-%m-%d").strftime("%Y%m%d") # noqa: DTZ007 except ValueError: return "" # we discard other records since there is no way to convert records like a plain @@ -427,7 +453,7 @@ def funge_release_date(release_date: str = "") -> str: return "" -def get_artist(music: Any) -> str: +def get_artist(music: Any) -> str: # noqa: ANN401 """Get artist from a given dict. 
Arguments: @@ -460,7 +486,7 @@ def get_artist(music: Any) -> str: return artist -def get_isrc(music: Any) -> str: +def get_isrc(music: Any) -> str: # noqa: ANN401 """Get a valid ISRC from the music record or return an empty string.""" isrc = "" if music.get("external_ids", {}).get("isrc"): @@ -534,7 +560,7 @@ def get_csv(data: dict, station_name: str = "") -> str: for entry in tqdm(data, desc="preparing tracks for report"): metadata = entry.get("metadata") # parse timestamp - timestamp = datetime.strptime(metadata.get("timestamp_local"), ACRClient.TS_FMT) + timestamp = datetime.strptime(metadata.get("timestamp_local"), ACRClient.TS_FMT) # noqa: DTZ007 ts_date = timestamp.strftime("%Y%m%d") ts_time = timestamp.strftime("%H:%M:%S") @@ -563,9 +589,8 @@ def get_csv(data: dict, station_name: str = "") -> str: if c.get("role", "") in ["C", "Composer", "W", "Writer"] ], ) - if works_composer: - if not composer or composer == artist: - composer = works_composer + if works_composer and (not composer or composer == artist): + composer = works_composer isrc = get_isrc(music) label = music.get("label") @@ -580,7 +605,7 @@ def get_csv(data: dict, station_name: str = "") -> str: # cridlib only supports timezone-aware datetime values, so we convert one timestamp_utc = pytz.utc.localize( - datetime.strptime(metadata.get("timestamp_utc"), ACRClient.TS_FMT), + datetime.strptime(metadata.get("timestamp_utc"), ACRClient.TS_FMT), # noqa: DTZ007 ) # we include the acrid in our CRID so we know about the data's provenience # in case any questions about the data we delivered are asked @@ -619,7 +644,7 @@ def get_csv(data: dict, station_name: str = "") -> str: return csv.getvalue() -def get_xlsx(data: Any, station_name: str = "") -> BytesIO: +def get_xlsx(data: Any, station_name: str = "") -> BytesIO: # noqa: ANN401 """Create SUISA compatible xlsx data. Arguments: @@ -693,7 +718,7 @@ def write_csv(filename: str, csv: str) -> None: # pragma: no cover csv: The data to write to `filename`. """ - with open(filename, mode="w", encoding="utf-8") as csvfile: + with Path(filename).open("w", encoding="utf-8") as csvfile: csvfile.write(csv) @@ -706,11 +731,11 @@ def write_xlsx(filename: str, xlsx: BytesIO) -> None: # pragma: no cover xlsx: The data to write to `filename`. """ - with open(filename, mode="wb") as xlsxfile: + with Path(filename).open("wb") as xlsxfile: xlsxfile.write(xlsx.getvalue()) -def get_email_attachment(filename: str, filetype: str, data: Any) -> MIMEBase: +def get_email_attachment(filename: str, filetype: str, data: Any) -> MIMEBase: # noqa: ANN401 """Create attachment based on required filetype and data. 
Arguments: @@ -736,20 +761,20 @@ def get_email_attachment(filename: str, filetype: str, data: Any) -> MIMEBase: part = MIMEBase(maintype, subtype) part.set_payload(payload) encode_base64(part) - part.add_header("Content-Disposition", f"attachment; filename={basename(filename)}") + part.add_header( + "Content-Disposition", f"attachment; filename={Path(filename).name}" + ) return part -# reducing the arguments even more does not seem practical -# pylint: disable-msg=too-many-arguments,invalid-name -def create_message( +def create_message( # noqa: PLR0913 sender: str, recipient: str, subject: str, text: str, filename: str, filetype: str, - data: Any, + data: Any, # noqa: ANN401 cc: str | None = None, bcc: str | None = None, ) -> MIMEMultipart: @@ -812,19 +837,19 @@ def send_message( def main() -> None: # pragma: no cover """Entrypoint for SUISA Sendemeldung .""" - default_config_file = basename(__file__).replace(".py", ".conf") - # config file in /etc gets overriden by the one in $HOME which gets overriden by the one in the - # current directory + default_config_file: str = Path(__file__).name.replace(".py", ".conf") + # config file in /etc gets overriden by the one in $HOME which gets overriden by the + # one in the current directory default_config_files = [ - "/etc/" + default_config_file, - expanduser("~") + "/" + default_config_file, + str(Path("/etc") / default_config_file), + str(Path("~").expanduser() / default_config_file), default_config_file, ] parser = ArgumentParser( default_config_files=default_config_files, description="ACRCloud client for SUISA reporting @ RaBe.", ) - args = get_arguments(parser) + args = get_arguments(parser, sys.argv[1:]) start_date, end_date = parse_date(args) filename = parse_filename(args, start_date) @@ -862,7 +887,7 @@ def main() -> None: # pragma: no cover locale=args.locale, ), "in_three_months": format_date( - datetime.now() + relativedelta(months=+3), + datetime.now() + relativedelta(months=+3), # noqa: DTZ005 format="long", locale=args.locale, ), @@ -892,7 +917,7 @@ def main() -> None: # pragma: no cover elif args.file and args.filetype == "csv": write_csv(filename, data) if args.stdout and args.filetype == "csv": - print(data) + print(data) # noqa: T201 if __name__ == "__main__": # pragma: no cover diff --git a/tests/__snapshots__/test_suisa_sendemeldung.ambr b/tests/__snapshots__/test_suisa_sendemeldung.ambr new file mode 100644 index 00000000..14a56702 --- /dev/null +++ b/tests/__snapshots__/test_suisa_sendemeldung.ambr @@ -0,0 +1,89 @@ +# serializer version: 1 +# name: test_cli_help + ''' + usage: pytest [-h] --bearer-token BEARER_TOKEN --project-id PROJECT_ID + --stream-id STREAM_ID [--station-name STATION_NAME] + [--station-name-short STATION_NAME_SHORT] [--file] + [--filetype {xlsx,csv}] [--email] [--email-from EMAIL_FROM] + [--email-to EMAIL_TO] [--email-cc EMAIL_CC] + [--email-bcc EMAIL_BCC] [--email-server EMAIL_SERVER] + [--email-login EMAIL_LOGIN] [--email-pass EMAIL_PASS] + [--email-subject EMAIL_SUBJECT] [--email-text EMAIL_TEXT] + [--email-footer EMAIL_FOOTER] + [--responsible-email RESPONSIBLE_EMAIL] + [--start-date START_DATE] [--end-date END_DATE] [--last-month] + --timezone TIMEZONE [--locale LOCALE] [--filename FILENAME] + [--stdout] + + options: + -h, --help show this help message and exit + --bearer-token BEARER_TOKEN + the bearer token for ACRCloud (required) [env var: + BEARER_TOKEN] + --project-id PROJECT_ID + the id of the project at ACRCloud (required) [env var: + PROJECT_ID] + --stream-id STREAM_ID + the id of the stream 
at ACRCloud (required) [env var: + STREAM_ID] + --station-name STATION_NAME + Station name, used in Output and Emails [env var: + STATION_NAME] + --station-name-short STATION_NAME_SHORT + Shortname for station as used in Filenames (locally + and in attachment) [env var: STATION_NAME_SHORT] + --file create file [env var: FILE] + --filetype {xlsx,csv} + filetype to attach to email or write to file [env var: + FILETYPE] + --email send an email [env var: EMAIL] + --email-from EMAIL_FROM + the sender of the email [env var: EMAIL_FROM] + --email-to EMAIL_TO the recipients of the email [env var: EMAIL_TO] + --email-cc EMAIL_CC the cc recipients of the email [env var: EMAIL_CC] + --email-bcc EMAIL_BCC + the bcc recipients of the email [env var: EMAIL_BCC] + --email-server EMAIL_SERVER + the smtp server to send the mail with [env var: + EMAIL_SERVER] + --email-login EMAIL_LOGIN + the username to logon to the smtp server (default: + email_from) [env var: EMAIL_LOGIN] + --email-pass EMAIL_PASS + the password for the smtp server [env var: EMAIL_PASS] + --email-subject EMAIL_SUBJECT + Template for subject of the email. Placeholders are + $station_name, $year and $month. [env var: + EMAIL_SUBJECT] + --email-text EMAIL_TEXT + Template for email text. Placeholders are + $station_name, $month, $year, $previous_year, + $responsible_email, and $email_footer. [env var: + EMAIL_TEXT] + --email-footer EMAIL_FOOTER + Footer for the Email [env var: EMAIL_FOOTER] + --responsible-email RESPONSIBLE_EMAIL + Used to hint whom to contact in the emails text. [env + var: RESPONSIBLE_EMAIL] + --start-date START_DATE + the start date of the interval in format YYYY-MM-DD + (default: 30 days before end_date) [env var: + START_DATE] + --end-date END_DATE the end date of the interval in format YYYY-MM-DD + (default: today) [env var: END_DATE] + --last-month download data of whole last month [env var: + LAST_MONTH] + --timezone TIMEZONE set the timezone for localization [env var: TIMEZONE] + --locale LOCALE set locale for date and time formatting [env var: + LOCALE] + --filename FILENAME Output filename. Default: - + __.csv when reporting + last month - _.csv + else [env var: FILENAME] + --stdout also print to stdout [env var: STDOUT] + + In general, command-line values override environment variables which override + defaults. 
+ + ''' +# --- diff --git a/tests/test_suisa_sendemeldung.py b/tests/test_suisa_sendemeldung.py index 7317901e..d53387c4 100644 --- a/tests/test_suisa_sendemeldung.py +++ b/tests/test_suisa_sendemeldung.py @@ -1,14 +1,15 @@ """Test the suisa_sendemeldung.suisa_sendemeldung module.""" +import contextlib from datetime import date, datetime, timezone from email.message import Message from io import BytesIO from unittest.mock import call, patch +import pytest from configargparse import ArgumentParser # type: ignore[import-untyped] from freezegun import freeze_time from openpyxl import load_workbook -from pytest import mark from suisa_sendemeldung import suisa_sendemeldung @@ -32,9 +33,9 @@ def test_validate_arguments(): suisa_sendemeldung.validate_arguments(mock, args) mock.error.assert_called_once_with( "\n" - "- wrong format on bearer_token, expected larger than 32 characters but got 31\n" + "- wrong format on bearer_token, expected larger than 32 characters but got 31\n" # noqa: E501 "- wrong format on stream_id, expected 9 or 10 characters but got 12\n" - "- no output option has been set, specify one of --file, --email or --stdout\n" + "- no output option has been set, specify one of --file, --email or --stdout\n" # noqa: E501 "- argument --last_month not allowed with --start_date or --end_date", ) @@ -44,7 +45,7 @@ def test_validate_arguments(): suisa_sendemeldung.validate_arguments(mock, args) mock.error.assert_called_once_with( "\n" - "- wrong format on bearer_token, expected larger than 32 characters but got 31\n" + "- wrong format on bearer_token, expected larger than 32 characters but got 31\n" # noqa: E501 "- wrong format on stream_id, expected 9 or 10 characters but got 12\n" "- xlsx cannot be printed to stdout, please set --filetype to csv\n" "- argument --last_month not allowed with --start_date or --end_date", @@ -176,12 +177,12 @@ def test_merge_duplicates(): record_2 = {"metadata": {"music": [{"acrid": "987654321"}], "played_duration": 10}} raw_records = [record_1, record_1, record_2] results = suisa_sendemeldung.merge_duplicates(raw_records) - assert len(results) == 2 - assert results[0]["metadata"]["played_duration"] == 20 + assert len(results) == 2 # noqa: PLR2004 + assert results[0]["metadata"]["played_duration"] == 20 # noqa: PLR2004 -@mark.parametrize( - "test_date,expected", +@pytest.mark.parametrize( + ("test_date", "expected"), [ ("0000-00-00", ""), ], @@ -203,7 +204,7 @@ def test_get_csv(mock_cridlib_get): csv = suisa_sendemeldung.get_csv(data) # pylint: disable=line-too-long assert csv == ( - "Titel,Komponist,Interpret,Interpreten-Info,Sender,Sendedatum,Sendedauer,Sendezeit,Werkverzeichnisangaben,ISRC,Label,CD ID / Katalog-Nummer,Aufnahmedatum,Aufnahmeland,Erstveröffentlichungsdatum,Titel des Tonträgers (Albumtitel),Autor Text,Track Nummer,Genre,Programm,Bestellnummer,Marke,Label Code,EAN/GTIN,Identifikationsnummer\r\n" + "Titel,Komponist,Interpret,Interpreten-Info,Sender,Sendedatum,Sendedauer,Sendezeit,Werkverzeichnisangaben,ISRC,Label,CD ID / Katalog-Nummer,Aufnahmedatum,Aufnahmeland,Erstveröffentlichungsdatum,Titel des Tonträgers (Albumtitel),Autor Text,Track Nummer,Genre,Programm,Bestellnummer,Marke,Label Code,EAN/GTIN,Identifikationsnummer\r\n" # noqa: E501 ) # pylint: enable=line-too-long mock_cridlib_get.assert_not_called() @@ -327,15 +328,18 @@ def test_get_csv(mock_cridlib_get): ] csv = suisa_sendemeldung.get_csv(data, station_name="Station Name") # pylint: disable=line-too-long - assert csv == ( - 
"Titel,Komponist,Interpret,Interpreten-Info,Sender,Sendedatum,Sendedauer,Sendezeit,Werkverzeichnisangaben,ISRC,Label,CD ID / Katalog-Nummer,Aufnahmedatum,Aufnahmeland,Erstveröffentlichungsdatum,Titel des Tonträgers (Albumtitel),Autor Text,Track Nummer,Genre,Programm,Bestellnummer,Marke,Label Code,EAN/GTIN,Identifikationsnummer\r\n" - "Uhrenvergleich,,,,Station Name,19930301,00:01:00,13:12:00,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" - 'Meme Dub,Da Composah,Da Gang,,Station Name,19930301,00:01:00,13:37:00,,DEZ650710376,,,,,,"album, but string",,,,,,,,,crid://rabe.ch/v1/test\r\n' - 'Bubbles,,"Mary\'s Surprise Act, Climmy Jiff",,Station Name,19930301,00:01:00,16:20:00,,DEZ650710376,Jane Records,,,,20221213,Da Alboom,,,,,,,,greedy-capitalist-number,crid://rabe.ch/v1/test\r\n' - ",,Artists as string not list,,Station Name,19930301,00:01:00,17:17:17,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" - "Long Playing,,,,Station Name,19930301,19:48:57,18:18:18,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" - "composer in works,Worker,,,Station Name,19930301,19:48:57,18:18:18,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" - "composer better in works,composer,same,,Station Name,19930301,19:48:57,18:18:18,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" + assert ( + csv + == ( + "Titel,Komponist,Interpret,Interpreten-Info,Sender,Sendedatum,Sendedauer,Sendezeit,Werkverzeichnisangaben,ISRC,Label,CD ID / Katalog-Nummer,Aufnahmedatum,Aufnahmeland,Erstveröffentlichungsdatum,Titel des Tonträgers (Albumtitel),Autor Text,Track Nummer,Genre,Programm,Bestellnummer,Marke,Label Code,EAN/GTIN,Identifikationsnummer\r\n" # noqa: E501 + "Uhrenvergleich,,,,Station Name,19930301,00:01:00,13:12:00,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" + 'Meme Dub,Da Composah,Da Gang,,Station Name,19930301,00:01:00,13:37:00,,DEZ650710376,,,,,,"album, but string",,,,,,,,,crid://rabe.ch/v1/test\r\n' # noqa: E501 + 'Bubbles,,"Mary\'s Surprise Act, Climmy Jiff",,Station Name,19930301,00:01:00,16:20:00,,DEZ650710376,Jane Records,,,,20221213,Da Alboom,,,,,,,,greedy-capitalist-number,crid://rabe.ch/v1/test\r\n' # noqa: E501 + ",,Artists as string not list,,Station Name,19930301,00:01:00,17:17:17,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" + "Long Playing,,,,Station Name,19930301,19:48:57,18:18:18,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" + "composer in works,Worker,,,Station Name,19930301,19:48:57,18:18:18,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" + "composer better in works,composer,same,,Station Name,19930301,19:48:57,18:18:18,,,,,,,,,,,,,,,,,crid://rabe.ch/v1/test\r\n" + ) ) # pylint: enable=line-too-long mock_cridlib_get.assert_has_calls( @@ -482,8 +486,8 @@ def test_send_message(): ctx.login.assert_called_once_with("test@example.org", "password") -@mark.parametrize( - "test_music,expected", +@pytest.mark.parametrize( + ("test_music", "expected"), [ ({"external_ids": {"isrc": "DEZ650710376"}}, "DEZ650710376"), ({"external_ids": {"isrc": ["DEZ650710376"]}}, "DEZ650710376"), @@ -502,3 +506,12 @@ def test_get_isrc(test_music, expected): isrc = suisa_sendemeldung.get_isrc(test_music) assert isrc == expected + + +def test_cli_help(snapshot, capsys): + """Snapshot test cli output.""" + parser = ArgumentParser() + with contextlib.suppress(SystemExit): + suisa_sendemeldung.get_arguments(parser, ["-h"]) + captured = capsys.readouterr() + assert captured.out == snapshot