diff --git a/.copier-answers.yml b/.copier-answers.yml index 761f955..4a56f53 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,6 +1,6 @@ # Autogenerated. Do not edit this by hand, use `copier update`. --- -_commit: 0.3.7 +_commit: 0.4.4 _src_path: https://github.com/lkubb/salt-extension-copier author: EITR Technologies, LLC author_email: devops@eitr.tech @@ -18,6 +18,7 @@ no_saltext_namespace: false package_name: grafana project_name: grafana python_requires: '3.8' +relax_pylint: false salt_version: '3006' source_url: https://github.com/salt-extensions/saltext-grafana ssh_fixtures: false diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..716c153 --- /dev/null +++ b/.envrc @@ -0,0 +1,7 @@ +layout_saltext() { + VIRTUAL_ENV="$(python3 tools/initialize.py --print-venv)" + PATH_add "$VIRTUAL_ENV/bin" + export VIRTUAL_ENV +} + +layout_saltext diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 47c40ed..fc650b6 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -1,3 +1,4 @@ +--- name: Pull Request or Push on: diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml index dab0796..016aeea 100644 --- a/.github/workflows/tag.yml +++ b/.github/workflows/tag.yml @@ -1,3 +1,4 @@ +--- name: Tagged Releases on: @@ -16,7 +17,7 @@ jobs: - name: Extract tag name id: get_version - run: echo "version=$(echo ${GITHUB_REF#refs/tags/v})" >> $GITHUB_OUTPUT + run: echo "version=${GITHUB_REF#refs/tags/v}" >> "$GITHUB_OUTPUT" call_central_workflow: needs: get_tag_version diff --git a/.gitignore b/.gitignore index 52c47db..b2b4b49 100644 --- a/.gitignore +++ b/.gitignore @@ -102,6 +102,7 @@ celerybeat.pid *.sage.py # Environments +!.envrc .env .venv env/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4702f73..250aa3a 100755 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,14 +4,14 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: - - id: check-merge-conflict # Check for files that contain merge conflict strings. - - id: trailing-whitespace # Trims trailing whitespace. + - id: check-merge-conflict # Check for files that contain merge conflict strings. + args: [--assume-in-merge] + - id: trailing-whitespace # Trim trailing whitespace. args: [--markdown-linebreak-ext=md] - - id: mixed-line-ending # Replaces or checks mixed line ending. + - id: mixed-line-ending # Ensure files use UNIX-style newlines only. args: [--fix=lf] - - id: end-of-file-fixer # Makes sure files end in a newline and only a newline. - - id: check-merge-conflict # Check for files that contain merge conflict strings. - - id: check-ast # Simply check whether files parse as valid python. + - id: end-of-file-fixer # Ensure files end with a newline. + - id: check-ast # Check whether files parse as valid Python. 
# ----- Formatting ----------------------------------------------------------------------------> - repo: https://github.com/saltstack/pre-commit-remove-import-headers @@ -24,7 +24,7 @@ repos: - id: check-cli-examples name: Check CLI examples on execution modules entry: python .pre-commit-hooks/check-cli-examples.py - language: system + language: python files: ^src/saltext/grafana/modules/.*\.py$ - repo: local @@ -32,7 +32,7 @@ repos: - id: check-docs name: Check rST doc files exist for modules/states entry: python .pre-commit-hooks/make-autodocs.py - language: system + language: python pass_filenames: false - repo: https://github.com/s0undt3ch/salt-rewrite @@ -56,7 +56,7 @@ repos: args: [--silent, -E, fix_docstrings] - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.16.0 hooks: - id: pyupgrade name: Rewrite Code to be Py3.8+ @@ -75,14 +75,14 @@ repos: exclude: src/saltext/grafana/(__init__|version).py - repo: https://github.com/psf/black - rev: 24.2.0 + rev: 24.8.0 hooks: - id: black args: [-l 100] exclude: src/saltext/grafana/version.py - repo: https://github.com/adamchainz/blacken-docs - rev: 1.16.0 + rev: 1.18.0 hooks: - id: blacken-docs args: [--skip-errors] @@ -93,7 +93,7 @@ repos: # ----- Security ------------------------------------------------------------------------------> - repo: https://github.com/PyCQA/bandit - rev: 1.7.8 + rev: 1.7.9 hooks: - id: bandit alias: bandit-salt @@ -101,7 +101,7 @@ repos: args: [--silent, -lll, --skip, B701] exclude: src/saltext/grafana/version.py - repo: https://github.com/PyCQA/bandit - rev: 1.7.8 + rev: 1.7.9 hooks: - id: bandit alias: bandit-tests @@ -111,29 +111,35 @@ repos: # <---- Security ------------------------------------------------------------------------------- # ----- Code Analysis -------------------------------------------------------------------------> - - repo: https://github.com/saltstack/mirrors-nox - rev: v2022.11.21 + + - repo: local hooks: - id: nox alias: lint-src name: Lint Source Code + language: python + entry: nox -e lint-code-pre-commit -- files: ^((setup|noxfile)|src/.*)\.py$ require_serial: true - args: - - -e - - lint-code-pre-commit - - -- + additional_dependencies: + - nox==2024.4.15 + - uv==0.4.0 # Makes this hook much faster - - repo: https://github.com/saltstack/mirrors-nox - rev: v2022.11.21 - hooks: - id: nox alias: lint-tests name: Lint Tests + language: python + entry: nox -e lint-tests-pre-commit -- files: ^tests/.*\.py$ require_serial: true - args: - - -e - - lint-tests-pre-commit - - -- + additional_dependencies: + - nox==2024.4.15 + - uv==0.4.0 # Makes this hook much faster + + - repo: https://github.com/Mateusz-Grzelinski/actionlint-py + rev: 1ca29a1b5d949b3586800190ad6cc98317cb43b8 # v1.7.1.15 + hooks: + - id: actionlint + additional_dependencies: + - shellcheck-py>=0.9.0.5 # <---- Code Analysis -------------------------------------------------------------------------- diff --git a/.pre-commit-hooks/make-autodocs.py b/.pre-commit-hooks/make-autodocs.py index 4760e3f..ff331b1 100644 --- a/.pre-commit-hooks/make-autodocs.py +++ b/.pre-commit-hooks/make-autodocs.py @@ -33,9 +33,14 @@ def write_module(rst_path, path, use_virtualname=True): virtualname = "``" + _find_virtualname(path) + "``" else: virtualname = make_import_path(path) + header_len = len(virtualname) + # The check-merge-conflict pre-commit hook chokes here: + # https://github.com/pre-commit/pre-commit-hooks/issues/100 + if header_len == 7: + header_len += 1 module_contents = f"""\ {virtualname} -{'='*len(virtualname)} 
+{'='*header_len} .. automodule:: {make_import_path(path)} :members: diff --git a/.pylintrc b/.pylintrc index 5692f3b..b304cb9 100755 --- a/.pylintrc +++ b/.pylintrc @@ -39,7 +39,7 @@ extension-pkg-whitelist= fail-on= # Specify a score threshold under which the program will exit with error. -fail-under=10 +fail-under=10.0 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. @@ -59,10 +59,11 @@ ignore-paths= # Emacs file locks ignore-patterns=^\.# -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as @@ -86,9 +87,13 @@ load-plugins= # Pickle collected data for later comparisons. persistent=yes +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +prefer-stubs=no + # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. -py-version=3.10 +py-version=3.8 # Discover python modules and packages in the file system subtree. recursive=no @@ -285,19 +290,19 @@ exclude-too-few-public-methods= ignored-parents= # Maximum number of arguments for function / method. -max-args=15 +max-args=35 # Maximum number of attributes for a class (see R0902). -max-attributes=7 +max-attributes=15 # Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 +max-bool-expr=8 # Maximum number of branch for function / method body. -max-branches=12 +max-branches=48 # Maximum number of locals for function / method body. -max-locals=15 +max-locals=40 # Maximum number of parents for a class (see R0901). max-parents=7 @@ -306,10 +311,10 @@ max-parents=7 max-public-methods=25 # Maximum number of return / yield for function / method body. -max-returns=6 +max-returns=10 # Maximum number of statements in function / method body. -max-statements=50 +max-statements=100 # Minimum number of public methods for a class (see R0903). min-public-methods=2 @@ -324,7 +329,7 @@ overgeneral-exceptions=builtins.BaseException,builtins.Exception [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= +expected-line-ending-format=LF # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ @@ -337,10 +342,10 @@ indent-after-paren=4 indent-string=' ' # Maximum number of characters on a single line. -max-line-length=100 +max-line-length=120 # Maximum number of lines in a module. -max-module-lines=2000 +max-module-lines=3000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. @@ -421,43 +426,18 @@ confidence=HIGH, # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". 
-disable=R, - locally-disabled, - file-ignored, - unexpected-special-method-signature, - import-error, - no-member, - unsubscriptable-object, - blacklisted-name, - invalid-name, - missing-docstring, - empty-docstring, - unidiomatic-typecheck, - wrong-import-order, - ungrouped-imports, - wrong-import-position, - bad-mcs-method-argument, - bad-mcs-classmethod-argument, - line-too-long, - too-many-lines, - bad-continuation, - exec-used, - attribute-defined-outside-init, - protected-access, - reimported, - fixme, - global-statement, - unused-variable, - unused-argument, - redefined-outer-name, - redefined-builtin, - undefined-loop-variable, - logging-format-interpolation, - invalid-format-index, - line-too-long, - import-outside-toplevel, - deprecated-method, - keyword-arg-before-vararg, +disable=duplicate-code, + fixme, + keyword-arg-before-vararg, + line-too-long, + logging-fstring-interpolation, + missing-class-docstring, + missing-function-docstring, + missing-module-docstring, + protected-access, + too-few-public-methods, + ungrouped-imports, + wrong-import-position # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option @@ -495,6 +475,11 @@ max-nested-blocks=5 # printed. never-returning-functions=sys.exit,argparse.parse_error +# Let 'consider-using-join' be raised when the separator to join on would be +# non-empty (resulting in expected fixes of the type: ``"- " + " - +# ".join(items)``) +suggest-join-with-non-empty-separator=yes + [REPORTS] @@ -509,8 +494,9 @@ evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor # used to format the message information. See doc for all details. msg-template= -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= @@ -544,8 +530,8 @@ min-similarity-lines=4 # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 -# Spelling dictionary name. No available dictionaries : You need to install the -# system dependency for enchant to work.. +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. spelling-dict= # List of comma separated words that should be considered directives if they @@ -633,27 +619,27 @@ signature-mutators= # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. 
additional-builtins=__opts__, - __salt__, - __pillar__, - __grains__, - __context__, - __runner__, - __ret__, - __env__, - __low__, - __states__, - __lowstate__, - __running__, - __active_provider_name__, - __master_opts__, - __jid_event__, - __instance_id__, - __salt_system_encoding__, - __proxy__, - __serializers__, - __reg__, - __executors__, - __events__ + __salt__, + __pillar__, + __grains__, + __context__, + __runner__, + __ret__, + __env__, + __low__, + __states__, + __lowstate__, + __running__, + __active_provider_name__, + __master_opts__, + __jid_event__, + __instance_id__, + __salt_system_encoding__, + __proxy__, + __serializers__, + __reg__, + __executors__, + __events__ # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes diff --git a/README.md b/README.md index 179eaf3..20665a2 100644 --- a/README.md +++ b/README.md @@ -4,82 +4,98 @@ Salt Extension for interacting with Grafana ## Security -If you think you have found a security vulnerability, see -[Salt's security guide][security]. +If you discover a security vulnerability, please refer +to [Salt's security guide][security]. ## User Documentation -This README is for people aiming to contribute to the project. -If you just want to get started with the extension, check out the +For setup and usage instructions, please refer to the [User Documentation][docs]. ## Contributing -The saltext-grafana project team welcomes contributions from the community. +The saltext-grafana project welcomes contributions from anyone! -The [Salt Contributing guide][salt-contributing] has a lot of relevant -information, but if you'd like to jump right in here's how to get started: +The [Salt Extensions guide][salt-extensions-guide] provides comprehensive instructions on all aspects +of Salt extension development, including [writing tests][writing-tests], [running tests][running-tests], +[writing documentation][writing-docs] and [rendering the docs][rendering-docs]. +### Quickstart + +To get started contributing, first clone this repository (or your fork): ```bash # Clone the repo -git clone --origin salt git@github.com:salt-extensions/saltext-grafana.git +git clone --origin upstream git@github.com:salt-extensions/saltext-grafana.git # Change to the repo dir cd saltext-grafana +``` -# Create a new venv -python3 -m venv env --prompt saltext-grafana -source env/bin/activate +#### Automatic +If you have installed [direnv][direnv], allowing the project's `.envrc` ensures +a proper development environment is present and the virtual environment is active. -# On mac, you may need to upgrade pip -python -m pip install --upgrade pip +Without `direnv`, you can still run the automation explicitly: -# On WSL or some flavors of linux you may need to install the `enchant` -# library in order to build the docs -sudo apt-get install -y enchant +```bash +python3 tools/initialize.py +source .venv/bin/activate +``` -# Install extension + test/dev/doc dependencies into your environment -python -m pip install -e '.[tests,dev,docs]' +#### Manual +Please follow the [first steps][first-steps], skipping the repository initialization and first commit. -# Run tests! -python -m nox -e tests-3 +### Pull request + +Always make changes in a feature branch: + +```bash +git switch -c my-feature-branch +``` -# skip requirements install for next time -export SKIP_REQUIREMENTS_INSTALL=1 +To [submit a Pull Request][submitting-pr], you'll need a fork of this repository in +your own GitHub account. 
If you followed the instructions above, +set your fork as the `origin` remote now: -# Build the docs, serve, and view in your web browser: -python -m nox -e docs && (cd docs/_build/html; python -m webbrowser localhost:8000; python -m http.server; cd -) +```bash +git remote add origin git@github.com:.git ``` -Writing code isn't the only way to contribute! We value contributions in any of -these areas: +Ensure you followed the [first steps][first-steps] and commit your changes, fixing any +failing `pre-commit` hooks. Then push the feature branch to your fork and submit a PR. + +### Ways to contribute + +Contributions come in many forms, and they’re all valuable! Here are some ways you can help +without writing code: -* Documentation - especially examples of how to use this module to solve - specific problems. -* Triaging [issues][issues] and participating in [discussions][discussions] -* Reviewing [Pull Requests][PRs] (we really like - [Conventional Comments][comments]!) +* **Documentation**: Especially examples showing how to use this project + to solve specific problems. +* **Triaging issues**: Help manage [issues][issues] and participate in [discussions][discussions]. +* **Reviewing [Pull Requests][PRs]**: We especially appreciate reviews using [Conventional Comments][comments]. -You could also contribute in other ways: +You can also contribute by: * Writing blog posts -* Posting on social media about how you used Salt + Grafana to solve your - problems, including videos +* Sharing your experiences using Salt + Grafana + on social media * Giving talks at conferences * Publishing videos -* Asking/answering questions in IRC, Discord or email groups +* Engaging in IRC, Discord or email groups Any of these things are super valuable to our community, and we sincerely appreciate every contribution! - -For more information, build the docs and head over to http://localhost:8000/ — -that's where you'll find the rest of the documentation. 
- - [security]: https://github.com/saltstack/salt/blob/master/SECURITY.md -[salt-contributing]: https://docs.saltproject.io/en/master/topics/development/contributing.html +[salt-extensions-guide]: https://salt-extensions.github.io/salt-extension-copier/ +[writing-tests]: https://salt-extensions.github.io/salt-extension-copier/topics/testing/writing.html +[running-tests]: https://salt-extensions.github.io/salt-extension-copier/topics/testing/running.html +[writing-docs]: https://salt-extensions.github.io/salt-extension-copier/topics/documenting/writing.html +[rendering-docs]: https://salt-extensions.github.io/salt-extension-copier/topics/documenting/building.html +[first-steps]: https://salt-extensions.github.io/salt-extension-copier/topics/creation.html#initialize-the-python-virtual-environment +[submitting-pr]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork +[direnv]: https://direnv.net [issues]: https://github.com/salt-extensions/saltext-grafana/issues [PRs]: https://github.com/salt-extensions/saltext-grafana/pulls [discussions]: https://github.com/salt-extensions/saltext-grafana/discussions diff --git a/noxfile.py b/noxfile.py index c76a262..344a8b6 100755 --- a/noxfile.py +++ b/noxfile.py @@ -1,11 +1,10 @@ -# pylint: disable=missing-module-docstring,import-error,protected-access,missing-function-docstring import datetime import json import os -import pathlib import shutil import sys import tempfile +from importlib import metadata from pathlib import Path import nox @@ -17,6 +16,9 @@ nox.options.reuse_existing_virtualenvs = True # Don't fail on missing interpreters nox.options.error_on_missing_interpreters = False +# Speed up all sessions by using uv if possible +if tuple(map(int, metadata.version("nox").split("."))) >= (2024, 3): + nox.options.default_venv_backend = "uv|virtualenv" # Python versions to test against PYTHON_VERSIONS = ("3", "3.8", "3.9", "3.10", "3.11", "3.12") @@ -37,7 +39,7 @@ os.environ["PYTHONDONTWRITEBYTECODE"] = "1" # Global Path Definitions -REPO_ROOT = pathlib.Path(__file__).resolve().parent +REPO_ROOT = Path(__file__).resolve().parent # Change current directory to REPO_ROOT os.chdir(str(REPO_ROOT)) @@ -85,14 +87,17 @@ def _install_requirements( install_extras=None, ): install_extras = install_extras or [] + no_progress = "--progress-bar=off" + if isinstance(session._runner.venv, VirtualEnv) and session._runner.venv.venv_backend == "uv": + no_progress = "--no-progress" if SKIP_REQUIREMENTS_INSTALL is False: # Always have the wheel package installed - session.install("--progress-bar=off", "wheel", silent=PIP_INSTALL_SILENT) + session.install(no_progress, "wheel", silent=PIP_INSTALL_SILENT) if install_coverage_requirements: - session.install("--progress-bar=off", COVERAGE_REQUIREMENT, silent=PIP_INSTALL_SILENT) + session.install(no_progress, COVERAGE_REQUIREMENT, silent=PIP_INSTALL_SILENT) if install_salt: - session.install("--progress-bar=off", SALT_REQUIREMENT, silent=PIP_INSTALL_SILENT) + session.install(no_progress, SALT_REQUIREMENT, silent=PIP_INSTALL_SILENT) if install_test_requirements: install_extras.append("tests") @@ -104,7 +109,7 @@ def _install_requirements( "EXTRA_REQUIREMENTS_INSTALL='%s'", EXTRA_REQUIREMENTS_INSTALL, ) - install_command = ["--progress-bar=off"] + install_command = [no_progress] install_command += [req.strip() for req in EXTRA_REQUIREMENTS_INSTALL.split()] session.install(*install_command, silent=PIP_INSTALL_SILENT) @@ 
-170,7 +175,7 @@ def tests(session): if arg.startswith(f"tests{os.sep}"): break try: - pathlib.Path(arg).resolve().relative_to(REPO_ROOT / "tests") + Path(arg).resolve().relative_to(REPO_ROOT / "tests") break except ValueError: continue @@ -244,7 +249,7 @@ def _lint(session, rcfile, flags, paths, tee_output=True): install_salt=False, install_coverage_requirements=False, install_test_requirements=False, - install_extras=["dev", "tests"], + install_extras=["lint", "tests"], ) if tee_output: @@ -307,12 +312,25 @@ def _lint_pre_commit(session, rcfile, flags, paths): ) # Let's patch nox to make it run inside the pre-commit virtualenv - session._runner.venv = VirtualEnv( - os.environ["VIRTUAL_ENV"], - interpreter=session._runner.func.python, - reuse_existing=True, - venv=True, - ) + try: + # nox >= 2024.03.02 + # pylint: disable=unexpected-keyword-arg + venv = VirtualEnv( + os.environ["VIRTUAL_ENV"], + interpreter=session._runner.func.python, + reuse_existing=True, + venv_backend="venv", + ) + except TypeError: + # nox < 2024.03.02 + # pylint: disable=unexpected-keyword-arg + venv = VirtualEnv( + os.environ["VIRTUAL_ENV"], + interpreter=session._runner.func.python, + reuse_existing=True, + venv=True, + ) + session._runner.venv = venv _lint(session, rcfile, flags, paths, tee_output=False) @@ -344,7 +362,7 @@ def lint_tests(session): Run PyLint against the test suite. Set PYLINT_REPORT to a path to capture output. """ flags = [ - "--disable=I,redefined-outer-name,missing-function-docstring,no-member,missing-module-docstring" + "--disable=I,redefined-outer-name,no-member,missing-module-docstring,missing-function-docstring,missing-class-docstring,attribute-defined-outside-init,inconsistent-return-statements,too-few-public-methods,too-many-public-methods", ] if session.posargs: paths = session.posargs @@ -372,7 +390,7 @@ def lint_tests_pre_commit(session): Run PyLint against the code and the test suite. Set PYLINT_REPORT to a path to capture output. """ flags = [ - "--disable=I,redefined-outer-name,missing-function-docstring,no-member,missing-module-docstring", + "--disable=I,redefined-outer-name,no-member,missing-module-docstring,missing-function-docstring,missing-class-docstring,attribute-defined-outside-init,inconsistent-return-statements,too-few-public-methods,too-many-public-methods", ] if session.posargs: paths = session.posargs @@ -407,37 +425,8 @@ def docs(session): os.chdir(str(REPO_ROOT)) -@nox.session(name="docs-html", python="3") -@nox.parametrize("clean", [False, True]) -@nox.parametrize("include_api_docs", [False, True]) -def docs_html(session, clean, include_api_docs): - """ - Build Sphinx HTML Documentation - - TODO: Add option for `make linkcheck` and `make coverage` - calls via Sphinx. Ran into problems with two when - using Furo theme and latest Sphinx. - """ - _install_requirements( - session, - install_coverage_requirements=False, - install_test_requirements=False, - install_source=True, - install_extras=["docs"], - ) - if include_api_docs: - gen_api_docs(session) - build_dir = Path("docs", "_build", "html") - sphinxopts = "-Wn" - if clean: - sphinxopts += "E" - args = [sphinxopts, "--keep-going", "docs", str(build_dir)] - session.run("sphinx-build", *args, external=True) - - @nox.session(name="docs-dev", python="3") -@nox.parametrize("clean", [False, True]) -def docs_dev(session, clean) -> None: +def docs_dev(session): """ Build and serve the Sphinx HTML documentation, with live reloading on file changes, via sphinx-autobuild. 
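Note on the `docs-dev` hunk that follows: it starts forwarding `session.posargs` to `sphinx-autobuild` and only auto-opens a browser when `--host` is left at its default. Assuming that behaviour, an invocation might look like this (the host/port values are illustrative, not taken from the diff):

```bash
# Local development: rebuild docs on change, open a browser automatically
nox -e docs-dev

# Remote or headless machine: bind to all interfaces, no browser auto-open
nox -e docs-dev -- --host 0.0.0.0 --port 8000
```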
@@ -452,10 +441,18 @@ def docs_dev(session, clean) -> None: install_extras=["docs", "docsauto"], ) - # Launching LIVE reloading Sphinx session build_dir = Path("docs", "_build", "html") - args = ["--watch", ".", "--open-browser", "docs", str(build_dir)] - if clean and build_dir.exists(): + + # Allow specifying sphinx-autobuild options, like --host. + args = ["--watch", "."] + session.posargs + if not any(arg.startswith("--host") for arg in args): + # If the user is overriding the host to something other than localhost, + # it's likely they are rendering on a remote/headless system and don't + # want the browser to open. + args.append("--open-browser") + args += ["docs", str(build_dir)] + + if build_dir.exists(): shutil.rmtree(build_dir) session.run("sphinx-autobuild", *args) @@ -496,30 +493,3 @@ def docs_crosslink_info(session): "python", "-m", "sphinx.ext.intersphinx", mapping_entry[0].rstrip("/") + "/objects.inv" ) os.chdir(str(REPO_ROOT)) - - -@nox.session(name="gen-api-docs", python="3") -def gen_api_docs(session): - """ - Generate API Docs - """ - _install_requirements( - session, - install_coverage_requirements=False, - install_test_requirements=False, - install_source=True, - install_extras=["docs"], - ) - try: - shutil.rmtree("docs/ref") - except FileNotFoundError: - pass - session.run( - "sphinx-apidoc", - "--implicit-namespaces", - "--module-first", - "-o", - "docs/ref/", - "src/saltext", - "src/saltext/grafana/config/schemas", - ) diff --git a/pyproject.toml b/pyproject.toml index 996432c..641185e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,10 +53,8 @@ Tracker = "https://github.com/salt-extensions/saltext-grafana/issues" [project.optional-dependencies] changelog = ["towncrier==22.12.0"] dev = [ - "nox", - "pre-commit>=2.4.0", - "pylint", - "saltpylint", + "nox[uv]>=2024.3", + "pre-commit>=2.21.0", ] docs = [ "sphinx", @@ -71,8 +69,7 @@ docs = [ ] docsauto = ["sphinx-autobuild"] lint = [ - "pylint", - "saltpylint", + "pylint==3.2.6", ] tests = [ "pytest>=7.2.0", diff --git a/tests/conftest.py b/tests/conftest.py index 47d380a..31272ce 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -30,10 +30,26 @@ def salt_factories_config(): @pytest.fixture(scope="package") -def master(salt_factories): - return salt_factories.salt_master_daemon(random_string("master-")) +def master_config(): + """ + Salt master configuration overrides for integration tests. + """ + return {} + + +@pytest.fixture(scope="package") +def master(salt_factories, master_config): + return salt_factories.salt_master_daemon(random_string("master-"), overrides=master_config) + + +@pytest.fixture(scope="package") +def minion_config(): + """ + Salt minion configuration overrides for integration tests. + """ + return {} @pytest.fixture(scope="package") -def minion(master): - return master.salt_minion_daemon(random_string("minion-")) +def minion(master, minion_config): + return master.salt_minion_daemon(random_string("minion-"), overrides=minion_config) diff --git a/tools/helpers/__init__.py b/tools/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/helpers/cmd.py b/tools/helpers/cmd.py new file mode 100644 index 0000000..9e8eb80 --- /dev/null +++ b/tools/helpers/cmd.py @@ -0,0 +1,286 @@ +""" +Polyfill for very basic ``plumbum`` functionality, no external libs required. +Makes scripts that call a lot of CLI commands much more pleasant to write. 
+""" + +import os +import platform +import shlex +import shutil +import subprocess +from contextlib import contextmanager +from dataclasses import dataclass +from dataclasses import field +from pathlib import Path + + +class CommandNotFound(RuntimeError): + """ + Raised when a command cannot be found in $PATH + """ + + +@dataclass(frozen=True) +class ProcessResult: + """ + The full process result, returned by ``.run`` methods. + The ``__call__`` ones just return stdout. + """ + + retcode: int + stdout: str | bytes + stderr: str | bytes + argv: tuple + + def check(self, retcode=None): + """ + Check if the retcode is expected. retcode can be a list. + """ + if retcode is None: + expected = [0] + elif not isinstance(retcode, (list, tuple)): + expected = [retcode] + if self.retcode not in expected: + raise ProcessExecutionError(self.argv, self.retcode, self.stdout, self.stderr) + + def __str__(self): + msg = [ + "Process execution result:", + f"Command: {shlex.join(self.argv)}", + f"Retcode: {self.retcode}", + "Stdout: |", + ] + msg += [" " * 10 + "| " + line for line in str(self.stdout).splitlines()] + msg.append("Stderr: |") + msg += [" " * 10 + "| " + line for line in str(self.stderr).splitlines()] + return "\n".join(msg) + + +class ProcessExecutionError(OSError): + """ + Raised by ProcessResult.check when an unexpected retcode was returned. + """ + + def __init__(self, argv, retcode, stdout, stderr): + self.argv = argv + self.retcode = retcode + if isinstance(stdout, bytes): + stdout = ascii(stdout) + if isinstance(stderr, bytes): + stderr = ascii(stderr) + self.stdout = stdout + self.stderr = stderr + + def __str__(self): + msg = [ + "Process finished with unexpected exit code", + f"Retcode: {self.retcode}", + f"Command: {shlex.join(self.argv)}", + "Stdout: |", + ] + msg += [" " * 10 + "| " + line for line in str(self.stdout).splitlines()] + msg.append("Stderr: |") + msg += [" " * 10 + "| " + line for line in str(self.stderr).splitlines()] + return "\n".join(msg) + + +class Local: + """ + Glue for command environment defaults. + Should be treated as a singleton. + + Example: + + local = Local() + + some_cmd = local["some_cmd"] + with local.cwd(some_path), local.env(FOO="bar"): + some_cmd("baz") + + # A changed $PATH requires to rediscover commands. + with local.prepend_path(important_path): + local["other_cmd"]() + with local.venv(venv_path): + local["python"]("-m", "pip", "install", "salt") + + """ + + def __init__(self): + # Explicitly cast values to strings to avoid problems on Windows + self._env = {k: str(v) for k, v in os.environ.items()} + + def __getitem__(self, exe): + """ + Return a LocalCommand in this context. + """ + return LocalCommand(exe, _local=self) + + @property + def path(self): + """ + List of paths in the context's $PATH. + """ + return self._env.get("PATH", "").split(os.pathsep) + + @contextmanager + def cwd(self, path): + """ + Set the default current working directory for commands inside this context. + """ + prev = Path(os.getcwd()) + new = prev / path + os.cwd(new) + try: + yield + finally: + os.cwd(prev) + + @contextmanager + def env(self, **kwargs): + """ + Override default env vars (sourced from the current process' environment) + for commands inside this context. + """ + prev = self._env.copy() + self._env.update((k, str(v)) for k, v in kwargs.items()) + try: + yield + finally: + self._env = prev + + @contextmanager + def path_prepend(self, *args): + """ + Prepend paths to $PATH for commands inside this context. 
+
+        Note: If you have saved a reference to an already requested command,
+              its $PATH will be updated, but it might not be the command
+              that would have been returned by a new request.
+        """
+        new_path = [str(arg) for arg in args] + self.path
+        with self.env(PATH=os.pathsep.join(new_path)):
+            yield
+
+    @contextmanager
+    def venv(self, venv_dir):
+        """
+        Enter a Python virtual environment. Effectively prepends its bin dir
+        to $PATH and sets ``VIRTUAL_ENV``.
+        """
+        venv_dir = Path(venv_dir)
+        if not venv_dir.is_dir() or not (venv_dir / "pyvenv.cfg").exists():
+            raise ValueError(f"Not a virtual environment: {venv_dir}")
+        venv_bin_dir = venv_dir / "bin"
+        if platform.system() == "Windows":
+            venv_bin_dir = venv_dir / "Scripts"
+        with self.path_prepend(venv_bin_dir), self.env(VIRTUAL_ENV=str(venv_dir)):
+            yield
+
+
+@dataclass(frozen=True)
+class Executable:
+    """
+    Utility class used to avoid repeated command lookups.
+    """
+
+    _exe: str
+
+    def __str__(self):
+        return self._exe
+
+    def __repr__(self):
+        return f"Executable <{self._exe}>"
+
+
+@dataclass(frozen=True)
+class Command:
+    """
+    A command object, can be instantiated directly. Does not follow ``Local``.
+    """
+
+    exe: Executable | str
+    args: tuple[str, ...] = ()
+
+    def __post_init__(self):
+        if not isinstance(self.exe, Executable):
+            if not (full_exe := self._which(self.exe)):
+                raise CommandNotFound(self.exe)
+            object.__setattr__(self, "exe", Executable(full_exe))
+
+    def _which(self, exe):
+        return shutil.which(exe)
+
+    def _get_env(self, overrides=None):
+        base = {k: str(v) for k, v in os.environ.items()}
+        base.update(overrides or {})
+        return base
+
+    def __getitem__(self, arg_or_args):
+        """
+        Returns a subcommand with bound parameters.
+
+        Example:
+
+            git = Command("git")["-c", "commit.gpgsign=0"]
+            # ...
+            git("add", ".")
+            git("commit", "-m", "testcommit")
+
+        """
+        if not isinstance(arg_or_args, tuple):
+            arg_or_args = (arg_or_args,)
+        return replace(self, args=(*self.args, *arg_or_args))
+
+    def __call__(self, *args, **kwargs):
+        """
+        Run this command and return stdout.
+        """
+        return self.run(*args, **kwargs).stdout
+
+    def __str__(self):
+        return shlex.join([str(self.exe), *self.args])
+
+    def __repr__(self):
+        return f"Command<{self.exe}, {self.args!r}>"
+
+    def run(self, *args, check=True, env=None, **kwargs):
+        """
+        Run this command and return the full output.
+        """
+        kwargs.setdefault("stdout", subprocess.PIPE)
+        kwargs.setdefault("stderr", subprocess.PIPE)
+        kwargs.setdefault("text", True)
+        argv = [str(self.exe), *self.args, *args]
+        proc = subprocess.run(argv, check=False, env=self._get_env(env), **kwargs)
+        ret = ProcessResult(
+            retcode=proc.returncode,
+            stdout=proc.stdout,
+            stderr=proc.stderr,
+            argv=argv,
+        )
+        if check:
+            ret.check()
+        return ret
+
+
+@dataclass(frozen=True)
+class LocalCommand(Command):
+    """
+    Command returned by Local()["some_command"]. Follows local contexts.
+    """
+
+    _local: Local = field(kw_only=True, repr=False)
+
+    def _which(self, exe):
+        return shutil.which(exe, path=self._local._env.get("PATH", ""))
+
+    def _get_env(self, overrides=None):
+        base = self._local._env.copy()
+        base.update(overrides or {})
+        return base
+
+
+# Should be imported from here.
+local = Local()
+# We must assume git is installed
+git = local["git"]
diff --git a/tools/helpers/copier.py b/tools/helpers/copier.py
new file mode 100644
index 0000000..91cf291
--- /dev/null
+++ b/tools/helpers/copier.py
@@ -0,0 +1,68 @@
+import sys
+from functools import wraps
+from pathlib import Path
+
+from . import prompt
+
+try:
+    # In case we have it, use it.
+    # It's always installed in the Copier environment, so if you ensure you
+    # call this via ``copier_python``, this will work.
+    import yaml
+except ImportError:
+    yaml = None
+
+
+COPIER_ANSWERS = Path(".copier-answers.yml").resolve()
+
+
+def _needs_answers(func):
+    @wraps(func)
+    def _wrapper(*args, **kwargs):
+        if not COPIER_ANSWERS.exists():
+            raise RuntimeError(f"Missing answers file at {COPIER_ANSWERS}")
+        return func(*args, **kwargs)
+
+    return _wrapper
+
+
+@_needs_answers
+def load_answers():
+    """
+    Load the complete answers file. Depends on PyYAML.
+    """
+    if not yaml:
+        raise RuntimeError("Missing pyyaml in environment")
+    with open(COPIER_ANSWERS) as f:
+        return yaml.safe_load(f)
+
+
+@_needs_answers
+def discover_project_name():
+    """
+    Specifically discover project name. No dependency.
+    """
+    for line in COPIER_ANSWERS.read_text().splitlines():
+        if line.startswith("project_name"):
+            return line.split(":", maxsplit=1)[1].strip()
+    raise RuntimeError("Failed discovering project name")
+
+
+def finish_task(msg, success, err_exit=False, extra=None):
+    """
+    Print final conclusion of task (migration) run in Copier.
+
+    We usually want to exit with 0, even when something fails,
+    because a failing task/migration should not crash Copier.
+    """
+    print("\n", file=sys.stderr)
+    if success:
+        prompt.pprint(f"\n ✓ {msg}", bold=True, bg=prompt.DARKGREEN, stream=sys.stderr)
+    elif success is None:
+        prompt.pprint(
+            f"\n ✓ {msg}", bold=True, fg=prompt.YELLOW, bg=prompt.DARKGREEN, stream=sys.stderr
+        )
+        success = True
+    else:
+        prompt.warn(f" ✗ {msg}", extra)
+    raise SystemExit(int(not success and err_exit))
diff --git a/tools/helpers/git.py b/tools/helpers/git.py
new file mode 100644
index 0000000..5336031
--- /dev/null
+++ b/tools/helpers/git.py
@@ -0,0 +1,30 @@
+from pathlib import Path
+
+from .cmd import git
+
+
+def ensure_git():
+    """
+    Ensure the repository has been initialized.
+    """
+    if Path(".git").is_dir():
+        return
+    git("init", "--initial-branch", "main")
+
+
+def list_untracked():
+    """
+    List untracked files.
+    """
+    for path in git("ls-files", "-z", "-o", "--exclude-standard").split("\x00"):
+        if path:
+            yield path
+
+
+def list_conflicted():
+    """
+    List files with merge conflicts.
+    """
+    for path in git("diff", "-z", "--name-only", "--diff-filter=U", "--relative").split("\x00"):
+        if path:
+            yield path
diff --git a/tools/helpers/pre_commit.py b/tools/helpers/pre_commit.py
new file mode 100644
index 0000000..bcab18a
--- /dev/null
+++ b/tools/helpers/pre_commit.py
@@ -0,0 +1,107 @@
+import re
+
+from . import prompt
+from .cmd import ProcessExecutionError
+from .cmd import git
+from .cmd import local
+from .git import list_untracked
+
+PRE_COMMIT_TEST_REGEX = re.compile(
+    r"^(?P<test>[^\n]+?)\.{4,}.*(?P<resolution>Failed|Passed|Skipped)$"
+)
+NON_IDEMPOTENT_HOOKS = (
+    "trim trailing whitespace",
+    "mixed line ending",
+    "fix end of files",
+    "Remove Python Import Header Comments",
+    "Check rST doc files exist for modules/states",
+    "Salt extensions docstrings auto-fixes",
+    "Rewrite the test suite",
+    "Rewrite Code to be Py3.",
+    "isort",
+    "black",
+    "blacken-docs",
+)
+
+
+def parse_pre_commit(data):
+    """
+    Parse pre-commit output into a list of passing hooks and a mapping of
+    failing hooks to their output.
+    """
+    passing = []
+    failing = {}
+    cur = None
+    for line in data.splitlines():
+        if match := PRE_COMMIT_TEST_REGEX.match(line):
+            cur = None
+            if match.group("resolution") != "Failed":
+                passing.append(match.group("test"))
+                continue
+            cur = match.group("test")
+            failing[cur] = []
+            continue
+        try:
+            failing[cur].append(line)
+        except KeyError:
+            # in case the parsing logic fails, let's not crash everything
+            continue
+    return passing, {test: "\n".join(output).strip() for test, output in failing.items()}
+
+
+def check_pre_commit_rerun(data):
+    """
+    Check if we can expect failing hooks to turn green during a rerun.
+    """
+    _, failing = parse_pre_commit(data)
+    for hook in failing:
+        if hook.startswith(NON_IDEMPOTENT_HOOKS):
+            return True
+    return False
+
+
+def run_pre_commit(venv, retries=2):
+    """
+    Run pre-commit in a loop until it passes, there is no chance of
+    autoformatting to make it pass or a maximum number of runs is reached.
+
+    Usually, a maximum of two runs is necessary (if a hook reformats the
+    output of another later one again).
+    """
+    new_files = set()
+
+    def _run_pre_commit_loop(retries_left):
+        untracked_files = set(map(str, list_untracked()))
+        nonlocal new_files
+        new_files = new_files.union(untracked_files)
+        # Ensure pre-commit runs on all paths.
+        # We don't want to git add . because this removes merge conflicts
+        git("add", "--intent-to-add", *untracked_files)
+        with local.venv(venv):
+            try:
+                local["python"]("-m", "pre_commit", "run", "--all-files")
+            except ProcessExecutionError as err:
+                if retries_left > 0 and check_pre_commit_rerun(err.stdout):
+                    return _run_pre_commit_loop(retries_left - 1)
+                raise
+
+    prompt.status(
+        "Running pre-commit hooks against all files. This can take a minute, please be patient"
+    )
+
+    try:
+        _run_pre_commit_loop(retries)
+        return True
+    except ProcessExecutionError as err:
+        _, failing = parse_pre_commit(err.stdout)
+        if failing:
+            msg = f"Please fix all ({len(failing)}) failing hooks"
+        else:
+            msg = f"Output: {err.stderr or err.stdout}"
+        prompt.warn(f"Pre-commit is failing. {msg}")
+        for i, failing_hook in enumerate(failing):
+            prompt.warn(f"✗ Failing hook ({i + 1}): {failing_hook}", failing[failing_hook])
+    finally:
+        # Undo git add --intent-to-add to allow RenovateBot to detect new files correctly
+        git("restore", "--staged", *new_files)
+    return False
diff --git a/tools/helpers/prompt.py b/tools/helpers/prompt.py
new file mode 100644
index 0000000..d0f38c3
--- /dev/null
+++ b/tools/helpers/prompt.py
@@ -0,0 +1,51 @@
+import platform
+import sys
+
+DARKGREEN = (0, 100, 0)
+DARKRED = (139, 0, 0)
+YELLOW = (255, 255, 0)
+
+
+def ensure_utf8():
+    """
+    On Windows, ensure stdout/stderr output uses UTF-8 encoding.
+ """ + if platform.system() != "Windows": + return + for stream in (sys.stdout, sys.stderr): + if stream.encoding != "utf-8": + stream.reconfigure(encoding="utf-8") + + +def pprint(msg, bold=False, fg=None, bg=None, stream=None): + """ + Ugly helper for printing a bit more fancy output. + Stand-in for questionary/prompt_toolkit. + """ + out = "" + if bold: + out += "\033[1m" + if fg: + red, green, blue = fg + out += f"\033[38;2;{red};{green};{blue}m" + if bg: + red, green, blue = bg + out += f"\033[48;2;{red};{green};{blue}m" + out += msg + if bold or fg or bg: + out += "\033[0m" + print(out, file=stream or sys.stdout) + + +def status(msg, message=None): + out = f"\n → {msg}" + pprint(out, bold=True, fg=DARKGREEN, stream=sys.stderr) + if message: + pprint(message, stream=sys.stderr) + + +def warn(header, message=None): + out = f"\n{header}" + pprint(out, bold=True, bg=DARKRED, stream=sys.stderr) + if message: + pprint(message, stream=sys.stderr) diff --git a/tools/helpers/venv.py b/tools/helpers/venv.py new file mode 100644 index 0000000..bf04216 --- /dev/null +++ b/tools/helpers/venv.py @@ -0,0 +1,96 @@ +from pathlib import Path + +from . import prompt +from .cmd import CommandNotFound +from .cmd import local +from .copier import discover_project_name + +# Should follow the version used for relenv packages, see +# https://github.com/saltstack/salt/blob/master/cicd/shared-gh-workflows-context.yml +RECOMMENDED_PYVER = "3.10" +# For discovery of existing virtual environment, descending priority. +VENV_DIRS = ( + ".venv", + "venv", + ".env", + "env", +) + + +try: + uv = local["uv"] +except CommandNotFound: + uv = None + + +def is_venv(path): + if (venv_path := Path(path)).is_dir and (venv_path / "pyvenv.cfg").exists(): + return venv_path + return False + + +def discover_venv(project_root="."): + base = Path(project_root).resolve() + for name in VENV_DIRS: + if found := is_venv(base / name): + return found + raise RuntimeError(f"No venv found in {base}") + + +def create_venv(project_root=".", directory=None): + base = Path(project_root).resolve() + venv = (base / (directory or VENV_DIRS[0])).resolve() + if is_venv(venv): + raise RuntimeError(f"Venv at {venv} already exists") + prompt.status(f"Creating virtual environment at {venv}") + if uv is not None: + prompt.status("Found `uv`. Creating venv") + uv( + "venv", + "--python", + RECOMMENDED_PYVER, + f"--prompt=saltext-{discover_project_name()}", + ) + prompt.status("Installing pip into venv") + # Ensure there's still a `pip` (+ setuptools/wheel) inside the venv for compatibility + uv("venv", "--seed") + else: + prompt.status("Did not find `uv`. 
+        try:
+            python = local[f"python{RECOMMENDED_PYVER}"]
+        except CommandNotFound:
+            python = local["python3"]
+            version = python("--version").split(" ")[1]
+            if not version.startswith(RECOMMENDED_PYVER):
+                raise RuntimeError(
+                    f"No `python{RECOMMENDED_PYVER}` executable found in $PATH, exiting"
+                )
+        python("-m", "venv", VENV_DIRS[0], f"--prompt=saltext-{discover_project_name()}")
+    return venv
+
+
+def ensure_project_venv(project_root=".", reinstall=True):
+    exists = False
+    try:
+        venv = discover_venv(project_root)
+        prompt.status(f"Found existing virtual environment at {venv}")
+        exists = True
+    except RuntimeError:
+        venv = create_venv(project_root)
+    if not reinstall:
+        return venv
+    prompt.status(("Reinstalling" if exists else "Installing") + " project and dependencies")
+    with local.venv(venv):
+        if uv is not None:
+            uv("pip", "install", "-e", ".[dev,tests,docs]")
+        else:
+            try:
+                # We install uv into the virtualenv, so it might be available now.
+                # It speeds up this step a lot.
+                local["uv"]("pip", "install", "-e", ".[dev,tests,docs]")
+            except CommandNotFound:
+                local["python"]("-m", "pip", "install", "-e", ".[dev,tests,docs]")
+        if not exists or not (Path(project_root) / ".git" / "hooks" / "pre-commit").exists():
+            prompt.status("Installing pre-commit hooks")
+            local["python"]("-m", "pre_commit", "install", "--install-hooks")
+    return venv
diff --git a/tools/initialize.py b/tools/initialize.py
new file mode 100644
index 0000000..1c07f68
--- /dev/null
+++ b/tools/initialize.py
@@ -0,0 +1,25 @@
+import sys
+
+from helpers import prompt
+from helpers.copier import finish_task
+from helpers.git import ensure_git
+from helpers.venv import ensure_project_venv
+
+if __name__ == "__main__":
+    try:
+        prompt.ensure_utf8()
+        ensure_git()
+        venv = ensure_project_venv()
+    except Exception as err:  # pylint: disable=broad-except
+        finish_task(
+            f"Failed initializing environment: {err}",
+            False,
+            True,
+            extra=(
+                "No worries, just follow the manual steps documented here: "
+                "https://salt-extensions.github.io/salt-extension-copier/topics/creation.html#first-steps"
+            ),
+        )
+    if len(sys.argv) > 1 and sys.argv[1] == "--print-venv":
+        print(venv)
+    finish_task("Successfully initialized environment", True)
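A note on the reworked `tests/conftest.py` above: the new package-scoped `master_config`/`minion_config` fixtures are handed to the daemon factories as `overrides`, so a test package can customize daemon configuration simply by redefining those fixtures in a nested `conftest.py`. A minimal sketch under that assumption (the override value is hypothetical, not taken from this diff):

```python
# tests/integration/conftest.py -- hypothetical example
import pytest


@pytest.fixture(scope="package")
def minion_config():
    # Shadows the empty default defined in tests/conftest.py; the `minion`
    # fixture passes this dict to salt_minion_daemon(..., overrides=...).
    return {"open_mode": True}
```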