
Commit

chore: add pre-commit hook (#66)
yoshi-automation authored Dec 9, 2020
1 parent 9f33d77 commit d33ff76
Showing 6 changed files with 50 additions and 14 deletions.
17 changes: 17 additions & 0 deletions packages/google-cloud-containeranalysis/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.3.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
  - repo: https://github.com/psf/black
    rev: 19.10b0
    hooks:
      - id: black
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.4
    hooks:
      - id: flake8
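
As an aside (not part of this commit): the first two hooks normalize whitespace, check-yaml validates YAML syntax, and the black and flake8 hooks reformat and lint Python. A rough Python sketch of what trailing-whitespace and end-of-file-fixer enforce on each file they touch:

    from pathlib import Path

    def normalize_whitespace(path: str) -> None:
        """Illustration only: roughly what trailing-whitespace + end-of-file-fixer enforce."""
        p = Path(path)
        lines = [line.rstrip() for line in p.read_text().splitlines()]  # strip trailing whitespace
        while lines and lines[-1] == "":  # drop blank lines hanging at the end of the file
            lines.pop()
        if lines:
            p.write_text("\n".join(lines) + "\n")  # end the file with exactly one newline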
10 changes: 10 additions & 0 deletions packages/google-cloud-containeranalysis/CONTRIBUTING.rst
@@ -111,6 +111,16 @@ Coding Style
should point to the official ``googleapis`` checkout and the
branch should be the main branch on that remote (``master``).

- This repository contains configuration for the
  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
  our linters during a commit. If you have it installed on your ``$PATH``,
  you can enable enforcing those checks via:

.. code-block:: bash

   $ pre-commit install
   pre-commit installed at .git/hooks/pre-commit

Exceptions to PEP8:

- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
5 changes: 2 additions & 3 deletions packages/google-cloud-containeranalysis/noxfile.py
@@ -81,9 +81,8 @@ def default(session):
session.run(
"py.test",
"--quiet",
"--cov=google.cloud.containeranalysis",
"--cov=google.cloud",
"--cov=tests.unit",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
@@ -17,6 +17,7 @@
import os
from pathlib import Path
import sys
from typing import Callable, Dict, List, Optional

import nox

@@ -68,7 +69,7 @@
TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)


def get_pytest_env_vars():
def get_pytest_env_vars() -> Dict[str, str]:
"""Returns a dict for pytest invocation."""
ret = {}

@@ -97,7 +98,7 @@ def get_pytest_env_vars():
#


def _determine_local_import_names(start_dir):
def _determine_local_import_names(start_dir: str) -> List[str]:
"""Determines all import names that should be considered "local".
This is used when running the linter to insure that import order is
@@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir):


@nox.session
def lint(session):
def lint(session: nox.sessions.Session) -> None:
if not TEST_CONFIG['enforce_type_hints']:
session.install("flake8", "flake8-import-order")
else:
@@ -154,7 +155,7 @@ def lint(session):


@nox.session
def blacken(session):
def blacken(session: nox.sessions.Session) -> None:
session.install("black")
python_files = [path for path in os.listdir(".") if path.endswith(".py")]

@@ -168,7 +169,7 @@ def blacken(session):
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]


def _session_tests(session, post_install=None):
def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
"""Runs py.test for a particular project."""
if os.path.exists("requirements.txt"):
session.install("-r", "requirements.txt")
@@ -194,7 +195,7 @@ def _session_tests(session, post_install=None):


@nox.session(python=ALL_VERSIONS)
def py(session):
def py(session: nox.sessions.Session) -> None:
"""Runs py.test for a sample using the specified version of Python."""
if session.python in TESTED_VERSIONS:
_session_tests(session)
@@ -209,7 +210,7 @@ def py(session):
#


def _get_repo_root():
def _get_repo_root() -> Optional[str]:
""" Returns the root folder of the project. """
# Get root of this repository. Assume we don't have directories nested deeper than 10 items.
p = Path(os.getcwd())
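
The rest of _get_repo_root is collapsed in this view; going by the docstring and the ten-item comment above, the walk-up logic is roughly the sketch below (the .git marker is an assumption, the real template may look for other files):

    import os
    from pathlib import Path
    from typing import Optional

    def _get_repo_root_sketch() -> Optional[str]:
        """Illustration only: walk upward from the CWD looking for the repository root."""
        p = Path(os.getcwd())
        for _ in range(10):  # assume directories are nested at most 10 levels deep
            if (p / ".git").exists():  # assumed marker; the actual check may differ
                return str(p)
            if p.parent == p:  # hit the filesystem root without finding a marker
                return None
            p = p.parent
        return None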
@@ -232,7 +233,7 @@ def _get_repo_root():

@nox.session
@nox.parametrize("path", GENERATED_READMES)
def readmegen(session, path):
def readmegen(session: nox.sessions.Session, path: str) -> None:
"""(Re-)generates the readme for a sample."""
session.install("jinja2", "pyyaml")
dir_ = os.path.dirname(path)
7 changes: 4 additions & 3 deletions packages/google-cloud-containeranalysis/synth.metadata
@@ -4,7 +4,7 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/python-containeranalysis.git",
"sha": "0c81a4f2e13d0b38ac9d44ac17b86fbefea3c713"
"sha": "48c62ed2739ecee8acc4b9244e56be293c08e481"
}
},
{
@@ -19,14 +19,14 @@
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
"sha": "9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9"
"sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
"sha": "9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9"
"sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1"
}
}
],
@@ -85,6 +85,7 @@
".kokoro/test-samples.sh",
".kokoro/trampoline.sh",
".kokoro/trampoline_v2.sh",
".pre-commit-config.yaml",
".trampolinerc",
"CODE_OF_CONDUCT.md",
"CONTRIBUTING.rst",
8 changes: 8 additions & 0 deletions packages/google-cloud-containeranalysis/synth.py
@@ -50,6 +50,14 @@
r"""google-cloud-containeranalysis""",
)

# Fix imported type from grafeas

s.replace(
"google/**/types/containeranalysis.py",
"from grafeas\.v1 import vulnerability_pb2 as vulnerability",
"from grafeas.grafeas_v1.types import vulnerability"
)

# Insert helper method to get grafeas client

s.replace(

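A closing note on the synth.py hunk above: synthtool's s.replace applies a regular-expression substitution to every file matching the given glob. A rough standard-library sketch of that one substitution, with a hypothetical concrete path standing in for the google/**/types/containeranalysis.py glob:

    import re
    from pathlib import Path

    # Hypothetical path standing in for the glob used by s.replace above.
    target = Path("google/cloud/devtools/containeranalysis_v1/types/containeranalysis.py")
    text = re.sub(
        r"from grafeas\.v1 import vulnerability_pb2 as vulnerability",
        "from grafeas.grafeas_v1.types import vulnerability",
        target.read_text(),
    )
    target.write_text(text)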