From a1d0a53ac6708f8c338a78915dcdb40c198df066 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 11 Aug 2020 14:15:17 -0700 Subject: [PATCH] feat!: generate with microgenerator (#36) --- packages/grafeas/.coveragerc | 30 +- packages/grafeas/.gitignore | 3 +- packages/grafeas/.kokoro/build.sh | 8 +- .../grafeas/.kokoro/docker/docs/Dockerfile | 98 + .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 + packages/grafeas/.kokoro/docs/common.cfg | 21 +- .../grafeas/.kokoro/docs/docs-presubmit.cfg | 17 + packages/grafeas/.kokoro/publish-docs.sh | 39 +- packages/grafeas/.kokoro/trampoline_v2.sh | 487 +++ packages/grafeas/.trampolinerc | 51 + packages/grafeas/README.rst | 10 + packages/grafeas/UPGRADING.md | 167 + packages/grafeas/docs/UPGRADING.md | 1 + packages/grafeas/docs/conf.py | 11 +- packages/grafeas/docs/gapic/v1/api.rst | 6 - packages/grafeas/docs/gapic/v1/types.rst | 5 - packages/grafeas/docs/grafeas_v1/services.rst | 6 + packages/grafeas/docs/grafeas_v1/types.rst | 5 + packages/grafeas/docs/index.rst | 16 +- packages/grafeas/grafeas/grafeas/__init__.py | 150 + packages/grafeas/grafeas/grafeas/py.typed | 2 + .../grafeas/grafeas/grafeas_v1/__init__.py | 156 +- .../grafeas/grafeas_v1/gapic/__init__.py | 0 .../grafeas/grafeas/grafeas_v1/gapic/enums.py | 257 -- .../grafeas_v1/gapic/grafeas_client_config.py | 112 - .../grafeas_v1/gapic/transports/__init__.py | 0 .../grafeas/grafeas_v1/proto/__init__.py | 0 .../grafeas_v1/proto/attestation_pb2_grpc.py | 3 - .../grafeas_v1/proto/build_pb2_grpc.py | 3 - .../grafeas/grafeas_v1/proto/common_pb2.py | 324 -- .../grafeas_v1/proto/common_pb2_grpc.py | 3 - .../grafeas/grafeas_v1/proto/cvss_pb2.py | 570 --- .../grafeas/grafeas_v1/proto/cvss_pb2_grpc.py | 3 - .../grafeas_v1/proto/deployment_pb2_grpc.py | 3 - .../grafeas_v1/proto/discovery_pb2_grpc.py | 3 - .../grafeas_v1/proto/grafeas_pb2_grpc.py | 700 ---- .../grafeas/grafeas_v1/proto/image_pb2.py | 432 -- .../grafeas_v1/proto/image_pb2_grpc.py | 3 - .../grafeas_v1/proto/package_pb2_grpc.py | 3 - .../grafeas_v1/proto/provenance_pb2_grpc.py | 3 - .../grafeas_v1/proto/upgrade_pb2_grpc.py | 3 - .../proto/vulnerability_pb2_grpc.py | 3 - packages/grafeas/grafeas/grafeas_v1/py.typed | 2 + .../{ => grafeas_v1/services}/__init__.py | 16 +- .../services/grafeas/__init__.py} | 17 +- .../services/grafeas/async_client.py | 1272 ++++++ .../grafeas_v1/services/grafeas/client.py | 1319 ++++++ .../grafeas_v1/services/grafeas/pagers.py | 404 ++ .../services/grafeas/transports/__init__.py | 36 + .../services/grafeas/transports/base.py | 369 ++ .../services/grafeas/transports/grpc.py | 590 +++ .../grafeas/transports/grpc_asyncio.py | 595 +++ .../grafeas/grafeas_v1/types/__init__.py | 163 + .../grafeas/grafeas_v1/types/attestation.py | 95 + .../grafeas/grafeas/grafeas_v1/types/build.py | 72 + .../grafeas/grafeas_v1/types/common.py | 121 + .../grafeas/grafeas/grafeas_v1/types/cvss.py | 121 + .../grafeas/grafeas_v1/types/deployment.py | 89 + .../grafeas/grafeas_v1/types/discovery.py | 94 + .../grafeas/grafeas_v1/types/grafeas.py | 620 +++ .../grafeas/grafeas/grafeas_v1/types/image.py | 122 + .../grafeas/grafeas_v1/types/package.py | 186 + .../grafeas/grafeas_v1/types/provenance.py | 414 ++ .../grafeas/grafeas_v1/types/upgrade.py | 209 + .../grafeas/grafeas_v1/types/vulnerability.py | 343 ++ packages/grafeas/mypy.ini | 3 + packages/grafeas/noxfile.py | 47 +- packages/grafeas/samples/AUTHORING_GUIDE.md | 1 + packages/grafeas/samples/CONTRIBUTING.md | 1 + 
.../scripts/fixup_grafeas_v1_keywords.py | 191 + packages/grafeas/setup.py | 17 +- packages/grafeas/synth.metadata | 10 +- packages/grafeas/synth.py | 381 +- .../tests/unit/gapic/grafeas_v1/__init__.py | 1 + .../unit/gapic/grafeas_v1/test_grafeas.py | 3549 +++++++++++++++++ .../unit/gapic/v1/test_grafeas_client_v1.py | 842 ---- 76 files changed, 12409 insertions(+), 3665 deletions(-) create mode 100644 packages/grafeas/.kokoro/docker/docs/Dockerfile create mode 100755 packages/grafeas/.kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 packages/grafeas/.kokoro/docs/docs-presubmit.cfg create mode 100755 packages/grafeas/.kokoro/trampoline_v2.sh create mode 100644 packages/grafeas/.trampolinerc create mode 100644 packages/grafeas/UPGRADING.md create mode 120000 packages/grafeas/docs/UPGRADING.md delete mode 100644 packages/grafeas/docs/gapic/v1/api.rst delete mode 100644 packages/grafeas/docs/gapic/v1/types.rst create mode 100644 packages/grafeas/docs/grafeas_v1/services.rst create mode 100644 packages/grafeas/docs/grafeas_v1/types.rst create mode 100644 packages/grafeas/grafeas/grafeas/__init__.py create mode 100644 packages/grafeas/grafeas/grafeas/py.typed delete mode 100644 packages/grafeas/grafeas/grafeas_v1/gapic/__init__.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/gapic/enums.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/gapic/transports/__init__.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/__init__.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/common_pb2.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/image_pb2.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py delete mode 100644 packages/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/py.typed rename packages/grafeas/grafeas/{ => grafeas_v1/services}/__init__.py (67%) rename packages/grafeas/grafeas/{grafeas.py => grafeas_v1/services/grafeas/__init__.py} (71%) create mode 100644 packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/services/grafeas/pagers.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/__init__.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/base.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc.py create mode 100644 
packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc_asyncio.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/__init__.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/attestation.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/build.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/common.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/cvss.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/deployment.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/discovery.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/grafeas.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/image.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/package.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/provenance.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/upgrade.py create mode 100644 packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py create mode 100644 packages/grafeas/mypy.ini create mode 100644 packages/grafeas/samples/AUTHORING_GUIDE.md create mode 100644 packages/grafeas/samples/CONTRIBUTING.md create mode 100644 packages/grafeas/scripts/fixup_grafeas_v1_keywords.py create mode 100644 packages/grafeas/tests/unit/gapic/grafeas_v1/__init__.py create mode 100644 packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py delete mode 100644 packages/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py diff --git a/packages/grafeas/.coveragerc b/packages/grafeas/.coveragerc index dd39c8546c41..a336e0799b98 100644 --- a/packages/grafeas/.coveragerc +++ b/packages/grafeas/.coveragerc @@ -1,35 +1,17 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] fail_under = 100 show_missing = True +omit = grafeas/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound \ No newline at end of file diff --git a/packages/grafeas/.gitignore b/packages/grafeas/.gitignore index b87e1ed580d9..b9daa52f118d 100644 --- a/packages/grafeas/.gitignore +++ b/packages/grafeas/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. 
pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/packages/grafeas/.kokoro/build.sh b/packages/grafeas/.kokoro/build.sh index e8ac5b1fb078..f1bee15a6537 100755 --- a/packages/grafeas/.kokoro/build.sh +++ b/packages/grafeas/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/packages/grafeas/.kokoro/docker/docs/Dockerfile b/packages/grafeas/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000000..412b0b56a921 --- /dev/null +++ b/packages/grafeas/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. 
+ --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/packages/grafeas/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/grafeas/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 000000000000..d653dd868e4b --- /dev/null +++ b/packages/grafeas/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? -ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Ɓukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/packages/grafeas/.kokoro/docs/common.cfg b/packages/grafeas/.kokoro/docs/common.cfg index f97c893838b5..8f7afcd12080 100644 --- a/packages/grafeas/.kokoro/docs/common.cfg +++ b/packages/grafeas/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-grafeas/.kokoro/trampoline.sh" +build_file: "python-grafeas/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. 
+env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/packages/grafeas/.kokoro/docs/docs-presubmit.cfg b/packages/grafeas/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000000..1118107829b7 --- /dev/null +++ b/packages/grafeas/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/packages/grafeas/.kokoro/publish-docs.sh b/packages/grafeas/.kokoro/publish-docs.sh index 917e0d1573cf..8acb14e802b0 100755 --- a/packages/grafeas/.kokoro/publish-docs.sh +++ b/packages/grafeas/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 -cd github/python-grafeas - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/grafeas/.kokoro/trampoline_v2.sh b/packages/grafeas/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000000..719bcd5ba84d --- /dev/null +++ b/packages/grafeas/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. 
+function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. 
+ "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. 
"github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. 
+ "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/packages/grafeas/.trampolinerc b/packages/grafeas/.trampolinerc new file mode 100644 index 000000000000..995ee29111e1 --- /dev/null +++ b/packages/grafeas/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. 
+required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/packages/grafeas/README.rst b/packages/grafeas/README.rst index b8997cffaf93..a37f3fcfc925 100644 --- a/packages/grafeas/README.rst +++ b/packages/grafeas/README.rst @@ -26,6 +26,16 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.6 + +Deprecated Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7. + +The last version of this library compatible with Python 2.7 is grafeas==0.4.1. + Mac/Linux ^^^^^^^^^ diff --git a/packages/grafeas/UPGRADING.md b/packages/grafeas/UPGRADING.md new file mode 100644 index 000000000000..154b1f2e9980 --- /dev/null +++ b/packages/grafeas/UPGRADING.md @@ -0,0 +1,167 @@ +# 1.0.0 Migration Guide + +The 1.0 release of the `grafeas` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an [issue](https://github.com/googleapis/grafeas/issues). + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 1.0.0 release requires Python 3.6+. + + +## Method Calls + +> **WARNING**: Breaking change + +Methods expect request objects. We provide a script that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install grafeas +``` + +* The script `fixup_grafeas_v1_keywords.py` is shipped with the library. It expects +an input directory (with the code to convert) and an empty destination directory. 
+ +```sh +$ fixup_grafeas_v1_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from grafeas import grafeas_v1 +from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport + +address = "[SERVICE_ADDRESS]" +scopes = ("[SCOPE]") +transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) + +client = grafeas_v1.GrafeasClient(transport) + +parent = "projects/my-project" +notes = client.list_notes(parent) +``` + + +**After:** +```py +from grafeas import grafeas_v1 +from grafeas.grafeas_v1.services.grafeas.transports import GrafeasGrpcTransport + +address = "[SERVICE_ADDRESS]" +scopes = ("[SCOPE]") +transport = GrafeasGrpcTransport(host=address, scopes=scopes) + +client = grafeas_v1.GrafeasClient(transport=transport) + +parent = "projects/my-project" +request = {"parent": parent} +notes = client.list_notes(request=request) +``` + +### More Details + +In `grafeas<1.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters. + +**Before:** +```py + def list_notes( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): +``` + +In the 1.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional. + +Some methods have additional keyword-only parameters. The available parameters depend on the [`google.api.method_signature` annotation](https://github.com/googleapis/googleapis/blob/b77cacf1ed06e0301a39d6328b599e24102f04be/grafeas/v1/grafeas.proto#L763) specified by the API producer. + + +**After:** +```py + def list_notes(self, + request: grafeas.ListNotesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotesPager: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive. +> Passing both will result in an error. + + +Both of these calls are valid: + +```py +response = client.list_notes( + request={ + "parent": parent, + "filter": filter, + } +) +``` + +```py +response = client.list_notes( + parent=parent, + filter=filter, +) +``` + +This call is invalid because it mixes `request` with a keyword argument `filter`. Executing this code +will result in an error. + +```py +response = client.list_notes( + request={ + "parent": parent, + }, + filter=filter +) +``` + + + +## Enums and Types + + +> **WARNING**: Breaking change + +The submodules `enums` and `types` have been removed. + +**Before:** +```py +from grafeas import grafeas_v1 + +severity = grafeas_v1.gapic.enums.Severity.HIGH +request = grafeas_v1.types.ListOccurrencesRequest() +``` + + +**After:** +```py +from grafeas import grafeas_v1 + +severity = grafeas_v1.Severity.HIGH +request = grafeas_v1.ListOccurrencesRequest() +``` + +## Project Path Helper Method + +The project path helper method `project_path` has been removed. Please construct this path manually.
+ +```py +project = 'my-project' +project_path = f'projects/{project}' +``` \ No newline at end of file diff --git a/packages/grafeas/docs/UPGRADING.md b/packages/grafeas/docs/UPGRADING.md new file mode 120000 index 000000000000..01097c8c0fb8 --- /dev/null +++ b/packages/grafeas/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/packages/grafeas/docs/conf.py b/packages/grafeas/docs/conf.py index 0a4008b5b8fb..78eea6990cae 100644 --- a/packages/grafeas/docs/conf.py +++ b/packages/grafeas/docs/conf.py @@ -20,6 +20,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ @@ -90,7 +94,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/packages/grafeas/docs/gapic/v1/api.rst b/packages/grafeas/docs/gapic/v1/api.rst deleted file mode 100644 index 66589f53952e..000000000000 --- a/packages/grafeas/docs/gapic/v1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Grafeas API -================================= - -.. automodule:: grafeas.grafeas_v1 - :members: - :inherited-members: \ No newline at end of file diff --git a/packages/grafeas/docs/gapic/v1/types.rst b/packages/grafeas/docs/gapic/v1/types.rst deleted file mode 100644 index ee1343181d2d..000000000000 --- a/packages/grafeas/docs/gapic/v1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Grafeas API Client -======================================= - -.. automodule:: grafeas.grafeas_v1.types - :members: \ No newline at end of file diff --git a/packages/grafeas/docs/grafeas_v1/services.rst b/packages/grafeas/docs/grafeas_v1/services.rst new file mode 100644 index 000000000000..0d27b7e2d0d7 --- /dev/null +++ b/packages/grafeas/docs/grafeas_v1/services.rst @@ -0,0 +1,6 @@ +Services for Grafeas Grafeas v1 API +=================================== + +.. automodule:: grafeas.grafeas_v1.services.grafeas + :members: + :inherited-members: diff --git a/packages/grafeas/docs/grafeas_v1/types.rst b/packages/grafeas/docs/grafeas_v1/types.rst new file mode 100644 index 000000000000..0d2ca040a55b --- /dev/null +++ b/packages/grafeas/docs/grafeas_v1/types.rst @@ -0,0 +1,5 @@ +Types for Grafeas Grafeas v1 API +================================ + +.. automodule:: grafeas.grafeas_v1.types + :members: diff --git a/packages/grafeas/docs/index.rst b/packages/grafeas/docs/index.rst index 690b4eb5214f..3e1184edced8 100644 --- a/packages/grafeas/docs/index.rst +++ b/packages/grafeas/docs/index.rst @@ -7,8 +7,8 @@ Api Reference .. toctree:: :maxdepth: 2 - gapic/v1/api - gapic/v1/types + grafeas_v1/services + grafeas_v1/types Changelog --------- @@ -18,4 +18,14 @@ For a list of all ``grafeas`` releases: .. toctree:: :maxdepth: 2 - changelog \ No newline at end of file + changelog + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 1.x release of this library. + +.. 
toctree:: + :maxdepth: 2 + + UPGRADING diff --git a/packages/grafeas/grafeas/grafeas/__init__.py b/packages/grafeas/grafeas/grafeas/__init__.py new file mode 100644 index 000000000000..7f69a2782a7e --- /dev/null +++ b/packages/grafeas/grafeas/grafeas/__init__.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from grafeas.grafeas_v1.services.grafeas.async_client import GrafeasAsyncClient +from grafeas.grafeas_v1.services.grafeas.client import GrafeasClient +from grafeas.grafeas_v1.types.attestation import AttestationNote +from grafeas.grafeas_v1.types.attestation import AttestationOccurrence +from grafeas.grafeas_v1.types.build import BuildNote +from grafeas.grafeas_v1.types.build import BuildOccurrence +from grafeas.grafeas_v1.types.common import NoteKind +from grafeas.grafeas_v1.types.common import RelatedUrl +from grafeas.grafeas_v1.types.common import Signature +from grafeas.grafeas_v1.types.cvss import CVSSv3 +from grafeas.grafeas_v1.types.deployment import DeploymentNote +from grafeas.grafeas_v1.types.deployment import DeploymentOccurrence +from grafeas.grafeas_v1.types.discovery import DiscoveryNote +from grafeas.grafeas_v1.types.discovery import DiscoveryOccurrence +from grafeas.grafeas_v1.types.grafeas import BatchCreateNotesRequest +from grafeas.grafeas_v1.types.grafeas import BatchCreateNotesResponse +from grafeas.grafeas_v1.types.grafeas import BatchCreateOccurrencesRequest +from grafeas.grafeas_v1.types.grafeas import BatchCreateOccurrencesResponse +from grafeas.grafeas_v1.types.grafeas import CreateNoteRequest +from grafeas.grafeas_v1.types.grafeas import CreateOccurrenceRequest +from grafeas.grafeas_v1.types.grafeas import DeleteNoteRequest +from grafeas.grafeas_v1.types.grafeas import DeleteOccurrenceRequest +from grafeas.grafeas_v1.types.grafeas import GetNoteRequest +from grafeas.grafeas_v1.types.grafeas import GetOccurrenceNoteRequest +from grafeas.grafeas_v1.types.grafeas import GetOccurrenceRequest +from grafeas.grafeas_v1.types.grafeas import ListNoteOccurrencesRequest +from grafeas.grafeas_v1.types.grafeas import ListNoteOccurrencesResponse +from grafeas.grafeas_v1.types.grafeas import ListNotesRequest +from grafeas.grafeas_v1.types.grafeas import ListNotesResponse +from grafeas.grafeas_v1.types.grafeas import ListOccurrencesRequest +from grafeas.grafeas_v1.types.grafeas import ListOccurrencesResponse +from grafeas.grafeas_v1.types.grafeas import Note +from grafeas.grafeas_v1.types.grafeas import Occurrence +from grafeas.grafeas_v1.types.grafeas import UpdateNoteRequest +from grafeas.grafeas_v1.types.grafeas import UpdateOccurrenceRequest +from grafeas.grafeas_v1.types.image import Fingerprint +from grafeas.grafeas_v1.types.image import ImageNote +from grafeas.grafeas_v1.types.image import ImageOccurrence +from grafeas.grafeas_v1.types.image import Layer +from grafeas.grafeas_v1.types.package import Architecture +from grafeas.grafeas_v1.types.package import Distribution +from 
grafeas.grafeas_v1.types.package import Location +from grafeas.grafeas_v1.types.package import PackageNote +from grafeas.grafeas_v1.types.package import PackageOccurrence +from grafeas.grafeas_v1.types.package import Version +from grafeas.grafeas_v1.types.provenance import AliasContext +from grafeas.grafeas_v1.types.provenance import Artifact +from grafeas.grafeas_v1.types.provenance import BuildProvenance +from grafeas.grafeas_v1.types.provenance import CloudRepoSourceContext +from grafeas.grafeas_v1.types.provenance import Command +from grafeas.grafeas_v1.types.provenance import FileHashes +from grafeas.grafeas_v1.types.provenance import GerritSourceContext +from grafeas.grafeas_v1.types.provenance import GitSourceContext +from grafeas.grafeas_v1.types.provenance import Hash +from grafeas.grafeas_v1.types.provenance import ProjectRepoId +from grafeas.grafeas_v1.types.provenance import RepoId +from grafeas.grafeas_v1.types.provenance import Source +from grafeas.grafeas_v1.types.provenance import SourceContext +from grafeas.grafeas_v1.types.upgrade import UpgradeDistribution +from grafeas.grafeas_v1.types.upgrade import UpgradeNote +from grafeas.grafeas_v1.types.upgrade import UpgradeOccurrence +from grafeas.grafeas_v1.types.upgrade import WindowsUpdate +from grafeas.grafeas_v1.types.vulnerability import Severity +from grafeas.grafeas_v1.types.vulnerability import VulnerabilityNote +from grafeas.grafeas_v1.types.vulnerability import VulnerabilityOccurrence + +__all__ = ( + "AliasContext", + "Architecture", + "Artifact", + "AttestationNote", + "AttestationOccurrence", + "BatchCreateNotesRequest", + "BatchCreateNotesResponse", + "BatchCreateOccurrencesRequest", + "BatchCreateOccurrencesResponse", + "BuildNote", + "BuildOccurrence", + "BuildProvenance", + "CVSSv3", + "CloudRepoSourceContext", + "Command", + "CreateNoteRequest", + "CreateOccurrenceRequest", + "DeleteNoteRequest", + "DeleteOccurrenceRequest", + "DeploymentNote", + "DeploymentOccurrence", + "DiscoveryNote", + "DiscoveryOccurrence", + "Distribution", + "FileHashes", + "Fingerprint", + "GerritSourceContext", + "GetNoteRequest", + "GetOccurrenceNoteRequest", + "GetOccurrenceRequest", + "GitSourceContext", + "GrafeasAsyncClient", + "GrafeasClient", + "Hash", + "ImageNote", + "ImageOccurrence", + "Layer", + "ListNoteOccurrencesRequest", + "ListNoteOccurrencesResponse", + "ListNotesRequest", + "ListNotesResponse", + "ListOccurrencesRequest", + "ListOccurrencesResponse", + "Location", + "Note", + "NoteKind", + "Occurrence", + "PackageNote", + "PackageOccurrence", + "ProjectRepoId", + "RelatedUrl", + "RepoId", + "Severity", + "Signature", + "Source", + "SourceContext", + "UpdateNoteRequest", + "UpdateOccurrenceRequest", + "UpgradeDistribution", + "UpgradeNote", + "UpgradeOccurrence", + "Version", + "VulnerabilityNote", + "VulnerabilityOccurrence", + "WindowsUpdate", +) diff --git a/packages/grafeas/grafeas/grafeas/py.typed b/packages/grafeas/grafeas/grafeas/py.typed new file mode 100644 index 000000000000..846a558a7874 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The grafeas-grafeas package uses inline types. 
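For context, a minimal usage sketch (not part of the patch itself) of the surface that the new `grafeas/__init__.py` above re-exports. The endpoint and project name are placeholders, and anonymous credentials are used only so the snippet is self-contained; the transport-based construction mirrors the UPGRADING guide example.

```py
from google.auth.credentials import AnonymousCredentials
from grafeas import grafeas_v1
from grafeas.grafeas_v1.services.grafeas.transports import GrafeasGrpcTransport

# Build an explicit gRPC transport, as in the UPGRADING guide example above.
# "example.com:443" is a placeholder endpoint.
transport = GrafeasGrpcTransport(host="example.com:443", credentials=AnonymousCredentials())
client = grafeas_v1.GrafeasClient(transport=transport)

# Request objects and enums are re-exported at the top level of the versioned package.
request = grafeas_v1.ListNotesRequest(parent="projects/my-project")
severity = grafeas_v1.Severity.HIGH
```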
diff --git a/packages/grafeas/grafeas/grafeas_v1/__init__.py b/packages/grafeas/grafeas/grafeas_v1/__init__.py index 2d0a0d57988a..98e1ad1c8c28 100644 --- a/packages/grafeas/grafeas/grafeas_v1/__init__.py +++ b/packages/grafeas/grafeas/grafeas_v1/__init__.py @@ -1,45 +1,149 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import -import sys -import warnings - -from grafeas.grafeas_v1 import types -from grafeas.grafeas_v1.gapic import enums -from grafeas.grafeas_v1.gapic import grafeas_client - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. " - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class GrafeasClient(grafeas_client.GrafeasClient): - __doc__ = grafeas_client.GrafeasClient.__doc__ - enums = enums +from .services.grafeas import GrafeasClient +from .types.attestation import AttestationNote +from .types.attestation import AttestationOccurrence +from .types.build import BuildNote +from .types.build import BuildOccurrence +from .types.common import NoteKind +from .types.common import RelatedUrl +from .types.common import Signature +from .types.cvss import CVSSv3 +from .types.deployment import DeploymentNote +from .types.deployment import DeploymentOccurrence +from .types.discovery import DiscoveryNote +from .types.discovery import DiscoveryOccurrence +from .types.grafeas import BatchCreateNotesRequest +from .types.grafeas import BatchCreateNotesResponse +from .types.grafeas import BatchCreateOccurrencesRequest +from .types.grafeas import BatchCreateOccurrencesResponse +from .types.grafeas import CreateNoteRequest +from .types.grafeas import CreateOccurrenceRequest +from .types.grafeas import DeleteNoteRequest +from .types.grafeas import DeleteOccurrenceRequest +from .types.grafeas import GetNoteRequest +from .types.grafeas import GetOccurrenceNoteRequest +from .types.grafeas import GetOccurrenceRequest +from .types.grafeas import ListNoteOccurrencesRequest +from .types.grafeas import ListNoteOccurrencesResponse +from .types.grafeas import ListNotesRequest +from .types.grafeas import ListNotesResponse +from .types.grafeas import ListOccurrencesRequest +from .types.grafeas import ListOccurrencesResponse +from .types.grafeas import Note +from .types.grafeas import Occurrence +from .types.grafeas import UpdateNoteRequest +from .types.grafeas import UpdateOccurrenceRequest +from .types.image import Fingerprint +from .types.image import ImageNote +from .types.image import ImageOccurrence +from .types.image import Layer +from .types.package import Architecture +from .types.package import Distribution +from .types.package import Location +from .types.package import PackageNote +from .types.package import PackageOccurrence +from .types.package 
import Version +from .types.provenance import AliasContext +from .types.provenance import Artifact +from .types.provenance import BuildProvenance +from .types.provenance import CloudRepoSourceContext +from .types.provenance import Command +from .types.provenance import FileHashes +from .types.provenance import GerritSourceContext +from .types.provenance import GitSourceContext +from .types.provenance import Hash +from .types.provenance import ProjectRepoId +from .types.provenance import RepoId +from .types.provenance import Source +from .types.provenance import SourceContext +from .types.upgrade import UpgradeDistribution +from .types.upgrade import UpgradeNote +from .types.upgrade import UpgradeOccurrence +from .types.upgrade import WindowsUpdate +from .types.vulnerability import Severity +from .types.vulnerability import VulnerabilityNote +from .types.vulnerability import VulnerabilityOccurrence __all__ = ( - "enums", - "types", + "AliasContext", + "Architecture", + "Artifact", + "AttestationNote", + "AttestationOccurrence", + "BatchCreateNotesRequest", + "BatchCreateNotesResponse", + "BatchCreateOccurrencesRequest", + "BatchCreateOccurrencesResponse", + "BuildNote", + "BuildOccurrence", + "BuildProvenance", + "CVSSv3", + "CloudRepoSourceContext", + "Command", + "CreateNoteRequest", + "CreateOccurrenceRequest", + "DeleteNoteRequest", + "DeleteOccurrenceRequest", + "DeploymentNote", + "DeploymentOccurrence", + "DiscoveryNote", + "DiscoveryOccurrence", + "Distribution", + "FileHashes", + "Fingerprint", + "GerritSourceContext", + "GetNoteRequest", + "GetOccurrenceNoteRequest", + "GetOccurrenceRequest", + "GitSourceContext", + "Hash", + "ImageNote", + "ImageOccurrence", + "Layer", + "ListNoteOccurrencesRequest", + "ListNoteOccurrencesResponse", + "ListNotesRequest", + "ListNotesResponse", + "ListOccurrencesRequest", + "ListOccurrencesResponse", + "Location", + "Note", + "NoteKind", + "Occurrence", + "PackageNote", + "PackageOccurrence", + "ProjectRepoId", + "RelatedUrl", + "RepoId", + "Severity", + "Signature", + "Source", + "SourceContext", + "UpdateNoteRequest", + "UpdateOccurrenceRequest", + "UpgradeDistribution", + "UpgradeNote", + "UpgradeOccurrence", + "Version", + "VulnerabilityNote", + "VulnerabilityOccurrence", + "WindowsUpdate", "GrafeasClient", ) diff --git a/packages/grafeas/grafeas/grafeas_v1/gapic/__init__.py b/packages/grafeas/grafeas/grafeas_v1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/grafeas/grafeas/grafeas_v1/gapic/enums.py b/packages/grafeas/grafeas/grafeas_v1/gapic/enums.py deleted file mode 100644 index 59dd0c26cea4..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/gapic/enums.py +++ /dev/null @@ -1,257 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class Architecture(enum.IntEnum): - """ - Instruction set architectures supported by various package managers. 
- - Attributes: - ARCHITECTURE_UNSPECIFIED (int): Unknown architecture. - X86 (int): X86 architecture. - X64 (int): X64 architecture. - """ - - ARCHITECTURE_UNSPECIFIED = 0 - X86 = 1 - X64 = 2 - - -class NoteKind(enum.IntEnum): - """ - Kind represents the kinds of notes supported. - - Attributes: - NOTE_KIND_UNSPECIFIED (int): Unknown. - VULNERABILITY (int): The note and occurrence represent a package vulnerability. - BUILD (int): The note and occurrence assert build provenance. - IMAGE (int): This represents an image basis relationship. - PACKAGE (int): This represents a package installed via a package manager. - DEPLOYMENT (int): The note and occurrence track deployment events. - DISCOVERY (int): The note and occurrence track the initial discovery status of a resource. - ATTESTATION (int): This represents a logical "role" that can attest to artifacts. - UPGRADE (int): This represents an available package upgrade. - """ - - NOTE_KIND_UNSPECIFIED = 0 - VULNERABILITY = 1 - BUILD = 2 - IMAGE = 3 - PACKAGE = 4 - DEPLOYMENT = 5 - DISCOVERY = 6 - ATTESTATION = 7 - UPGRADE = 8 - - -class Severity(enum.IntEnum): - """ - Note provider assigned severity/impact ranking. - - Attributes: - SEVERITY_UNSPECIFIED (int): Unknown. - MINIMAL (int): Minimal severity. - LOW (int): Low severity. - MEDIUM (int): Medium severity. - HIGH (int): High severity. - CRITICAL (int): Critical severity. - """ - - SEVERITY_UNSPECIFIED = 0 - MINIMAL = 1 - LOW = 2 - MEDIUM = 3 - HIGH = 4 - CRITICAL = 5 - - -class AliasContext(object): - class Kind(enum.IntEnum): - """ - The type of an alias. - - Attributes: - KIND_UNSPECIFIED (int): Unknown. - FIXED (int): Git tag. - MOVABLE (int): Git branch. - OTHER (int): Used to specify non-standard aliases. For example, if a Git repo has a - ref named "refs/foo/bar". 
- """ - - KIND_UNSPECIFIED = 0 - FIXED = 1 - MOVABLE = 2 - OTHER = 4 - - -class CVSSv3(object): - class AttackComplexity(enum.IntEnum): - """ - Attributes: - ATTACK_COMPLEXITY_UNSPECIFIED (int) - ATTACK_COMPLEXITY_LOW (int) - ATTACK_COMPLEXITY_HIGH (int) - """ - - ATTACK_COMPLEXITY_UNSPECIFIED = 0 - ATTACK_COMPLEXITY_LOW = 1 - ATTACK_COMPLEXITY_HIGH = 2 - - class AttackVector(enum.IntEnum): - """ - Attributes: - ATTACK_VECTOR_UNSPECIFIED (int) - ATTACK_VECTOR_NETWORK (int) - ATTACK_VECTOR_ADJACENT (int) - ATTACK_VECTOR_LOCAL (int) - ATTACK_VECTOR_PHYSICAL (int) - """ - - ATTACK_VECTOR_UNSPECIFIED = 0 - ATTACK_VECTOR_NETWORK = 1 - ATTACK_VECTOR_ADJACENT = 2 - ATTACK_VECTOR_LOCAL = 3 - ATTACK_VECTOR_PHYSICAL = 4 - - class Impact(enum.IntEnum): - """ - Attributes: - IMPACT_UNSPECIFIED (int) - IMPACT_HIGH (int) - IMPACT_LOW (int) - IMPACT_NONE (int) - """ - - IMPACT_UNSPECIFIED = 0 - IMPACT_HIGH = 1 - IMPACT_LOW = 2 - IMPACT_NONE = 3 - - class PrivilegesRequired(enum.IntEnum): - """ - Attributes: - PRIVILEGES_REQUIRED_UNSPECIFIED (int) - PRIVILEGES_REQUIRED_NONE (int) - PRIVILEGES_REQUIRED_LOW (int) - PRIVILEGES_REQUIRED_HIGH (int) - """ - - PRIVILEGES_REQUIRED_UNSPECIFIED = 0 - PRIVILEGES_REQUIRED_NONE = 1 - PRIVILEGES_REQUIRED_LOW = 2 - PRIVILEGES_REQUIRED_HIGH = 3 - - class Scope(enum.IntEnum): - """ - Attributes: - SCOPE_UNSPECIFIED (int) - SCOPE_UNCHANGED (int) - SCOPE_CHANGED (int) - """ - - SCOPE_UNSPECIFIED = 0 - SCOPE_UNCHANGED = 1 - SCOPE_CHANGED = 2 - - class UserInteraction(enum.IntEnum): - """ - Attributes: - USER_INTERACTION_UNSPECIFIED (int) - USER_INTERACTION_NONE (int) - USER_INTERACTION_REQUIRED (int) - """ - - USER_INTERACTION_UNSPECIFIED = 0 - USER_INTERACTION_NONE = 1 - USER_INTERACTION_REQUIRED = 2 - - -class DeploymentOccurrence(object): - class Platform(enum.IntEnum): - """ - Types of platforms. - - Attributes: - PLATFORM_UNSPECIFIED (int): Unknown. - GKE (int): Google Container Engine. - FLEX (int): Google App Engine: Flexible Environment. - CUSTOM (int): Custom user-defined platform. - """ - - PLATFORM_UNSPECIFIED = 0 - GKE = 1 - FLEX = 2 - CUSTOM = 3 - - -class DiscoveryOccurrence(object): - class AnalysisStatus(enum.IntEnum): - """ - Analysis status for a resource. Currently for initial analysis only (not - updated in continuous analysis). - - Attributes: - ANALYSIS_STATUS_UNSPECIFIED (int): Unknown. - PENDING (int): Resource is known but no action has been taken yet. - SCANNING (int): Resource is being analyzed. - FINISHED_SUCCESS (int): Analysis has finished successfully. - FINISHED_FAILED (int): Analysis has finished unsuccessfully, the analysis itself is in a bad - state. - FINISHED_UNSUPPORTED (int): The resource is known not to be supported - """ - - ANALYSIS_STATUS_UNSPECIFIED = 0 - PENDING = 1 - SCANNING = 2 - FINISHED_SUCCESS = 3 - FINISHED_FAILED = 4 - FINISHED_UNSUPPORTED = 5 - - class ContinuousAnalysis(enum.IntEnum): - """ - Whether the resource is continuously analyzed. - - Attributes: - CONTINUOUS_ANALYSIS_UNSPECIFIED (int): Unknown. - ACTIVE (int): The resource is continuously analyzed. - INACTIVE (int): The resource is ignored for continuous analysis. - """ - - CONTINUOUS_ANALYSIS_UNSPECIFIED = 0 - ACTIVE = 1 - INACTIVE = 2 - - -class Version(object): - class VersionKind(enum.IntEnum): - """ - Whether this is an ordinary package version or a sentinel MIN/MAX version. - - Attributes: - VERSION_KIND_UNSPECIFIED (int): Unknown. - NORMAL (int): A standard package version. - MINIMUM (int): A special version representing negative infinity. 
- MAXIMUM (int): A special version representing positive infinity. - """ - - VERSION_KIND_UNSPECIFIED = 0 - NORMAL = 1 - MINIMUM = 2 - MAXIMUM = 3 diff --git a/packages/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py b/packages/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py deleted file mode 100644 index f3b49a815127..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/gapic/grafeas_client_config.py +++ /dev/null @@ -1,112 +0,0 @@ -config = { - "interfaces": { - "grafeas.v1.Grafeas": { - "retry_codes": { - "retry_policy_1_codes": ["UNAVAILABLE", "DEADLINE_EXCEEDED"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 30000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 30000, - }, - }, - "methods": { - "GetOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListOccurrences": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "BatchCreateOccurrences": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateOccurrence": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetOccurrenceNote": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetNote": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListNotes": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteNote": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateNote": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "BatchCreateNotes": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateNote": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ListNoteOccurrences": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - }, - } - } -} diff --git 
a/packages/grafeas/grafeas/grafeas_v1/gapic/transports/__init__.py b/packages/grafeas/grafeas/grafeas_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/__init__.py b/packages/grafeas/grafeas/grafeas_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/attestation_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/build_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/common_pb2.py b/packages/grafeas/grafeas/grafeas_v1/proto/common_pb2.py deleted file mode 100644 index 207e6f95e3a2..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/common_pb2.py +++ /dev/null @@ -1,324 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas/grafeas_v1/proto/common.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas/grafeas_v1/proto/common.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=b"\n\rio.grafeas.v1P\001ZFgoogle.golang.org/genproto/googleapis/grafeas/grafeas_v1/proto;grafeas\242\002\003GRA", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n%grafeas/grafeas_v1/proto/common.proto\x12\ngrafeas.v1"(\n\nRelatedUrl\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\t"5\n\tSignature\x12\x11\n\tsignature\x18\x01 \x01(\x0c\x12\x15\n\rpublic_key_id\x18\x02 \x01(\t*\x98\x01\n\x08NoteKind\x12\x19\n\x15NOTE_KIND_UNSPECIFIED\x10\x00\x12\x11\n\rVULNERABILITY\x10\x01\x12\t\n\x05\x42UILD\x10\x02\x12\t\n\x05IMAGE\x10\x03\x12\x0b\n\x07PACKAGE\x10\x04\x12\x0e\n\nDEPLOYMENT\x10\x05\x12\r\n\tDISCOVERY\x10\x06\x12\x0f\n\x0b\x41TTESTATION\x10\x07\x12\x0b\n\x07UPGRADE\x10\x08\x42_\n\rio.grafeas.v1P\x01ZFgoogle.golang.org/genproto/googleapis/grafeas/grafeas_v1/proto;grafeas\xa2\x02\x03GRAb\x06proto3', -) - -_NOTEKIND = _descriptor.EnumDescriptor( - name="NoteKind", - full_name="grafeas.v1.NoteKind", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="NOTE_KIND_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VULNERABILITY", - index=1, - 
number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BUILD", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PACKAGE", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DEPLOYMENT", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISCOVERY", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTESTATION", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UPGRADE", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=151, - serialized_end=303, -) -_sym_db.RegisterEnumDescriptor(_NOTEKIND) - -NoteKind = enum_type_wrapper.EnumTypeWrapper(_NOTEKIND) -NOTE_KIND_UNSPECIFIED = 0 -VULNERABILITY = 1 -BUILD = 2 -IMAGE = 3 -PACKAGE = 4 -DEPLOYMENT = 5 -DISCOVERY = 6 -ATTESTATION = 7 -UPGRADE = 8 - - -_RELATEDURL = _descriptor.Descriptor( - name="RelatedUrl", - full_name="grafeas.v1.RelatedUrl", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="grafeas.v1.RelatedUrl.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="label", - full_name="grafeas.v1.RelatedUrl.label", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=53, - serialized_end=93, -) - - -_SIGNATURE = _descriptor.Descriptor( - name="Signature", - full_name="grafeas.v1.Signature", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="signature", - full_name="grafeas.v1.Signature.signature", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="public_key_id", - full_name="grafeas.v1.Signature.public_key_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=95, - serialized_end=148, -) - -DESCRIPTOR.message_types_by_name["RelatedUrl"] = _RELATEDURL -DESCRIPTOR.message_types_by_name["Signature"] = _SIGNATURE -DESCRIPTOR.enum_types_by_name["NoteKind"] = _NOTEKIND -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -RelatedUrl = _reflection.GeneratedProtocolMessageType( - "RelatedUrl", - (_message.Message,), - { - "DESCRIPTOR": _RELATEDURL, - "__module__": "grafeas.grafeas_v1.proto.common_pb2", - "__doc__": """Metadata for any related URL information. - - Attributes: - url: - Specific URL associated with the resource. - label: - Label to describe usage of the URL. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.RelatedUrl) - }, -) -_sym_db.RegisterMessage(RelatedUrl) - -Signature = _reflection.GeneratedProtocolMessageType( - "Signature", - (_message.Message,), - { - "DESCRIPTOR": _SIGNATURE, - "__module__": "grafeas.grafeas_v1.proto.common_pb2", - "__doc__": """Verifiers (e.g. Kritis implementations) MUST verify signatures with - respect to the trust anchors defined in policy (e.g. a Kritis policy). - Typically this means that the verifier has been configured with a map - from ``public_key_id`` to public key material (and any required - parameters, e.g. signing algorithm). In particular, verification - implementations MUST NOT treat the signature ``public_key_id`` as - anything more than a key lookup hint. The ``public_key_id`` DOES NOT - validate or authenticate a public key; it only provides a mechanism - for quickly selecting a public key ALREADY CONFIGURED on the verifier - through a trusted channel. Verification implementations MUST reject - signatures in any of the following circumstances: \* The - ``public_key_id`` is not recognized by the verifier. \* The public key - that ``public_key_id`` refers to does not verify the signature with - respect to the payload. The ``signature`` contents SHOULD NOT be - “attached” (where the payload is included with the serialized - ``signature`` bytes). Verifiers MUST ignore any “attached” payload and - only verify signatures with respect to explicitly provided payload - (e.g. a ``payload`` field on the proto message that holds this - Signature, or the canonical serialization of the proto message that - holds this signature). - - Attributes: - signature: - The content of the signature, an opaque bytestring. The - payload that this signature verifies MUST be unambiguously - provided with the Signature during verification. A wrapper - message might provide the payload explicitly. Alternatively, a - message might have a canonical serialization that can always - be unambiguously computed to derive the payload. - public_key_id: - The identifier for the public key that verifies this - signature. \* The ``public_key_id`` is required. \* The - ``public_key_id`` MUST be an RFC3986 conformant URI. \* When - possible, the ``public_key_id`` SHOULD be an immutable - reference, such as a cryptographic digest. 
Examples of valid - ``public_key_id``\ s: OpenPGP V4 public key fingerprint: \* - “openpgp4fpr:74FAF3B861BDA0870C7B6DEF607E48D2A663AEEA” See - https://www.iana.org/assignments/uri-schemes/prov/openpgp4fpr - for more details on this scheme. RFC6920 digest-named - SubjectPublicKeyInfo (digest of the DER serialization): \* - “ni:///sha-256;cD9o9Cq6LG3jD0iKXqEi_vdjJGecm_iXkbqVoScViaU” \* - “nih:///sha-256;703f68f42aba2c6de30f488a5ea122fef76324679c9bf8 - 9791ba95a1271589a5” - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Signature) - }, -) -_sym_db.RegisterMessage(Signature) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/common_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py b/packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py deleted file mode 100644 index 718116bde60c..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2.py +++ /dev/null @@ -1,570 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas/grafeas_v1/proto/cvss.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas/grafeas_v1/proto/cvss.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=b"\n\rio.grafeas.v1P\001ZFgoogle.golang.org/genproto/googleapis/grafeas/grafeas_v1/proto;grafeas\242\002\003GRA", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n#grafeas/grafeas_v1/proto/cvss.proto\x12\ngrafeas.v1"\xc5\t\n\x06\x43VSSv3\x12\x12\n\nbase_score\x18\x01 \x01(\x02\x12\x1c\n\x14\x65xploitability_score\x18\x02 \x01(\x02\x12\x14\n\x0cimpact_score\x18\x03 \x01(\x02\x12\x36\n\rattack_vector\x18\x05 \x01(\x0e\x32\x1f.grafeas.v1.CVSSv3.AttackVector\x12>\n\x11\x61ttack_complexity\x18\x06 \x01(\x0e\x32#.grafeas.v1.CVSSv3.AttackComplexity\x12\x42\n\x13privileges_required\x18\x07 \x01(\x0e\x32%.grafeas.v1.CVSSv3.PrivilegesRequired\x12<\n\x10user_interaction\x18\x08 \x01(\x0e\x32".grafeas.v1.CVSSv3.UserInteraction\x12\'\n\x05scope\x18\t \x01(\x0e\x32\x18.grafeas.v1.CVSSv3.Scope\x12\x39\n\x16\x63onfidentiality_impact\x18\n \x01(\x0e\x32\x19.grafeas.v1.CVSSv3.Impact\x12\x33\n\x10integrity_impact\x18\x0b \x01(\x0e\x32\x19.grafeas.v1.CVSSv3.Impact\x12\x36\n\x13\x61vailability_impact\x18\x0c 
\x01(\x0e\x32\x19.grafeas.v1.CVSSv3.Impact"\x99\x01\n\x0c\x41ttackVector\x12\x1d\n\x19\x41TTACK_VECTOR_UNSPECIFIED\x10\x00\x12\x19\n\x15\x41TTACK_VECTOR_NETWORK\x10\x01\x12\x1a\n\x16\x41TTACK_VECTOR_ADJACENT\x10\x02\x12\x17\n\x13\x41TTACK_VECTOR_LOCAL\x10\x03\x12\x1a\n\x16\x41TTACK_VECTOR_PHYSICAL\x10\x04"l\n\x10\x41ttackComplexity\x12!\n\x1d\x41TTACK_COMPLEXITY_UNSPECIFIED\x10\x00\x12\x19\n\x15\x41TTACK_COMPLEXITY_LOW\x10\x01\x12\x1a\n\x16\x41TTACK_COMPLEXITY_HIGH\x10\x02"\x92\x01\n\x12PrivilegesRequired\x12#\n\x1fPRIVILEGES_REQUIRED_UNSPECIFIED\x10\x00\x12\x1c\n\x18PRIVILEGES_REQUIRED_NONE\x10\x01\x12\x1b\n\x17PRIVILEGES_REQUIRED_LOW\x10\x02\x12\x1c\n\x18PRIVILEGES_REQUIRED_HIGH\x10\x03"m\n\x0fUserInteraction\x12 \n\x1cUSER_INTERACTION_UNSPECIFIED\x10\x00\x12\x19\n\x15USER_INTERACTION_NONE\x10\x01\x12\x1d\n\x19USER_INTERACTION_REQUIRED\x10\x02"F\n\x05Scope\x12\x15\n\x11SCOPE_UNSPECIFIED\x10\x00\x12\x13\n\x0fSCOPE_UNCHANGED\x10\x01\x12\x11\n\rSCOPE_CHANGED\x10\x02"R\n\x06Impact\x12\x16\n\x12IMPACT_UNSPECIFIED\x10\x00\x12\x0f\n\x0bIMPACT_HIGH\x10\x01\x12\x0e\n\nIMPACT_LOW\x10\x02\x12\x0f\n\x0bIMPACT_NONE\x10\x03\x42_\n\rio.grafeas.v1P\x01ZFgoogle.golang.org/genproto/googleapis/grafeas/grafeas_v1/proto;grafeas\xa2\x02\x03GRAb\x06proto3', -) - - -_CVSSV3_ATTACKVECTOR = _descriptor.EnumDescriptor( - name="AttackVector", - full_name="grafeas.v1.CVSSv3.AttackVector", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_NETWORK", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_ADJACENT", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_LOCAL", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_VECTOR_PHYSICAL", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=594, - serialized_end=747, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_ATTACKVECTOR) - -_CVSSV3_ATTACKCOMPLEXITY = _descriptor.EnumDescriptor( - name="AttackComplexity", - full_name="grafeas.v1.CVSSv3.AttackComplexity", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ATTACK_COMPLEXITY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_COMPLEXITY_LOW", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTACK_COMPLEXITY_HIGH", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=749, - serialized_end=857, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_ATTACKCOMPLEXITY) - -_CVSSV3_PRIVILEGESREQUIRED = 
_descriptor.EnumDescriptor( - name="PrivilegesRequired", - full_name="grafeas.v1.CVSSv3.PrivilegesRequired", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_NONE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_LOW", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRIVILEGES_REQUIRED_HIGH", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=860, - serialized_end=1006, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_PRIVILEGESREQUIRED) - -_CVSSV3_USERINTERACTION = _descriptor.EnumDescriptor( - name="UserInteraction", - full_name="grafeas.v1.CVSSv3.UserInteraction", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="USER_INTERACTION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="USER_INTERACTION_NONE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="USER_INTERACTION_REQUIRED", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1008, - serialized_end=1117, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_USERINTERACTION) - -_CVSSV3_SCOPE = _descriptor.EnumDescriptor( - name="Scope", - full_name="grafeas.v1.CVSSv3.Scope", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="SCOPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SCOPE_UNCHANGED", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SCOPE_CHANGED", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1119, - serialized_end=1189, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_SCOPE) - -_CVSSV3_IMPACT = _descriptor.EnumDescriptor( - name="Impact", - full_name="grafeas.v1.CVSSv3.Impact", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="IMPACT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPACT_HIGH", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPACT_LOW", - 
index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPACT_NONE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1191, - serialized_end=1273, -) -_sym_db.RegisterEnumDescriptor(_CVSSV3_IMPACT) - - -_CVSSV3 = _descriptor.Descriptor( - name="CVSSv3", - full_name="grafeas.v1.CVSSv3", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="base_score", - full_name="grafeas.v1.CVSSv3.base_score", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="exploitability_score", - full_name="grafeas.v1.CVSSv3.exploitability_score", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="impact_score", - full_name="grafeas.v1.CVSSv3.impact_score", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attack_vector", - full_name="grafeas.v1.CVSSv3.attack_vector", - index=3, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attack_complexity", - full_name="grafeas.v1.CVSSv3.attack_complexity", - index=4, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="privileges_required", - full_name="grafeas.v1.CVSSv3.privileges_required", - index=5, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="user_interaction", - full_name="grafeas.v1.CVSSv3.user_interaction", - index=6, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scope", - full_name="grafeas.v1.CVSSv3.scope", - index=7, - number=9, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="confidentiality_impact", - full_name="grafeas.v1.CVSSv3.confidentiality_impact", - index=8, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="integrity_impact", - full_name="grafeas.v1.CVSSv3.integrity_impact", - index=9, - number=11, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="availability_impact", - full_name="grafeas.v1.CVSSv3.availability_impact", - index=10, - number=12, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[ - _CVSSV3_ATTACKVECTOR, - _CVSSV3_ATTACKCOMPLEXITY, - _CVSSV3_PRIVILEGESREQUIRED, - _CVSSV3_USERINTERACTION, - _CVSSV3_SCOPE, - _CVSSV3_IMPACT, - ], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=52, - serialized_end=1273, -) - -_CVSSV3.fields_by_name["attack_vector"].enum_type = _CVSSV3_ATTACKVECTOR -_CVSSV3.fields_by_name["attack_complexity"].enum_type = _CVSSV3_ATTACKCOMPLEXITY -_CVSSV3.fields_by_name["privileges_required"].enum_type = _CVSSV3_PRIVILEGESREQUIRED -_CVSSV3.fields_by_name["user_interaction"].enum_type = _CVSSV3_USERINTERACTION -_CVSSV3.fields_by_name["scope"].enum_type = _CVSSV3_SCOPE -_CVSSV3.fields_by_name["confidentiality_impact"].enum_type = _CVSSV3_IMPACT -_CVSSV3.fields_by_name["integrity_impact"].enum_type = _CVSSV3_IMPACT -_CVSSV3.fields_by_name["availability_impact"].enum_type = _CVSSV3_IMPACT -_CVSSV3_ATTACKVECTOR.containing_type = _CVSSV3 -_CVSSV3_ATTACKCOMPLEXITY.containing_type = _CVSSV3 -_CVSSV3_PRIVILEGESREQUIRED.containing_type = _CVSSV3 -_CVSSV3_USERINTERACTION.containing_type = _CVSSV3 -_CVSSV3_SCOPE.containing_type = _CVSSV3 -_CVSSV3_IMPACT.containing_type = _CVSSV3 -DESCRIPTOR.message_types_by_name["CVSSv3"] = _CVSSV3 -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CVSSv3 = _reflection.GeneratedProtocolMessageType( - "CVSSv3", - (_message.Message,), - { - "DESCRIPTOR": _CVSSV3, - "__module__": "grafeas.grafeas_v1.proto.cvss_pb2", - "__doc__": """Common Vulnerability Scoring System version 3. For details, see - https://www.first.org/cvss/specification-document - - Attributes: - base_score: - The base score is a function of the base metric scores. 
- attack_vector: - Base Metrics Represents the intrinsic characteristics of a - vulnerability that are constant over time and across user - environments. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.CVSSv3) - }, -) -_sym_db.RegisterMessage(CVSSv3) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/cvss_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/deployment_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/discovery_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py deleted file mode 100644 index 1a65a40b1871..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2_grpc.py +++ /dev/null @@ -1,700 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from grafeas.grafeas_v1.proto import ( - grafeas_pb2 as grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2, -) - - -class GrafeasStub(object): - """[Grafeas](https://grafeas.io) API. - - Retrieves analysis results of Cloud components such as Docker container - images. - - Analysis results are stored as a series of occurrences. An `Occurrence` - contains information about a specific analysis instance on a resource. An - occurrence refers to a `Note`. A note contains details describing the - analysis and is generally stored in a separate project, called a `Provider`. - Multiple occurrences can refer to the same note. - - For example, an SSL vulnerability could affect multiple images. In this case, - there would be one note for the vulnerability and an occurrence for each - image with the vulnerability referring to that note. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.GetOccurrence = channel.unary_unary( - "/grafeas.v1.Grafeas/GetOccurrence", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString, - ) - self.ListOccurrences = channel.unary_unary( - "/grafeas.v1.Grafeas/ListOccurrences", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesResponse.FromString, - ) - self.DeleteOccurrence = channel.unary_unary( - "/grafeas.v1.Grafeas/DeleteOccurrence", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteOccurrenceRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateOccurrence = channel.unary_unary( - "/grafeas.v1.Grafeas/CreateOccurrence", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.CreateOccurrenceRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString, - ) - self.BatchCreateOccurrences = channel.unary_unary( - "/grafeas.v1.Grafeas/BatchCreateOccurrences", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesResponse.FromString, - ) - self.UpdateOccurrence = channel.unary_unary( - "/grafeas.v1.Grafeas/UpdateOccurrence", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateOccurrenceRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString, - ) - self.GetOccurrenceNote = channel.unary_unary( - "/grafeas.v1.Grafeas/GetOccurrenceNote", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceNoteRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - ) - self.GetNote = channel.unary_unary( - "/grafeas.v1.Grafeas/GetNote", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetNoteRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - ) - self.ListNotes = channel.unary_unary( - "/grafeas.v1.Grafeas/ListNotes", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesResponse.FromString, - ) - self.DeleteNote = channel.unary_unary( - "/grafeas.v1.Grafeas/DeleteNote", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteNoteRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateNote = channel.unary_unary( - "/grafeas.v1.Grafeas/CreateNote", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.CreateNoteRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - ) - self.BatchCreateNotes = channel.unary_unary( - "/grafeas.v1.Grafeas/BatchCreateNotes", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesRequest.SerializeToString, - 
response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesResponse.FromString, - ) - self.UpdateNote = channel.unary_unary( - "/grafeas.v1.Grafeas/UpdateNote", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateNoteRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - ) - self.ListNoteOccurrences = channel.unary_unary( - "/grafeas.v1.Grafeas/ListNoteOccurrences", - request_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesRequest.SerializeToString, - response_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesResponse.FromString, - ) - - -class GrafeasServicer(object): - """[Grafeas](https://grafeas.io) API. - - Retrieves analysis results of Cloud components such as Docker container - images. - - Analysis results are stored as a series of occurrences. An `Occurrence` - contains information about a specific analysis instance on a resource. An - occurrence refers to a `Note`. A note contains details describing the - analysis and is generally stored in a separate project, called a `Provider`. - Multiple occurrences can refer to the same note. - - For example, an SSL vulnerability could affect multiple images. In this case, - there would be one note for the vulnerability and an occurrence for each - image with the vulnerability referring to that note. - """ - - def GetOccurrence(self, request, context): - """Gets the specified occurrence. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListOccurrences(self, request, context): - """Lists occurrences for the specified project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteOccurrence(self, request, context): - """Deletes the specified occurrence. For example, use this method to delete an - occurrence when the occurrence is no longer applicable for the given - resource. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateOccurrence(self, request, context): - """Creates a new occurrence. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def BatchCreateOccurrences(self, request, context): - """Creates new occurrences in batch. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateOccurrence(self, request, context): - """Updates the specified occurrence. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetOccurrenceNote(self, request, context): - """Gets the note attached to the specified occurrence. Consumer projects can - use this method to get a note that belongs to a provider project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetNote(self, request, context): - """Gets the specified note. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListNotes(self, request, context): - """Lists notes for the specified project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteNote(self, request, context): - """Deletes the specified note. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateNote(self, request, context): - """Creates a new note. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def BatchCreateNotes(self, request, context): - """Creates new notes in batch. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateNote(self, request, context): - """Updates the specified note. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListNoteOccurrences(self, request, context): - """Lists occurrences referencing the specified note. Provider projects can use - this method to get all occurrences across consumer projects referencing the - specified note. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_GrafeasServicer_to_server(servicer, server): - rpc_method_handlers = { - "GetOccurrence": grpc.unary_unary_rpc_method_handler( - servicer.GetOccurrence, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.SerializeToString, - ), - "ListOccurrences": grpc.unary_unary_rpc_method_handler( - servicer.ListOccurrences, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesResponse.SerializeToString, - ), - "DeleteOccurrence": grpc.unary_unary_rpc_method_handler( - servicer.DeleteOccurrence, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteOccurrenceRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateOccurrence": grpc.unary_unary_rpc_method_handler( - servicer.CreateOccurrence, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.CreateOccurrenceRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.SerializeToString, - ), - "BatchCreateOccurrences": grpc.unary_unary_rpc_method_handler( - servicer.BatchCreateOccurrences, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesResponse.SerializeToString, - ), - "UpdateOccurrence": grpc.unary_unary_rpc_method_handler( - servicer.UpdateOccurrence, - 
request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateOccurrenceRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.SerializeToString, - ), - "GetOccurrenceNote": grpc.unary_unary_rpc_method_handler( - servicer.GetOccurrenceNote, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceNoteRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString, - ), - "GetNote": grpc.unary_unary_rpc_method_handler( - servicer.GetNote, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetNoteRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString, - ), - "ListNotes": grpc.unary_unary_rpc_method_handler( - servicer.ListNotes, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesResponse.SerializeToString, - ), - "DeleteNote": grpc.unary_unary_rpc_method_handler( - servicer.DeleteNote, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteNoteRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateNote": grpc.unary_unary_rpc_method_handler( - servicer.CreateNote, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.CreateNoteRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString, - ), - "BatchCreateNotes": grpc.unary_unary_rpc_method_handler( - servicer.BatchCreateNotes, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesResponse.SerializeToString, - ), - "UpdateNote": grpc.unary_unary_rpc_method_handler( - servicer.UpdateNote, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateNoteRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.SerializeToString, - ), - "ListNoteOccurrences": grpc.unary_unary_rpc_method_handler( - servicer.ListNoteOccurrences, - request_deserializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesRequest.FromString, - response_serializer=grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "grafeas.v1.Grafeas", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class Grafeas(object): - """[Grafeas](https://grafeas.io) API. - - Retrieves analysis results of Cloud components such as Docker container - images. - - Analysis results are stored as a series of occurrences. An `Occurrence` - contains information about a specific analysis instance on a resource. An - occurrence refers to a `Note`. A note contains details describing the - analysis and is generally stored in a separate project, called a `Provider`. - Multiple occurrences can refer to the same note. - - For example, an SSL vulnerability could affect multiple images. In this case, - there would be one note for the vulnerability and an occurrence for each - image with the vulnerability referring to that note. 
- """ - - @staticmethod - def GetOccurrence( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/GetOccurrence", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListOccurrences( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/ListOccurrences", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListOccurrencesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteOccurrence( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/DeleteOccurrence", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteOccurrenceRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateOccurrence( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/CreateOccurrence", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.CreateOccurrenceRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def BatchCreateOccurrences( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/BatchCreateOccurrences", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateOccurrencesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateOccurrence( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/UpdateOccurrence", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateOccurrenceRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Occurrence.FromString, - options, - channel_credentials, - 
call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetOccurrenceNote( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/GetOccurrenceNote", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetOccurrenceNoteRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetNote( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/GetNote", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.GetNoteRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListNotes( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/ListNotes", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNotesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteNote( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/DeleteNote", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.DeleteNoteRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateNote( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/CreateNote", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.CreateNoteRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def BatchCreateNotes( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/BatchCreateNotes", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.BatchCreateNotesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - 
timeout, - metadata, - ) - - @staticmethod - def UpdateNote( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/UpdateNote", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.UpdateNoteRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.Note.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListNoteOccurrences( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/grafeas.v1.Grafeas/ListNoteOccurrences", - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesRequest.SerializeToString, - grafeas_dot_grafeas__v1_dot_proto_dot_grafeas__pb2.ListNoteOccurrencesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/image_pb2.py b/packages/grafeas/grafeas/grafeas_v1/proto/image_pb2.py deleted file mode 100644 index 754c66eff210..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/image_pb2.py +++ /dev/null @@ -1,432 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: grafeas/grafeas_v1/proto/image.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="grafeas/grafeas_v1/proto/image.proto", - package="grafeas.v1", - syntax="proto3", - serialized_options=b"\n\rio.grafeas.v1P\001ZFgoogle.golang.org/genproto/googleapis/grafeas/grafeas_v1/proto;grafeas\242\002\003GRA", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n$grafeas/grafeas_v1/proto/image.proto\x12\ngrafeas.v1"-\n\x05Layer\x12\x11\n\tdirective\x18\x01 \x01(\t\x12\x11\n\targuments\x18\x02 \x01(\t"@\n\x0b\x46ingerprint\x12\x0f\n\x07v1_name\x18\x01 \x01(\t\x12\x0f\n\x07v2_blob\x18\x02 \x03(\t\x12\x0f\n\x07v2_name\x18\x03 \x01(\t"O\n\tImageNote\x12\x14\n\x0cresource_url\x18\x01 \x01(\t\x12,\n\x0b\x66ingerprint\x18\x02 \x01(\x0b\x32\x17.grafeas.v1.Fingerprint"\x93\x01\n\x0fImageOccurrence\x12,\n\x0b\x66ingerprint\x18\x01 \x01(\x0b\x32\x17.grafeas.v1.Fingerprint\x12\x10\n\x08\x64istance\x18\x02 \x01(\x05\x12%\n\nlayer_info\x18\x03 \x03(\x0b\x32\x11.grafeas.v1.Layer\x12\x19\n\x11\x62\x61se_resource_url\x18\x04 \x01(\tB_\n\rio.grafeas.v1P\x01ZFgoogle.golang.org/genproto/googleapis/grafeas/grafeas_v1/proto;grafeas\xa2\x02\x03GRAb\x06proto3', -) - - -_LAYER = _descriptor.Descriptor( - name="Layer", - full_name="grafeas.v1.Layer", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="directive", - full_name="grafeas.v1.Layer.directive", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="arguments", - full_name="grafeas.v1.Layer.arguments", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=52, - serialized_end=97, -) - - -_FINGERPRINT = _descriptor.Descriptor( - name="Fingerprint", - full_name="grafeas.v1.Fingerprint", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="v1_name", - full_name="grafeas.v1.Fingerprint.v1_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="v2_blob", - full_name="grafeas.v1.Fingerprint.v2_blob", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="v2_name", - full_name="grafeas.v1.Fingerprint.v2_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=99, - serialized_end=163, -) - - -_IMAGENOTE = _descriptor.Descriptor( - name="ImageNote", - full_name="grafeas.v1.ImageNote", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="resource_url", - full_name="grafeas.v1.ImageNote.resource_url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="fingerprint", - full_name="grafeas.v1.ImageNote.fingerprint", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=165, - serialized_end=244, -) - - -_IMAGEOCCURRENCE = _descriptor.Descriptor( - name="ImageOccurrence", - full_name="grafeas.v1.ImageOccurrence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="fingerprint", - full_name="grafeas.v1.ImageOccurrence.fingerprint", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="distance", - full_name="grafeas.v1.ImageOccurrence.distance", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="layer_info", - full_name="grafeas.v1.ImageOccurrence.layer_info", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="base_resource_url", - full_name="grafeas.v1.ImageOccurrence.base_resource_url", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=247, - serialized_end=394, -) - -_IMAGENOTE.fields_by_name["fingerprint"].message_type = _FINGERPRINT -_IMAGEOCCURRENCE.fields_by_name["fingerprint"].message_type = _FINGERPRINT -_IMAGEOCCURRENCE.fields_by_name["layer_info"].message_type = _LAYER -DESCRIPTOR.message_types_by_name["Layer"] = _LAYER -DESCRIPTOR.message_types_by_name["Fingerprint"] = _FINGERPRINT -DESCRIPTOR.message_types_by_name["ImageNote"] = _IMAGENOTE -DESCRIPTOR.message_types_by_name["ImageOccurrence"] = _IMAGEOCCURRENCE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Layer = _reflection.GeneratedProtocolMessageType( - "Layer", - (_message.Message,), - { - "DESCRIPTOR": _LAYER, - "__module__": "grafeas.grafeas_v1.proto.image_pb2", - "__doc__": """Layer holds metadata specific to a layer of a Docker image. - - Attributes: - directive: - Required. The recovered Dockerfile directive used to construct - this layer. See - https://docs.docker.com/engine/reference/builder/ for more - information. - arguments: - The recovered arguments to the Dockerfile directive. 
- """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Layer) - }, -) -_sym_db.RegisterMessage(Layer) - -Fingerprint = _reflection.GeneratedProtocolMessageType( - "Fingerprint", - (_message.Message,), - { - "DESCRIPTOR": _FINGERPRINT, - "__module__": "grafeas.grafeas_v1.proto.image_pb2", - "__doc__": """A set of properties that uniquely identify a given Docker image. - - Attributes: - v1_name: - Required. The layer ID of the final layer in the Docker - image’s v1 representation. - v2_blob: - Required. The ordered list of v2 blobs that represent a given - image. - v2_name: - Output only. The name of the image’s v2 blobs computed via: - [bottom] := v2_blob[bottom] [N] := sha256(v2_blob[N] + " " + - v2_name[N+1]) Only the name of the final blob is kept. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.Fingerprint) - }, -) -_sym_db.RegisterMessage(Fingerprint) - -ImageNote = _reflection.GeneratedProtocolMessageType( - "ImageNote", - (_message.Message,), - { - "DESCRIPTOR": _IMAGENOTE, - "__module__": "grafeas.grafeas_v1.proto.image_pb2", - "__doc__": """Basis describes the base image portion (Note) of the DockerImage - relationship. Linked occurrences are derived from this or an - equivalent image via: FROM Or an equivalent - reference, e.g., a tag of the resource_url. - - Attributes: - resource_url: - Required. Immutable. The resource_url for the resource - representing the basis of associated occurrence images. - fingerprint: - Required. Immutable. The fingerprint of the base image. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ImageNote) - }, -) -_sym_db.RegisterMessage(ImageNote) - -ImageOccurrence = _reflection.GeneratedProtocolMessageType( - "ImageOccurrence", - (_message.Message,), - { - "DESCRIPTOR": _IMAGEOCCURRENCE, - "__module__": "grafeas.grafeas_v1.proto.image_pb2", - "__doc__": """Details of the derived image portion of the DockerImage relationship. - This image would be produced from a Dockerfile with FROM - . - - Attributes: - fingerprint: - Required. The fingerprint of the derived image. - distance: - Output only. The number of layers by which this image differs - from the associated image basis. - layer_info: - This contains layer-specific metadata, if populated it has - length “distance” and is ordered with [distance] being the - layer immediately following the base image and [1] being the - final layer. - base_resource_url: - Output only. This contains the base image URL for the derived - image occurrence. - """, - # @@protoc_insertion_point(class_scope:grafeas.v1.ImageOccurrence) - }, -) -_sym_db.RegisterMessage(ImageOccurrence) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/image_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/package_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/provenance_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/upgrade_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py b/packages/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/grafeas/grafeas/grafeas_v1/py.typed b/packages/grafeas/grafeas/grafeas_v1/py.typed new file mode 100644 index 000000000000..846a558a7874 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The grafeas-grafeas package uses inline types. diff --git a/packages/grafeas/grafeas/__init__.py b/packages/grafeas/grafeas/grafeas_v1/services/__init__.py similarity index 67% rename from packages/grafeas/grafeas/__init__.py rename to packages/grafeas/grafeas/grafeas_v1/services/__init__.py index 8fcc60e2b9c6..42ffdf2bc43d 100644 --- a/packages/grafeas/grafeas/__init__.py +++ b/packages/grafeas/grafeas/grafeas_v1/services/__init__.py @@ -1,24 +1,16 @@ # -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +# diff --git a/packages/grafeas/grafeas/grafeas.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/__init__.py similarity index 71% rename from packages/grafeas/grafeas/grafeas.py rename to packages/grafeas/grafeas/grafeas_v1/services/grafeas/__init__.py index be586f6cfd64..d06e1144ae94 100644 --- a/packages/grafeas/grafeas/grafeas.py +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/__init__.py @@ -1,29 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import - -from grafeas.grafeas_v1 import GrafeasClient -from grafeas.grafeas_v1 import enums -from grafeas.grafeas_v1 import types - +from .client import GrafeasClient +from .async_client import GrafeasAsyncClient __all__ = ( - "enums", - "types", "GrafeasClient", + "GrafeasAsyncClient", ) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py new file mode 100644 index 000000000000..fc42be2bde8c --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/async_client.py @@ -0,0 +1,1272 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
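The __init__.py rename above replaces the old grafeas/grafeas.py shim (which re-exported GrafeasClient, enums, and types from grafeas.grafeas_v1) with a services.grafeas package that exports both the sync client and the new async client. A minimal sketch of the resulting import change; the removed form is taken from the deleted module above, the new form from the new __all__, and anything else about the surrounding surface is assumed rather than shown in this hunk:

# Before this change (module removed above):
#     from grafeas.grafeas import GrafeasClient, enums, types
# After this change:
from grafeas.grafeas_v1.services.grafeas import GrafeasAsyncClient, GrafeasClient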
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from grafeas.grafeas_v1.services.grafeas import pagers +from grafeas.grafeas_v1.types import attestation +from grafeas.grafeas_v1.types import build +from grafeas.grafeas_v1.types import common +from grafeas.grafeas_v1.types import deployment +from grafeas.grafeas_v1.types import discovery +from grafeas.grafeas_v1.types import grafeas +from grafeas.grafeas_v1.types import image +from grafeas.grafeas_v1.types import package +from grafeas.grafeas_v1.types import upgrade +from grafeas.grafeas_v1.types import vulnerability + +from .transports.base import GrafeasTransport +from .transports.grpc_asyncio import GrafeasGrpcAsyncIOTransport +from .client import GrafeasClient + + +class GrafeasAsyncClient: + """`Grafeas `__ API. + + Retrieves analysis results of Cloud components such as Docker + container images. + + Analysis results are stored as a series of occurrences. An + ``Occurrence`` contains information about a specific analysis + instance on a resource. An occurrence refers to a ``Note``. A note + contains details describing the analysis and is generally stored in + a separate project, called a ``Provider``. Multiple occurrences can + refer to the same note. + + For example, an SSL vulnerability could affect multiple images. In + this case, there would be one note for the vulnerability and an + occurrence for each image with the vulnerability referring to that + note. + """ + + _client: GrafeasClient + + DEFAULT_ENDPOINT = GrafeasClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GrafeasClient.DEFAULT_MTLS_ENDPOINT + + occurrence_path = staticmethod(GrafeasClient.occurrence_path) + + note_path = staticmethod(GrafeasClient.note_path) + + get_transport_class = functools.partial( + type(GrafeasClient).get_transport_class, type(GrafeasClient) + ) + + def __init__( + self, *, transport: Union[str, GrafeasTransport] = "grpc_asyncio", + ) -> None: + """Instantiate the grafeas client. + + Args: + transport (Union[str, ~.GrafeasTransport]): The + transport to use. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = GrafeasClient(transport=transport,) + + async def get_occurrence( + self, + request: grafeas.GetOccurrenceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Occurrence: + r"""Gets the specified occurrence. + + Args: + request (:class:`~.grafeas.GetOccurrenceRequest`): + The request object. Request to get an occurrence. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Occurrence: + An instance of an analysis type that + has been found on a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.GetOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_occurrence, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_occurrences( + self, + request: grafeas.ListOccurrencesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOccurrencesAsyncPager: + r"""Lists occurrences for the specified project. + + Args: + request (:class:`~.grafeas.ListOccurrencesRequest`): + The request object. Request to list occurrences. + parent (:class:`str`): + The name of the project to list occurrences for in the + form of ``projects/[PROJECT_ID]``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The filter expression. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListOccurrencesAsyncPager: + Response for listing occurrences. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.ListOccurrencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_occurrences, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOccurrencesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_occurrence( + self, + request: grafeas.DeleteOccurrenceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified occurrence. For example, use + this method to delete an occurrence when the occurrence + is no longer applicable for the given resource. + + Args: + request (:class:`~.grafeas.DeleteOccurrenceRequest`): + The request object. Request to delete an occurrence. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.DeleteOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_occurrence, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def create_occurrence( + self, + request: grafeas.CreateOccurrenceRequest = None, + *, + parent: str = None, + occurrence: grafeas.Occurrence = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Occurrence: + r"""Creates a new occurrence. + + Args: + request (:class:`~.grafeas.CreateOccurrenceRequest`): + The request object. Request to create a new occurrence. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the occurrence is + to be created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + occurrence (:class:`~.grafeas.Occurrence`): + The occurrence to create. + This corresponds to the ``occurrence`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Occurrence: + An instance of an analysis type that + has been found on a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, occurrence]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.CreateOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if occurrence is not None: + request.occurrence = occurrence + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_occurrence, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def batch_create_occurrences( + self, + request: grafeas.BatchCreateOccurrencesRequest = None, + *, + parent: str = None, + occurrences: Sequence[grafeas.Occurrence] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.BatchCreateOccurrencesResponse: + r"""Creates new occurrences in batch. + + Args: + request (:class:`~.grafeas.BatchCreateOccurrencesRequest`): + The request object. Request to create occurrences in + batch. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the occurrences + are to be created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + occurrences (:class:`Sequence[~.grafeas.Occurrence]`): + The occurrences to create. Max + allowed length is 1000. 
+ This corresponds to the ``occurrences`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.BatchCreateOccurrencesResponse: + Response for creating occurrences in + batch. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, occurrences]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.BatchCreateOccurrencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if occurrences is not None: + request.occurrences = occurrences + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_occurrences, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_occurrence( + self, + request: grafeas.UpdateOccurrenceRequest = None, + *, + name: str = None, + occurrence: grafeas.Occurrence = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Occurrence: + r"""Updates the specified occurrence. + + Args: + request (:class:`~.grafeas.UpdateOccurrenceRequest`): + The request object. Request to update an occurrence. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + occurrence (:class:`~.grafeas.Occurrence`): + The updated occurrence. + This corresponds to the ``occurrence`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + The fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Occurrence: + An instance of an analysis type that + has been found on a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([name, occurrence, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.UpdateOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if occurrence is not None: + request.occurrence = occurrence + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_occurrence, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_occurrence_note( + self, + request: grafeas.GetOccurrenceNoteRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Gets the note attached to the specified occurrence. + Consumer projects can use this method to get a note that + belongs to a provider project. + + Args: + request (:class:`~.grafeas.GetOccurrenceNoteRequest`): + The request object. Request to get the note to which the + specified occurrence is attached. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.GetOccurrenceNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_occurrence_note, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_note( + self, + request: grafeas.GetNoteRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Gets the specified note. + + Args: + request (:class:`~.grafeas.GetNoteRequest`): + The request object. Request to get a note. + name (:class:`str`): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.GetNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_note, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_notes( + self, + request: grafeas.ListNotesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotesAsyncPager: + r"""Lists notes for the specified project. + + Args: + request (:class:`~.grafeas.ListNotesRequest`): + The request object. Request to list notes. + parent (:class:`str`): + The name of the project to list notes for in the form of + ``projects/[PROJECT_ID]``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The filter expression. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pagers.ListNotesAsyncPager: + Response for listing notes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.ListNotesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_notes, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNotesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_note( + self, + request: grafeas.DeleteNoteRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified note. + + Args: + request (:class:`~.grafeas.DeleteNoteRequest`): + The request object. Request to delete a note. + name (:class:`str`): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.DeleteNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_note, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def create_note( + self, + request: grafeas.CreateNoteRequest = None, + *, + parent: str = None, + note_id: str = None, + note: grafeas.Note = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Creates a new note. + + Args: + request (:class:`~.grafeas.CreateNoteRequest`): + The request object. Request to create a new note. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the note is to be + created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + note_id (:class:`str`): + The ID to use for this note. + This corresponds to the ``note_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + note (:class:`~.grafeas.Note`): + The note to create. + This corresponds to the ``note`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, note_id, note]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.CreateNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if note_id is not None: + request.note_id = note_id + if note is not None: + request.note = note + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_note, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def batch_create_notes( + self, + request: grafeas.BatchCreateNotesRequest = None, + *, + parent: str = None, + notes: Sequence[grafeas.BatchCreateNotesRequest.NotesEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.BatchCreateNotesResponse: + r"""Creates new notes in batch. + + Args: + request (:class:`~.grafeas.BatchCreateNotesRequest`): + The request object. Request to create notes in batch. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the notes are to + be created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notes (:class:`Sequence[~.grafeas.BatchCreateNotesRequest.NotesEntry]`): + The notes to create. Max allowed + length is 1000. + This corresponds to the ``notes`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.BatchCreateNotesResponse: + Response for creating notes in batch. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, notes]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.BatchCreateNotesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if notes is not None: + request.notes = notes + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_create_notes, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_note( + self, + request: grafeas.UpdateNoteRequest = None, + *, + name: str = None, + note: grafeas.Note = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Updates the specified note. + + Args: + request (:class:`~.grafeas.UpdateNoteRequest`): + The request object. Request to update a note. + name (:class:`str`): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + note (:class:`~.grafeas.Note`): + The updated note. + This corresponds to the ``note`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + The fields to update. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, note, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.UpdateNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if note is not None: + request.note = note + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_note, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_note_occurrences( + self, + request: grafeas.ListNoteOccurrencesRequest = None, + *, + name: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNoteOccurrencesAsyncPager: + r"""Lists occurrences referencing the specified note. + Provider projects can use this method to get all + occurrences across consumer projects referencing the + specified note. + + Args: + request (:class:`~.grafeas.ListNoteOccurrencesRequest`): + The request object. Request to list occurrences for a + note. + name (:class:`str`): + The name of the note to list occurrences for in the form + of ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The filter expression. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListNoteOccurrencesAsyncPager: + Response for listing occurrences for + a note. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([name, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = grafeas.ListNoteOccurrencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_note_occurrences, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListNoteOccurrencesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("grafeas",).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("GrafeasAsyncClient",) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py new file mode 100644 index 000000000000..03f388ef875b --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/client.py @@ -0,0 +1,1319 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
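For orientation, the generated clients in this patch share one calling convention: each RPC accepts either a populated request object or the flattened fields, never both, and list RPCs return pagers. A minimal usage sketch follows (it is not part of the generated files; it assumes a pre-built gRPC channel, placeholder resource names, and the usual microgenerator exports on ``grafeas_v1`` such as ``GrafeasClient`` and ``GetNoteRequest``):

import grpc

from grafeas import grafeas_v1
from grafeas.grafeas_v1.services.grafeas.transports import GrafeasGrpcTransport

# Placeholder channel; real deployments use a secure, authenticated channel.
channel = grpc.insecure_channel("localhost:8080")
transport = GrafeasGrpcTransport(channel=channel)
client = grafeas_v1.GrafeasClient(transport=transport)

# Flattened arguments...
note = client.get_note(name="projects/my-provider/notes/my-note")

# ...or a request object; passing both raises ValueError (see the checks below).
request = grafeas_v1.GetNoteRequest(name="projects/my-provider/notes/my-note")
note = client.get_note(request=request)

# List RPCs return a pager that resolves additional pages transparently.
for occurrence in client.list_occurrences(parent="projects/my-project"):
    print(occurrence.name)

The async client shown above mirrors this surface with awaitable methods and ``__aiter__``-able pagers.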
+#
+
+from collections import OrderedDict
+import os
+import re
+from typing import Callable, Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+from grafeas.grafeas_v1.services.grafeas import pagers
+from grafeas.grafeas_v1.types import attestation
+from grafeas.grafeas_v1.types import build
+from grafeas.grafeas_v1.types import common
+from grafeas.grafeas_v1.types import deployment
+from grafeas.grafeas_v1.types import discovery
+from grafeas.grafeas_v1.types import grafeas
+from grafeas.grafeas_v1.types import image
+from grafeas.grafeas_v1.types import package
+from grafeas.grafeas_v1.types import upgrade
+from grafeas.grafeas_v1.types import vulnerability
+
+from .transports.base import GrafeasTransport
+from .transports.grpc import GrafeasGrpcTransport
+from .transports.grpc_asyncio import GrafeasGrpcAsyncIOTransport
+
+
+class GrafeasClientMeta(type):
+ """Metaclass for the Grafeas client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[GrafeasTransport]]
+ _transport_registry["grpc"] = GrafeasGrpcTransport
+ _transport_registry["grpc_asyncio"] = GrafeasGrpcAsyncIOTransport
+
+ def get_transport_class(cls, label: str = None,) -> Type[GrafeasTransport]:
+ """Return an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class GrafeasClient(metaclass=GrafeasClientMeta):
+ """`Grafeas <https://grafeas.io>`__ API.
+
+ Retrieves analysis results of Cloud components such as Docker
+ container images.
+
+ Analysis results are stored as a series of occurrences. An
+ ``Occurrence`` contains information about a specific analysis
+ instance on a resource. An occurrence refers to a ``Note``. A note
+ contains details describing the analysis and is generally stored in
+ a separate project, called a ``Provider``. Multiple occurrences can
+ refer to the same note.
+
+ For example, an SSL vulnerability could affect multiple images. In
+ this case, there would be one note for the vulnerability and an
+ occurrence for each image with the vulnerability referring to that
+ note.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "containeranalysis.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @staticmethod
+ def note_path(project: str, note: str,) -> str:
+ """Return a fully-qualified note string."""
+ return "projects/{project}/notes/{note}".format(project=project, note=note,)
+
+ @staticmethod
+ def parse_note_path(path: str) -> Dict[str, str]:
+ """Parse a note path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/notes/(?P<note>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def occurrence_path(project: str, occurrence: str,) -> str:
+ """Return a fully-qualified occurrence string."""
+ return "projects/{project}/occurrences/{occurrence}".format(
+ project=project, occurrence=occurrence,
+ )
+
+ @staticmethod
+ def parse_occurrence_path(path: str) -> Dict[str, str]:
+ """Parse an occurrence path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/occurrences/(?P<occurrence>.+?)$", path
+ )
+ return m.groupdict() if m else {}
+
+ def __init__(self, *, transport: Union[str, GrafeasTransport] = None,) -> None:
+ """Instantiate the grafeas client.
+
+ Args:
+ transport (Union[str, ~.GrafeasTransport]): The
+ transport to use.
+
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ if isinstance(transport, GrafeasTransport):
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport()
+
+ def get_occurrence(
+ self,
+ request: grafeas.GetOccurrenceRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> grafeas.Occurrence:
+ r"""Gets the specified occurrence.
+
+ Args:
+ request (:class:`~.grafeas.GetOccurrenceRequest`):
+ The request object. Request to get an occurrence.
+ name (:class:`str`):
+ The name of the occurrence in the form of
+ ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``.
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.grafeas.Occurrence:
+ An instance of an analysis type that
+ has been found on a resource.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.GetOccurrenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.GetOccurrenceRequest): + request = grafeas.GetOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_occurrence] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_occurrences( + self, + request: grafeas.ListOccurrencesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOccurrencesPager: + r"""Lists occurrences for the specified project. + + Args: + request (:class:`~.grafeas.ListOccurrencesRequest`): + The request object. Request to list occurrences. + parent (:class:`str`): + The name of the project to list occurrences for in the + form of ``projects/[PROJECT_ID]``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The filter expression. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListOccurrencesPager: + Response for listing occurrences. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.ListOccurrencesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.ListOccurrencesRequest): + request = grafeas.ListOccurrencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_occurrences] + + # Certain fields should be provided within the metadata header; + # add these here. 
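+ # (`to_grpc_metadata` serializes these (field, value) pairs into the
+ # `x-goog-request-params` header, which the service uses for request
+ # routing.)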
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOccurrencesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_occurrence( + self, + request: grafeas.DeleteOccurrenceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified occurrence. For example, use + this method to delete an occurrence when the occurrence + is no longer applicable for the given resource. + + Args: + request (:class:`~.grafeas.DeleteOccurrenceRequest`): + The request object. Request to delete an occurrence. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.DeleteOccurrenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.DeleteOccurrenceRequest): + request = grafeas.DeleteOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_occurrence] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def create_occurrence( + self, + request: grafeas.CreateOccurrenceRequest = None, + *, + parent: str = None, + occurrence: grafeas.Occurrence = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Occurrence: + r"""Creates a new occurrence. + + Args: + request (:class:`~.grafeas.CreateOccurrenceRequest`): + The request object. Request to create a new occurrence. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the occurrence is + to be created. 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + occurrence (:class:`~.grafeas.Occurrence`): + The occurrence to create. + This corresponds to the ``occurrence`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Occurrence: + An instance of an analysis type that + has been found on a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, occurrence]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.CreateOccurrenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.CreateOccurrenceRequest): + request = grafeas.CreateOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if occurrence is not None: + request.occurrence = occurrence + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_occurrence] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def batch_create_occurrences( + self, + request: grafeas.BatchCreateOccurrencesRequest = None, + *, + parent: str = None, + occurrences: Sequence[grafeas.Occurrence] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.BatchCreateOccurrencesResponse: + r"""Creates new occurrences in batch. + + Args: + request (:class:`~.grafeas.BatchCreateOccurrencesRequest`): + The request object. Request to create occurrences in + batch. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the occurrences + are to be created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + occurrences (:class:`Sequence[~.grafeas.Occurrence]`): + The occurrences to create. Max + allowed length is 1000. + This corresponds to the ``occurrences`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.grafeas.BatchCreateOccurrencesResponse: + Response for creating occurrences in + batch. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, occurrences]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.BatchCreateOccurrencesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.BatchCreateOccurrencesRequest): + request = grafeas.BatchCreateOccurrencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if occurrences is not None: + request.occurrences = occurrences + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_occurrences] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_occurrence( + self, + request: grafeas.UpdateOccurrenceRequest = None, + *, + name: str = None, + occurrence: grafeas.Occurrence = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Occurrence: + r"""Updates the specified occurrence. + + Args: + request (:class:`~.grafeas.UpdateOccurrenceRequest`): + The request object. Request to update an occurrence. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + occurrence (:class:`~.grafeas.Occurrence`): + The updated occurrence. + This corresponds to the ``occurrence`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + The fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Occurrence: + An instance of an analysis type that + has been found on a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, occurrence, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.UpdateOccurrenceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.UpdateOccurrenceRequest): + request = grafeas.UpdateOccurrenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if occurrence is not None: + request.occurrence = occurrence + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_occurrence] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_occurrence_note( + self, + request: grafeas.GetOccurrenceNoteRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Gets the note attached to the specified occurrence. + Consumer projects can use this method to get a note that + belongs to a provider project. + + Args: + request (:class:`~.grafeas.GetOccurrenceNoteRequest`): + The request object. Request to get the note to which the + specified occurrence is attached. + name (:class:`str`): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.GetOccurrenceNoteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.GetOccurrenceNoteRequest): + request = grafeas.GetOccurrenceNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_occurrence_note] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
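+ # (Passing explicit `retry`/`timeout` values here overrides the defaults
+ # baked into the wrapped method; `gapic_v1.method.DEFAULT` keeps them.)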
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_note( + self, + request: grafeas.GetNoteRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Gets the specified note. + + Args: + request (:class:`~.grafeas.GetNoteRequest`): + The request object. Request to get a note. + name (:class:`str`): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.GetNoteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.GetNoteRequest): + request = grafeas.GetNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_note] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_notes( + self, + request: grafeas.ListNotesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNotesPager: + r"""Lists notes for the specified project. + + Args: + request (:class:`~.grafeas.ListNotesRequest`): + The request object. Request to list notes. + parent (:class:`str`): + The name of the project to list notes for in the form of + ``projects/[PROJECT_ID]``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The filter expression. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pagers.ListNotesPager: + Response for listing notes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.ListNotesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.ListNotesRequest): + request = grafeas.ListNotesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_notes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNotesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_note( + self, + request: grafeas.DeleteNoteRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified note. + + Args: + request (:class:`~.grafeas.DeleteNoteRequest`): + The request object. Request to delete a note. + name (:class:`str`): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.DeleteNoteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.DeleteNoteRequest): + request = grafeas.DeleteNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_note] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def create_note( + self, + request: grafeas.CreateNoteRequest = None, + *, + parent: str = None, + note_id: str = None, + note: grafeas.Note = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Creates a new note. + + Args: + request (:class:`~.grafeas.CreateNoteRequest`): + The request object. Request to create a new note. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the note is to be + created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + note_id (:class:`str`): + The ID to use for this note. + This corresponds to the ``note_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + note (:class:`~.grafeas.Note`): + The note to create. + This corresponds to the ``note`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, note_id, note]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.CreateNoteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.CreateNoteRequest): + request = grafeas.CreateNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if note_id is not None: + request.note_id = note_id + if note is not None: + request.note = note + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_note] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def batch_create_notes( + self, + request: grafeas.BatchCreateNotesRequest = None, + *, + parent: str = None, + notes: Sequence[grafeas.BatchCreateNotesRequest.NotesEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.BatchCreateNotesResponse: + r"""Creates new notes in batch. + + Args: + request (:class:`~.grafeas.BatchCreateNotesRequest`): + The request object. Request to create notes in batch. + parent (:class:`str`): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the notes are to + be created. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + notes (:class:`Sequence[~.grafeas.BatchCreateNotesRequest.NotesEntry]`): + The notes to create. Max allowed + length is 1000. + This corresponds to the ``notes`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.BatchCreateNotesResponse: + Response for creating notes in batch. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, notes]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.BatchCreateNotesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.BatchCreateNotesRequest): + request = grafeas.BatchCreateNotesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if notes is not None: + request.notes = notes + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_notes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_note( + self, + request: grafeas.UpdateNoteRequest = None, + *, + name: str = None, + note: grafeas.Note = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> grafeas.Note: + r"""Updates the specified note. + + Args: + request (:class:`~.grafeas.UpdateNoteRequest`): + The request object. Request to update a note. + name (:class:`str`): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ note (:class:`~.grafeas.Note`): + The updated note. + This corresponds to the ``note`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + The fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.grafeas.Note: + A type of analysis that can be done + for a resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, note, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.UpdateNoteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.UpdateNoteRequest): + request = grafeas.UpdateNoteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if note is not None: + request.note = note + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_note] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_note_occurrences( + self, + request: grafeas.ListNoteOccurrencesRequest = None, + *, + name: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListNoteOccurrencesPager: + r"""Lists occurrences referencing the specified note. + Provider projects can use this method to get all + occurrences across consumer projects referencing the + specified note. + + Args: + request (:class:`~.grafeas.ListNoteOccurrencesRequest`): + The request object. Request to list occurrences for a + note. + name (:class:`str`): + The name of the note to list occurrences for in the form + of ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The filter expression. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pagers.ListNoteOccurrencesPager: + Response for listing occurrences for + a note. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a grafeas.ListNoteOccurrencesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, grafeas.ListNoteOccurrencesRequest): + request = grafeas.ListNoteOccurrencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_note_occurrences] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListNoteOccurrencesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("grafeas",).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("GrafeasClient",) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/pagers.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/pagers.py new file mode 100644 index 000000000000..5a9e2293ef3a --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/pagers.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from grafeas.grafeas_v1.types import grafeas + + +class ListOccurrencesPager: + """A pager for iterating through ``list_occurrences`` requests. + + This class thinly wraps an initial + :class:`~.grafeas.ListOccurrencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``occurrences`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListOccurrences`` requests and continue to iterate + through the ``occurrences`` field on the + corresponding responses. + + All the usual :class:`~.grafeas.ListOccurrencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., grafeas.ListOccurrencesResponse], + request: grafeas.ListOccurrencesRequest, + response: grafeas.ListOccurrencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.grafeas.ListOccurrencesRequest`): + The initial request object. + response (:class:`~.grafeas.ListOccurrencesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = grafeas.ListOccurrencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[grafeas.ListOccurrencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[grafeas.Occurrence]: + for page in self.pages: + yield from page.occurrences + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOccurrencesAsyncPager: + """A pager for iterating through ``list_occurrences`` requests. + + This class thinly wraps an initial + :class:`~.grafeas.ListOccurrencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``occurrences`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOccurrences`` requests and continue to iterate + through the ``occurrences`` field on the + corresponding responses. + + All the usual :class:`~.grafeas.ListOccurrencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[grafeas.ListOccurrencesResponse]], + request: grafeas.ListOccurrencesRequest, + response: grafeas.ListOccurrencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.grafeas.ListOccurrencesRequest`): + The initial request object. + response (:class:`~.grafeas.ListOccurrencesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = grafeas.ListOccurrencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[grafeas.ListOccurrencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[grafeas.Occurrence]: + async def async_generator(): + async for page in self.pages: + for response in page.occurrences: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNotesPager: + """A pager for iterating through ``list_notes`` requests. + + This class thinly wraps an initial + :class:`~.grafeas.ListNotesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``notes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNotes`` requests and continue to iterate + through the ``notes`` field on the + corresponding responses. + + All the usual :class:`~.grafeas.ListNotesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., grafeas.ListNotesResponse], + request: grafeas.ListNotesRequest, + response: grafeas.ListNotesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.grafeas.ListNotesRequest`): + The initial request object. + response (:class:`~.grafeas.ListNotesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = grafeas.ListNotesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[grafeas.ListNotesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[grafeas.Note]: + for page in self.pages: + yield from page.notes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNotesAsyncPager: + """A pager for iterating through ``list_notes`` requests. + + This class thinly wraps an initial + :class:`~.grafeas.ListNotesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``notes`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNotes`` requests and continue to iterate + through the ``notes`` field on the + corresponding responses. + + All the usual :class:`~.grafeas.ListNotesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[grafeas.ListNotesResponse]], + request: grafeas.ListNotesRequest, + response: grafeas.ListNotesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.grafeas.ListNotesRequest`): + The initial request object. + response (:class:`~.grafeas.ListNotesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = grafeas.ListNotesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[grafeas.ListNotesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[grafeas.Note]: + async def async_generator(): + async for page in self.pages: + for response in page.notes: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNoteOccurrencesPager: + """A pager for iterating through ``list_note_occurrences`` requests. + + This class thinly wraps an initial + :class:`~.grafeas.ListNoteOccurrencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``occurrences`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListNoteOccurrences`` requests and continue to iterate + through the ``occurrences`` field on the + corresponding responses. + + All the usual :class:`~.grafeas.ListNoteOccurrencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., grafeas.ListNoteOccurrencesResponse], + request: grafeas.ListNoteOccurrencesRequest, + response: grafeas.ListNoteOccurrencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.grafeas.ListNoteOccurrencesRequest`): + The initial request object. + response (:class:`~.grafeas.ListNoteOccurrencesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = grafeas.ListNoteOccurrencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[grafeas.ListNoteOccurrencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[grafeas.Occurrence]: + for page in self.pages: + yield from page.occurrences + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListNoteOccurrencesAsyncPager: + """A pager for iterating through ``list_note_occurrences`` requests. + + This class thinly wraps an initial + :class:`~.grafeas.ListNoteOccurrencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``occurrences`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListNoteOccurrences`` requests and continue to iterate + through the ``occurrences`` field on the + corresponding responses. + + All the usual :class:`~.grafeas.ListNoteOccurrencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[grafeas.ListNoteOccurrencesResponse]], + request: grafeas.ListNoteOccurrencesRequest, + response: grafeas.ListNoteOccurrencesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.grafeas.ListNoteOccurrencesRequest`): + The initial request object. + response (:class:`~.grafeas.ListNoteOccurrencesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = grafeas.ListNoteOccurrencesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[grafeas.ListNoteOccurrencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[grafeas.Occurrence]: + async def async_generator(): + async for page in self.pages: + for response in page.occurrences: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/__init__.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/__init__.py new file mode 100644 index 000000000000..6a261fea7bef --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import GrafeasTransport +from .grpc import GrafeasGrpcTransport +from .grpc_asyncio import GrafeasGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GrafeasTransport]] +_transport_registry["grpc"] = GrafeasGrpcTransport +_transport_registry["grpc_asyncio"] = GrafeasGrpcAsyncIOTransport + + +__all__ = ( + "GrafeasTransport", + "GrafeasGrpcTransport", + "GrafeasGrpcAsyncIOTransport", +) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/base.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/base.py new file mode 100644 index 000000000000..25ae1ff93e3e --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/base.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +import typing +import pkg_resources + +from google import auth +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.protobuf import empty_pb2 as empty # type: ignore +from grafeas.grafeas_v1.types import grafeas + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("grafeas",).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +class GrafeasTransport(abc.ABC): + """Abstract transport class for Grafeas.""" + + AUTH_SCOPES = () + + def __init__( + self, + *, + host: str = "", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_occurrence: gapic_v1.method.wrap_method( + self.get_occurrence, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_occurrences: gapic_v1.method.wrap_method( + self.list_occurrences, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.delete_occurrence: gapic_v1.method.wrap_method( + self.delete_occurrence, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.create_occurrence: gapic_v1.method.wrap_method( + self.create_occurrence, default_timeout=30.0, client_info=_client_info, + ), + self.batch_create_occurrences: gapic_v1.method.wrap_method( + self.batch_create_occurrences, + default_timeout=30.0, + client_info=_client_info, + ), + self.update_occurrence: gapic_v1.method.wrap_method( + self.update_occurrence, default_timeout=30.0, client_info=_client_info, + ), + self.get_occurrence_note: gapic_v1.method.wrap_method( + self.get_occurrence_note, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.get_note: gapic_v1.method.wrap_method( + self.get_note, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_notes: gapic_v1.method.wrap_method( + self.list_notes, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.delete_note: gapic_v1.method.wrap_method( + self.delete_note, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.create_note: gapic_v1.method.wrap_method( + self.create_note, default_timeout=30.0, client_info=_client_info, + ), + self.batch_create_notes: gapic_v1.method.wrap_method( + self.batch_create_notes, default_timeout=30.0, client_info=_client_info, + ), + self.update_note: gapic_v1.method.wrap_method( + self.update_note, default_timeout=30.0, client_info=_client_info, + ), + self.list_note_occurrences: gapic_v1.method.wrap_method( + self.list_note_occurrences, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + } + + @property + def get_occurrence( + self, + ) -> typing.Callable[ + [grafeas.GetOccurrenceRequest], + typing.Union[grafeas.Occurrence, typing.Awaitable[grafeas.Occurrence]], + ]: + raise NotImplementedError() + + @property + def list_occurrences( + self, + ) -> typing.Callable[ + [grafeas.ListOccurrencesRequest], + typing.Union[ + grafeas.ListOccurrencesResponse, + typing.Awaitable[grafeas.ListOccurrencesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_occurrence( + self, + ) -> typing.Callable[ + [grafeas.DeleteOccurrenceRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def create_occurrence( + self, + ) -> typing.Callable[ + [grafeas.CreateOccurrenceRequest], + typing.Union[grafeas.Occurrence, typing.Awaitable[grafeas.Occurrence]], + ]: + raise NotImplementedError() + + @property + def batch_create_occurrences( + self, + ) -> typing.Callable[ + [grafeas.BatchCreateOccurrencesRequest], + typing.Union[ + grafeas.BatchCreateOccurrencesResponse, + typing.Awaitable[grafeas.BatchCreateOccurrencesResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_occurrence( + self, + ) -> typing.Callable[ + [grafeas.UpdateOccurrenceRequest], + typing.Union[grafeas.Occurrence, typing.Awaitable[grafeas.Occurrence]], + ]: + raise NotImplementedError() + + @property + def get_occurrence_note( + self, + ) -> typing.Callable[ + [grafeas.GetOccurrenceNoteRequest], + typing.Union[grafeas.Note, typing.Awaitable[grafeas.Note]], + ]: + raise NotImplementedError() + + @property + def get_note( + self, + ) -> typing.Callable[ + [grafeas.GetNoteRequest], + typing.Union[grafeas.Note, typing.Awaitable[grafeas.Note]], + ]: + raise NotImplementedError() + + @property + def list_notes( + self, + ) -> typing.Callable[ + [grafeas.ListNotesRequest], + typing.Union[ + grafeas.ListNotesResponse, typing.Awaitable[grafeas.ListNotesResponse] + ], + ]: + raise NotImplementedError() + + @property + def delete_note( + self, + ) -> typing.Callable[ + [grafeas.DeleteNoteRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def create_note( + self, + ) -> typing.Callable[ + [grafeas.CreateNoteRequest], + typing.Union[grafeas.Note, typing.Awaitable[grafeas.Note]], + ]: + raise NotImplementedError() + + @property + def batch_create_notes( + self, + ) -> typing.Callable[ + [grafeas.BatchCreateNotesRequest], + typing.Union[ + grafeas.BatchCreateNotesResponse, + typing.Awaitable[grafeas.BatchCreateNotesResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_note( 
+ self, + ) -> typing.Callable[ + [grafeas.UpdateNoteRequest], + typing.Union[grafeas.Note, typing.Awaitable[grafeas.Note]], + ]: + raise NotImplementedError() + + @property + def list_note_occurrences( + self, + ) -> typing.Callable[ + [grafeas.ListNoteOccurrencesRequest], + typing.Union[ + grafeas.ListNoteOccurrencesResponse, + typing.Awaitable[grafeas.ListNoteOccurrencesResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("GrafeasTransport",) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc.py new file mode 100644 index 000000000000..e76fe349b664 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc.py @@ -0,0 +1,590 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.protobuf import empty_pb2 as empty # type: ignore +from grafeas.grafeas_v1.types import grafeas + +from .base import GrafeasTransport + + +class GrafeasGrpcTransport(GrafeasTransport): + """gRPC backend transport for Grafeas. + + `Grafeas `__ API. + + Retrieves analysis results of Cloud components such as Docker + container images. + + Analysis results are stored as a series of occurrences. An + ``Occurrence`` contains information about a specific analysis + instance on a resource. An occurrence refers to a ``Note``. A note + contains details describing the analysis and is generally stored in + a separate project, called a ``Provider``. Multiple occurrences can + refer to the same note. + + For example, an SSL vulnerability could affect multiple images. In + this case, there would be one note for the vulnerability and an + occurrence for each image with the vulnerability referring to that + note. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + @classmethod + def create_channel( + cls, + host: str = "", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def get_occurrence( + self, + ) -> Callable[[grafeas.GetOccurrenceRequest], grafeas.Occurrence]: + r"""Return a callable for the get occurrence method over gRPC. + + Gets the specified occurrence. + + Returns: + Callable[[~.GetOccurrenceRequest], + ~.Occurrence]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_occurrence" not in self._stubs: + self._stubs["get_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/GetOccurrence", + request_serializer=grafeas.GetOccurrenceRequest.serialize, + response_deserializer=grafeas.Occurrence.deserialize, + ) + return self._stubs["get_occurrence"] + + @property + def list_occurrences( + self, + ) -> Callable[[grafeas.ListOccurrencesRequest], grafeas.ListOccurrencesResponse]: + r"""Return a callable for the list occurrences method over gRPC. + + Lists occurrences for the specified project. + + Returns: + Callable[[~.ListOccurrencesRequest], + ~.ListOccurrencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_occurrences" not in self._stubs: + self._stubs["list_occurrences"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/ListOccurrences", + request_serializer=grafeas.ListOccurrencesRequest.serialize, + response_deserializer=grafeas.ListOccurrencesResponse.deserialize, + ) + return self._stubs["list_occurrences"] + + @property + def delete_occurrence( + self, + ) -> Callable[[grafeas.DeleteOccurrenceRequest], empty.Empty]: + r"""Return a callable for the delete occurrence method over gRPC. + + Deletes the specified occurrence. For example, use + this method to delete an occurrence when the occurrence + is no longer applicable for the given resource. + + Returns: + Callable[[~.DeleteOccurrenceRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
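One practical consequence of the channel handling above: when a caller passes a pre-built channel, the transport uses it as-is, and the ``grpc_channel`` property simply returns that cached instance rather than creating a new one. A minimal sketch, with a placeholder endpoint and application default credentials assumed by ``create_channel``:

from grafeas.grafeas_v1.services.grafeas.transports import GrafeasGrpcTransport

# Build a channel explicitly (placeholder host) ...
channel = GrafeasGrpcTransport.create_channel("my-grafeas-endpoint.example.com:443")

# ... and hand it to the transport; the provided channel is stored and reused.
transport = GrafeasGrpcTransport(channel=channel)
assert transport.grpc_channel is channel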
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_occurrence" not in self._stubs: + self._stubs["delete_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/DeleteOccurrence", + request_serializer=grafeas.DeleteOccurrenceRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_occurrence"] + + @property + def create_occurrence( + self, + ) -> Callable[[grafeas.CreateOccurrenceRequest], grafeas.Occurrence]: + r"""Return a callable for the create occurrence method over gRPC. + + Creates a new occurrence. + + Returns: + Callable[[~.CreateOccurrenceRequest], + ~.Occurrence]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_occurrence" not in self._stubs: + self._stubs["create_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/CreateOccurrence", + request_serializer=grafeas.CreateOccurrenceRequest.serialize, + response_deserializer=grafeas.Occurrence.deserialize, + ) + return self._stubs["create_occurrence"] + + @property + def batch_create_occurrences( + self, + ) -> Callable[ + [grafeas.BatchCreateOccurrencesRequest], grafeas.BatchCreateOccurrencesResponse + ]: + r"""Return a callable for the batch create occurrences method over gRPC. + + Creates new occurrences in batch. + + Returns: + Callable[[~.BatchCreateOccurrencesRequest], + ~.BatchCreateOccurrencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_occurrences" not in self._stubs: + self._stubs["batch_create_occurrences"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/BatchCreateOccurrences", + request_serializer=grafeas.BatchCreateOccurrencesRequest.serialize, + response_deserializer=grafeas.BatchCreateOccurrencesResponse.deserialize, + ) + return self._stubs["batch_create_occurrences"] + + @property + def update_occurrence( + self, + ) -> Callable[[grafeas.UpdateOccurrenceRequest], grafeas.Occurrence]: + r"""Return a callable for the update occurrence method over gRPC. + + Updates the specified occurrence. + + Returns: + Callable[[~.UpdateOccurrenceRequest], + ~.Occurrence]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_occurrence" not in self._stubs: + self._stubs["update_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/UpdateOccurrence", + request_serializer=grafeas.UpdateOccurrenceRequest.serialize, + response_deserializer=grafeas.Occurrence.deserialize, + ) + return self._stubs["update_occurrence"] + + @property + def get_occurrence_note( + self, + ) -> Callable[[grafeas.GetOccurrenceNoteRequest], grafeas.Note]: + r"""Return a callable for the get occurrence note method over gRPC. + + Gets the note attached to the specified occurrence. 
+ Consumer projects can use this method to get a note that + belongs to a provider project. + + Returns: + Callable[[~.GetOccurrenceNoteRequest], + ~.Note]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_occurrence_note" not in self._stubs: + self._stubs["get_occurrence_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/GetOccurrenceNote", + request_serializer=grafeas.GetOccurrenceNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["get_occurrence_note"] + + @property + def get_note(self) -> Callable[[grafeas.GetNoteRequest], grafeas.Note]: + r"""Return a callable for the get note method over gRPC. + + Gets the specified note. + + Returns: + Callable[[~.GetNoteRequest], + ~.Note]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_note" not in self._stubs: + self._stubs["get_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/GetNote", + request_serializer=grafeas.GetNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["get_note"] + + @property + def list_notes( + self, + ) -> Callable[[grafeas.ListNotesRequest], grafeas.ListNotesResponse]: + r"""Return a callable for the list notes method over gRPC. + + Lists notes for the specified project. + + Returns: + Callable[[~.ListNotesRequest], + ~.ListNotesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_notes" not in self._stubs: + self._stubs["list_notes"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/ListNotes", + request_serializer=grafeas.ListNotesRequest.serialize, + response_deserializer=grafeas.ListNotesResponse.deserialize, + ) + return self._stubs["list_notes"] + + @property + def delete_note(self) -> Callable[[grafeas.DeleteNoteRequest], empty.Empty]: + r"""Return a callable for the delete note method over gRPC. + + Deletes the specified note. + + Returns: + Callable[[~.DeleteNoteRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_note" not in self._stubs: + self._stubs["delete_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/DeleteNote", + request_serializer=grafeas.DeleteNoteRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_note"] + + @property + def create_note(self) -> Callable[[grafeas.CreateNoteRequest], grafeas.Note]: + r"""Return a callable for the create note method over gRPC. + + Creates a new note. + + Returns: + Callable[[~.CreateNoteRequest], + ~.Note]: + A function that, when called, will call the underlying RPC + on the server. 
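Each RPC property in this transport follows the same lazy pattern: the unary-unary stub is built on first access and then served from ``self._stubs``, so repeated attribute lookups return the same callable. A sketch, assuming a configured transport and a hypothetical note name:

from grafeas.grafeas_v1.services.grafeas.transports import GrafeasGrpcTransport
from grafeas.grafeas_v1.types import grafeas

transport = GrafeasGrpcTransport(host="my-grafeas-endpoint.example.com:443")  # placeholder

# First access creates and caches the stub; later accesses reuse it.
get_note = transport.get_note
assert transport.get_note is get_note

# The stub speaks protocol buffers directly (network call, hypothetical name):
# note = get_note(grafeas.GetNoteRequest(name="projects/my-project/notes/my-note"))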
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_note" not in self._stubs: + self._stubs["create_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/CreateNote", + request_serializer=grafeas.CreateNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["create_note"] + + @property + def batch_create_notes( + self, + ) -> Callable[[grafeas.BatchCreateNotesRequest], grafeas.BatchCreateNotesResponse]: + r"""Return a callable for the batch create notes method over gRPC. + + Creates new notes in batch. + + Returns: + Callable[[~.BatchCreateNotesRequest], + ~.BatchCreateNotesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_notes" not in self._stubs: + self._stubs["batch_create_notes"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/BatchCreateNotes", + request_serializer=grafeas.BatchCreateNotesRequest.serialize, + response_deserializer=grafeas.BatchCreateNotesResponse.deserialize, + ) + return self._stubs["batch_create_notes"] + + @property + def update_note(self) -> Callable[[grafeas.UpdateNoteRequest], grafeas.Note]: + r"""Return a callable for the update note method over gRPC. + + Updates the specified note. + + Returns: + Callable[[~.UpdateNoteRequest], + ~.Note]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_note" not in self._stubs: + self._stubs["update_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/UpdateNote", + request_serializer=grafeas.UpdateNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["update_note"] + + @property + def list_note_occurrences( + self, + ) -> Callable[ + [grafeas.ListNoteOccurrencesRequest], grafeas.ListNoteOccurrencesResponse + ]: + r"""Return a callable for the list note occurrences method over gRPC. + + Lists occurrences referencing the specified note. + Provider projects can use this method to get all + occurrences across consumer projects referencing the + specified note. + + Returns: + Callable[[~.ListNoteOccurrencesRequest], + ~.ListNoteOccurrencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_note_occurrences" not in self._stubs: + self._stubs["list_note_occurrences"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/ListNoteOccurrences", + request_serializer=grafeas.ListNoteOccurrencesRequest.serialize, + response_deserializer=grafeas.ListNoteOccurrencesResponse.deserialize, + ) + return self._stubs["list_note_occurrences"] + + +__all__ = ("GrafeasGrpcTransport",) diff --git a/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc_asyncio.py b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c3318a1b4202 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/services/grafeas/transports/grpc_asyncio.py @@ -0,0 +1,595 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.protobuf import empty_pb2 as empty # type: ignore +from grafeas.grafeas_v1.types import grafeas + +from .base import GrafeasTransport +from .grpc import GrafeasGrpcTransport + + +class GrafeasGrpcAsyncIOTransport(GrafeasTransport): + """gRPC AsyncIO backend transport for Grafeas. + + `Grafeas `__ API. + + Retrieves analysis results of Cloud components such as Docker + container images. + + Analysis results are stored as a series of occurrences. An + ``Occurrence`` contains information about a specific analysis + instance on a resource. An occurrence refers to a ``Note``. A note + contains details describing the analysis and is generally stored in + a separate project, called a ``Provider``. Multiple occurrences can + refer to the same note. + + For example, an SSL vulnerability could affect multiple images. In + this case, there would be one note for the vulnerability and an + occurrence for each image with the vulnerability referring to that + note. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def get_occurrence( + self, + ) -> Callable[[grafeas.GetOccurrenceRequest], Awaitable[grafeas.Occurrence]]: + r"""Return a callable for the get occurrence method over gRPC. + + Gets the specified occurrence. + + Returns: + Callable[[~.GetOccurrenceRequest], + Awaitable[~.Occurrence]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_occurrence" not in self._stubs: + self._stubs["get_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/GetOccurrence", + request_serializer=grafeas.GetOccurrenceRequest.serialize, + response_deserializer=grafeas.Occurrence.deserialize, + ) + return self._stubs["get_occurrence"] + + @property + def list_occurrences( + self, + ) -> Callable[ + [grafeas.ListOccurrencesRequest], Awaitable[grafeas.ListOccurrencesResponse] + ]: + r"""Return a callable for the list occurrences method over gRPC. + + Lists occurrences for the specified project. + + Returns: + Callable[[~.ListOccurrencesRequest], + Awaitable[~.ListOccurrencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_occurrences" not in self._stubs: + self._stubs["list_occurrences"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/ListOccurrences", + request_serializer=grafeas.ListOccurrencesRequest.serialize, + response_deserializer=grafeas.ListOccurrencesResponse.deserialize, + ) + return self._stubs["list_occurrences"] + + @property + def delete_occurrence( + self, + ) -> Callable[[grafeas.DeleteOccurrenceRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete occurrence method over gRPC. + + Deletes the specified occurrence. For example, use + this method to delete an occurrence when the occurrence + is no longer applicable for the given resource. 
+ + Returns: + Callable[[~.DeleteOccurrenceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_occurrence" not in self._stubs: + self._stubs["delete_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/DeleteOccurrence", + request_serializer=grafeas.DeleteOccurrenceRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_occurrence"] + + @property + def create_occurrence( + self, + ) -> Callable[[grafeas.CreateOccurrenceRequest], Awaitable[grafeas.Occurrence]]: + r"""Return a callable for the create occurrence method over gRPC. + + Creates a new occurrence. + + Returns: + Callable[[~.CreateOccurrenceRequest], + Awaitable[~.Occurrence]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_occurrence" not in self._stubs: + self._stubs["create_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/CreateOccurrence", + request_serializer=grafeas.CreateOccurrenceRequest.serialize, + response_deserializer=grafeas.Occurrence.deserialize, + ) + return self._stubs["create_occurrence"] + + @property + def batch_create_occurrences( + self, + ) -> Callable[ + [grafeas.BatchCreateOccurrencesRequest], + Awaitable[grafeas.BatchCreateOccurrencesResponse], + ]: + r"""Return a callable for the batch create occurrences method over gRPC. + + Creates new occurrences in batch. + + Returns: + Callable[[~.BatchCreateOccurrencesRequest], + Awaitable[~.BatchCreateOccurrencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_occurrences" not in self._stubs: + self._stubs["batch_create_occurrences"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/BatchCreateOccurrences", + request_serializer=grafeas.BatchCreateOccurrencesRequest.serialize, + response_deserializer=grafeas.BatchCreateOccurrencesResponse.deserialize, + ) + return self._stubs["batch_create_occurrences"] + + @property + def update_occurrence( + self, + ) -> Callable[[grafeas.UpdateOccurrenceRequest], Awaitable[grafeas.Occurrence]]: + r"""Return a callable for the update occurrence method over gRPC. + + Updates the specified occurrence. + + Returns: + Callable[[~.UpdateOccurrenceRequest], + Awaitable[~.Occurrence]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_occurrence" not in self._stubs: + self._stubs["update_occurrence"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/UpdateOccurrence", + request_serializer=grafeas.UpdateOccurrenceRequest.serialize, + response_deserializer=grafeas.Occurrence.deserialize, + ) + return self._stubs["update_occurrence"] + + @property + def get_occurrence_note( + self, + ) -> Callable[[grafeas.GetOccurrenceNoteRequest], Awaitable[grafeas.Note]]: + r"""Return a callable for the get occurrence note method over gRPC. + + Gets the note attached to the specified occurrence. + Consumer projects can use this method to get a note that + belongs to a provider project. + + Returns: + Callable[[~.GetOccurrenceNoteRequest], + Awaitable[~.Note]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_occurrence_note" not in self._stubs: + self._stubs["get_occurrence_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/GetOccurrenceNote", + request_serializer=grafeas.GetOccurrenceNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["get_occurrence_note"] + + @property + def get_note(self) -> Callable[[grafeas.GetNoteRequest], Awaitable[grafeas.Note]]: + r"""Return a callable for the get note method over gRPC. + + Gets the specified note. + + Returns: + Callable[[~.GetNoteRequest], + Awaitable[~.Note]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_note" not in self._stubs: + self._stubs["get_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/GetNote", + request_serializer=grafeas.GetNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["get_note"] + + @property + def list_notes( + self, + ) -> Callable[[grafeas.ListNotesRequest], Awaitable[grafeas.ListNotesResponse]]: + r"""Return a callable for the list notes method over gRPC. + + Lists notes for the specified project. + + Returns: + Callable[[~.ListNotesRequest], + Awaitable[~.ListNotesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_notes" not in self._stubs: + self._stubs["list_notes"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/ListNotes", + request_serializer=grafeas.ListNotesRequest.serialize, + response_deserializer=grafeas.ListNotesResponse.deserialize, + ) + return self._stubs["list_notes"] + + @property + def delete_note( + self, + ) -> Callable[[grafeas.DeleteNoteRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete note method over gRPC. + + Deletes the specified note. + + Returns: + Callable[[~.DeleteNoteRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_note" not in self._stubs: + self._stubs["delete_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/DeleteNote", + request_serializer=grafeas.DeleteNoteRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_note"] + + @property + def create_note( + self, + ) -> Callable[[grafeas.CreateNoteRequest], Awaitable[grafeas.Note]]: + r"""Return a callable for the create note method over gRPC. + + Creates a new note. + + Returns: + Callable[[~.CreateNoteRequest], + Awaitable[~.Note]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_note" not in self._stubs: + self._stubs["create_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/CreateNote", + request_serializer=grafeas.CreateNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["create_note"] + + @property + def batch_create_notes( + self, + ) -> Callable[ + [grafeas.BatchCreateNotesRequest], Awaitable[grafeas.BatchCreateNotesResponse] + ]: + r"""Return a callable for the batch create notes method over gRPC. + + Creates new notes in batch. + + Returns: + Callable[[~.BatchCreateNotesRequest], + Awaitable[~.BatchCreateNotesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_notes" not in self._stubs: + self._stubs["batch_create_notes"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/BatchCreateNotes", + request_serializer=grafeas.BatchCreateNotesRequest.serialize, + response_deserializer=grafeas.BatchCreateNotesResponse.deserialize, + ) + return self._stubs["batch_create_notes"] + + @property + def update_note( + self, + ) -> Callable[[grafeas.UpdateNoteRequest], Awaitable[grafeas.Note]]: + r"""Return a callable for the update note method over gRPC. + + Updates the specified note. + + Returns: + Callable[[~.UpdateNoteRequest], + Awaitable[~.Note]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_note" not in self._stubs: + self._stubs["update_note"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/UpdateNote", + request_serializer=grafeas.UpdateNoteRequest.serialize, + response_deserializer=grafeas.Note.deserialize, + ) + return self._stubs["update_note"] + + @property + def list_note_occurrences( + self, + ) -> Callable[ + [grafeas.ListNoteOccurrencesRequest], + Awaitable[grafeas.ListNoteOccurrencesResponse], + ]: + r"""Return a callable for the list note occurrences method over gRPC. + + Lists occurrences referencing the specified note. + Provider projects can use this method to get all + occurrences across consumer projects referencing the + specified note. 
+ + Returns: + Callable[[~.ListNoteOccurrencesRequest], + Awaitable[~.ListNoteOccurrencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_note_occurrences" not in self._stubs: + self._stubs["list_note_occurrences"] = self.grpc_channel.unary_unary( + "/grafeas.v1.Grafeas/ListNoteOccurrences", + request_serializer=grafeas.ListNoteOccurrencesRequest.serialize, + response_deserializer=grafeas.ListNoteOccurrencesResponse.deserialize, + ) + return self._stubs["list_note_occurrences"] + + +__all__ = ("GrafeasGrpcAsyncIOTransport",) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/__init__.py b/packages/grafeas/grafeas/grafeas_v1/types/__init__.py new file mode 100644 index 000000000000..e8f115089f83 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/__init__.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .common import ( + RelatedUrl, + Signature, +) +from .attestation import ( + AttestationNote, + AttestationOccurrence, +) +from .provenance import ( + BuildProvenance, + Source, + FileHashes, + Hash, + Command, + Artifact, + SourceContext, + AliasContext, + CloudRepoSourceContext, + GerritSourceContext, + GitSourceContext, + RepoId, + ProjectRepoId, +) +from .build import ( + BuildNote, + BuildOccurrence, +) +from .cvss import CVSSv3 +from .deployment import ( + DeploymentNote, + DeploymentOccurrence, +) +from .discovery import ( + DiscoveryNote, + DiscoveryOccurrence, +) +from .image import ( + Layer, + Fingerprint, + ImageNote, + ImageOccurrence, +) +from .package import ( + Distribution, + Location, + PackageNote, + PackageOccurrence, + Version, +) +from .upgrade import ( + UpgradeNote, + UpgradeDistribution, + WindowsUpdate, + UpgradeOccurrence, +) +from .vulnerability import ( + VulnerabilityNote, + VulnerabilityOccurrence, +) +from .grafeas import ( + Occurrence, + Note, + GetOccurrenceRequest, + ListOccurrencesRequest, + ListOccurrencesResponse, + DeleteOccurrenceRequest, + CreateOccurrenceRequest, + UpdateOccurrenceRequest, + GetNoteRequest, + GetOccurrenceNoteRequest, + ListNotesRequest, + ListNotesResponse, + DeleteNoteRequest, + CreateNoteRequest, + UpdateNoteRequest, + ListNoteOccurrencesRequest, + ListNoteOccurrencesResponse, + BatchCreateNotesRequest, + BatchCreateNotesResponse, + BatchCreateOccurrencesRequest, + BatchCreateOccurrencesResponse, +) + + +__all__ = ( + "RelatedUrl", + "Signature", + "AttestationNote", + "AttestationOccurrence", + "BuildProvenance", + "Source", + "FileHashes", + "Hash", + "Command", + "Artifact", + "SourceContext", + "AliasContext", + "CloudRepoSourceContext", + "GerritSourceContext", + "GitSourceContext", + "RepoId", + "ProjectRepoId", + "BuildNote", + "BuildOccurrence", + "CVSSv3", + "DeploymentNote", + 
"DeploymentOccurrence", + "DiscoveryNote", + "DiscoveryOccurrence", + "Layer", + "Fingerprint", + "ImageNote", + "ImageOccurrence", + "Distribution", + "Location", + "PackageNote", + "PackageOccurrence", + "Version", + "UpgradeNote", + "UpgradeDistribution", + "WindowsUpdate", + "UpgradeOccurrence", + "VulnerabilityNote", + "VulnerabilityOccurrence", + "Occurrence", + "Note", + "GetOccurrenceRequest", + "ListOccurrencesRequest", + "ListOccurrencesResponse", + "DeleteOccurrenceRequest", + "CreateOccurrenceRequest", + "UpdateOccurrenceRequest", + "GetNoteRequest", + "GetOccurrenceNoteRequest", + "ListNotesRequest", + "ListNotesResponse", + "DeleteNoteRequest", + "CreateNoteRequest", + "UpdateNoteRequest", + "ListNoteOccurrencesRequest", + "ListNoteOccurrencesResponse", + "BatchCreateNotesRequest", + "BatchCreateNotesResponse", + "BatchCreateOccurrencesRequest", + "BatchCreateOccurrencesResponse", +) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/attestation.py b/packages/grafeas/grafeas/grafeas_v1/types/attestation.py new file mode 100644 index 000000000000..693d5b1ea036 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/attestation.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from grafeas.grafeas_v1.types import common + + +__protobuf__ = proto.module( + package="grafeas.v1", manifest={"AttestationNote", "AttestationOccurrence",}, +) + + +class AttestationNote(proto.Message): + r"""Note kind that represents a logical attestation "role" or + "authority". For example, an organization might have one + ``Authority`` for "QA" and one for "build". This note is intended to + act strictly as a grouping mechanism for the attached occurrences + (Attestations). This grouping mechanism also provides a security + boundary, since IAM ACLs gate the ability for a principle to attach + an occurrence to a given note. It also provides a single point of + lookup to find all attached attestation occurrences, even if they + don't all live in the same project. + + Attributes: + hint (~.attestation.AttestationNote.Hint): + Hint hints at the purpose of the attestation + authority. + """ + + class Hint(proto.Message): + r"""This submessage provides human-readable hints about the + purpose of the authority. Because the name of a note acts as its + resource reference, it is important to disambiguate the + canonical name of the Note (which might be a UUID for security + purposes) from "readable" names more suitable for debug output. + Note that these hints should not be used to look up authorities + in security sensitive contexts, such as when looking up + attestations to verify. + + Attributes: + human_readable_name (str): + Required. The human readable name of this + attestation authority, for example "qa". 
+ """ + + human_readable_name = proto.Field(proto.STRING, number=1) + + hint = proto.Field(proto.MESSAGE, number=1, message=Hint,) + + +class AttestationOccurrence(proto.Message): + r"""Occurrence that represents a single "attestation". The + authenticity of an attestation can be verified using the + attached signature. If the verifier trusts the public key of the + signer, then verifying the signature is sufficient to establish + trust. In this circumstance, the authority to which this + attestation is attached is primarily useful for lookup (how to + find this attestation if you already know the authority and + artifact to be verified) and intent (for which authority this + attestation was intended to sign. + + Attributes: + serialized_payload (bytes): + Required. The serialized payload that is verified by one or + more ``signatures``. + signatures (Sequence[~.common.Signature]): + One or more signatures over ``serialized_payload``. Verifier + implementations should consider this attestation message + verified if at least one ``signature`` verifies + ``serialized_payload``. See ``Signature`` in common.proto + for more details on signature structure and verification. + """ + + serialized_payload = proto.Field(proto.BYTES, number=1) + + signatures = proto.RepeatedField(proto.MESSAGE, number=2, message=common.Signature,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/build.py b/packages/grafeas/grafeas/grafeas_v1/types/build.py new file mode 100644 index 000000000000..c987d86c46f8 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/build.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from grafeas.grafeas_v1.types import provenance as g_provenance + + +__protobuf__ = proto.module( + package="grafeas.v1", manifest={"BuildNote", "BuildOccurrence",}, +) + + +class BuildNote(proto.Message): + r"""Note holding the version of the provider's builder and the + signature of the provenance message in the build details + occurrence. + + Attributes: + builder_version (str): + Required. Immutable. Version of the builder + which produced this build. + """ + + builder_version = proto.Field(proto.STRING, number=1) + + +class BuildOccurrence(proto.Message): + r"""Details of a build occurrence. + + Attributes: + provenance (~.g_provenance.BuildProvenance): + Required. The actual provenance for the + build. + provenance_bytes (str): + Serialized JSON representation of the provenance, used in + generating the build signature in the corresponding build + note. After verifying the signature, ``provenance_bytes`` + can be unmarshalled and compared to the provenance to + confirm that it is unchanged. A base64-encoded string + representation of the provenance bytes is used for the + signature in order to interoperate with openssl which + expects this format for signature verification. 
+ + The serialized form is captured both to avoid ambiguity in + how the provenance is marshalled to json as well to prevent + incompatibilities with future changes. + """ + + provenance = proto.Field( + proto.MESSAGE, number=1, message=g_provenance.BuildProvenance, + ) + + provenance_bytes = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/common.py b/packages/grafeas/grafeas/grafeas_v1/types/common.py new file mode 100644 index 000000000000..9731660dd6ad --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/common.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="grafeas.v1", manifest={"NoteKind", "RelatedUrl", "Signature",}, +) + + +class NoteKind(proto.Enum): + r"""Kind represents the kinds of notes supported.""" + NOTE_KIND_UNSPECIFIED = 0 + VULNERABILITY = 1 + BUILD = 2 + IMAGE = 3 + PACKAGE = 4 + DEPLOYMENT = 5 + DISCOVERY = 6 + ATTESTATION = 7 + UPGRADE = 8 + + +class RelatedUrl(proto.Message): + r"""Metadata for any related URL information. + + Attributes: + url (str): + Specific URL associated with the resource. + label (str): + Label to describe usage of the URL. + """ + + url = proto.Field(proto.STRING, number=1) + + label = proto.Field(proto.STRING, number=2) + + +class Signature(proto.Message): + r"""Verifiers (e.g. Kritis implementations) MUST verify signatures with + respect to the trust anchors defined in policy (e.g. a Kritis + policy). Typically this means that the verifier has been configured + with a map from ``public_key_id`` to public key material (and any + required parameters, e.g. signing algorithm). + + In particular, verification implementations MUST NOT treat the + signature ``public_key_id`` as anything more than a key lookup hint. + The ``public_key_id`` DOES NOT validate or authenticate a public + key; it only provides a mechanism for quickly selecting a public key + ALREADY CONFIGURED on the verifier through a trusted channel. + Verification implementations MUST reject signatures in any of the + following circumstances: + + - The ``public_key_id`` is not recognized by the verifier. + - The public key that ``public_key_id`` refers to does not verify + the signature with respect to the payload. + + The ``signature`` contents SHOULD NOT be "attached" (where the + payload is included with the serialized ``signature`` bytes). + Verifiers MUST ignore any "attached" payload and only verify + signatures with respect to explicitly provided payload (e.g. a + ``payload`` field on the proto message that holds this Signature, or + the canonical serialization of the proto message that holds this + signature). + + Attributes: + signature (bytes): + The content of the signature, an opaque + bytestring. The payload that this signature + verifies MUST be unambiguously provided with the + Signature during verification. 
A wrapper message + might provide the payload explicitly. + Alternatively, a message might have a canonical + serialization that can always be unambiguously + computed to derive the payload. + public_key_id (str): + The identifier for the public key that verifies this + signature. + + - The ``public_key_id`` is required. + - The ``public_key_id`` MUST be an RFC3986 conformant URI. + - When possible, the ``public_key_id`` SHOULD be an + immutable reference, such as a cryptographic digest. + + Examples of valid ``public_key_id``\ s: + + OpenPGP V4 public key fingerprint: + + - "openpgp4fpr:74FAF3B861BDA0870C7B6DEF607E48D2A663AEEA" + See + https://www.iana.org/assignments/uri-schemes/prov/openpgp4fpr + for more details on this scheme. + + RFC6920 digest-named SubjectPublicKeyInfo (digest of the DER + serialization): + + - "ni:///sha-256;cD9o9Cq6LG3jD0iKXqEi_vdjJGecm_iXkbqVoScViaU" + - "nih:///sha-256;703f68f42aba2c6de30f488a5ea122fef76324679c9bf89791ba95a1271589a5". + """ + + signature = proto.Field(proto.BYTES, number=1) + + public_key_id = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/cvss.py b/packages/grafeas/grafeas/grafeas_v1/types/cvss.py new file mode 100644 index 000000000000..a466e9f9dbe0 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/cvss.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module(package="grafeas.v1", manifest={"CVSSv3",},) + + +class CVSSv3(proto.Message): + r"""Common Vulnerability Scoring System version 3. + For details, see https://www.first.org/cvss/specification- + document + + Attributes: + base_score (float): + The base score is a function of the base + metric scores. + exploitability_score (float): + + impact_score (float): + + attack_vector (~.cvss.CVSSv3.AttackVector): + Base Metrics + Represents the intrinsic characteristics of a + vulnerability that are constant over time and + across user environments. 
+ attack_complexity (~.cvss.CVSSv3.AttackComplexity): + + privileges_required (~.cvss.CVSSv3.PrivilegesRequired): + + user_interaction (~.cvss.CVSSv3.UserInteraction): + + scope (~.cvss.CVSSv3.Scope): + + confidentiality_impact (~.cvss.CVSSv3.Impact): + + integrity_impact (~.cvss.CVSSv3.Impact): + + availability_impact (~.cvss.CVSSv3.Impact): + + """ + + class AttackVector(proto.Enum): + r"""""" + ATTACK_VECTOR_UNSPECIFIED = 0 + ATTACK_VECTOR_NETWORK = 1 + ATTACK_VECTOR_ADJACENT = 2 + ATTACK_VECTOR_LOCAL = 3 + ATTACK_VECTOR_PHYSICAL = 4 + + class AttackComplexity(proto.Enum): + r"""""" + ATTACK_COMPLEXITY_UNSPECIFIED = 0 + ATTACK_COMPLEXITY_LOW = 1 + ATTACK_COMPLEXITY_HIGH = 2 + + class PrivilegesRequired(proto.Enum): + r"""""" + PRIVILEGES_REQUIRED_UNSPECIFIED = 0 + PRIVILEGES_REQUIRED_NONE = 1 + PRIVILEGES_REQUIRED_LOW = 2 + PRIVILEGES_REQUIRED_HIGH = 3 + + class UserInteraction(proto.Enum): + r"""""" + USER_INTERACTION_UNSPECIFIED = 0 + USER_INTERACTION_NONE = 1 + USER_INTERACTION_REQUIRED = 2 + + class Scope(proto.Enum): + r"""""" + SCOPE_UNSPECIFIED = 0 + SCOPE_UNCHANGED = 1 + SCOPE_CHANGED = 2 + + class Impact(proto.Enum): + r"""""" + IMPACT_UNSPECIFIED = 0 + IMPACT_HIGH = 1 + IMPACT_LOW = 2 + IMPACT_NONE = 3 + + base_score = proto.Field(proto.FLOAT, number=1) + + exploitability_score = proto.Field(proto.FLOAT, number=2) + + impact_score = proto.Field(proto.FLOAT, number=3) + + attack_vector = proto.Field(proto.ENUM, number=5, enum=AttackVector,) + + attack_complexity = proto.Field(proto.ENUM, number=6, enum=AttackComplexity,) + + privileges_required = proto.Field(proto.ENUM, number=7, enum=PrivilegesRequired,) + + user_interaction = proto.Field(proto.ENUM, number=8, enum=UserInteraction,) + + scope = proto.Field(proto.ENUM, number=9, enum=Scope,) + + confidentiality_impact = proto.Field(proto.ENUM, number=10, enum=Impact,) + + integrity_impact = proto.Field(proto.ENUM, number=11, enum=Impact,) + + availability_impact = proto.Field(proto.ENUM, number=12, enum=Impact,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/deployment.py b/packages/grafeas/grafeas/grafeas_v1/types/deployment.py new file mode 100644 index 000000000000..8cce8399c69c --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/deployment.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="grafeas.v1", manifest={"DeploymentNote", "DeploymentOccurrence",}, +) + + +class DeploymentNote(proto.Message): + r"""An artifact that can be deployed in some runtime. + + Attributes: + resource_uri (Sequence[str]): + Required. Resource URI for the artifact being + deployed. 
+ """ + + resource_uri = proto.RepeatedField(proto.STRING, number=1) + + +class DeploymentOccurrence(proto.Message): + r"""The period during which some deployable was active in a + runtime. + + Attributes: + user_email (str): + Identity of the user that triggered this + deployment. + deploy_time (~.timestamp.Timestamp): + Required. Beginning of the lifetime of this + deployment. + undeploy_time (~.timestamp.Timestamp): + End of the lifetime of this deployment. + config (str): + Configuration used to create this deployment. + address (str): + Address of the runtime element hosting this + deployment. + resource_uri (Sequence[str]): + Output only. Resource URI for the artifact + being deployed taken from the deployable field + with the same name. + platform (~.deployment.DeploymentOccurrence.Platform): + Platform hosting this deployment. + """ + + class Platform(proto.Enum): + r"""Types of platforms.""" + PLATFORM_UNSPECIFIED = 0 + GKE = 1 + FLEX = 2 + CUSTOM = 3 + + user_email = proto.Field(proto.STRING, number=1) + + deploy_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + undeploy_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + config = proto.Field(proto.STRING, number=4) + + address = proto.Field(proto.STRING, number=5) + + resource_uri = proto.RepeatedField(proto.STRING, number=6) + + platform = proto.Field(proto.ENUM, number=7, enum=Platform,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/discovery.py b/packages/grafeas/grafeas/grafeas_v1/types/discovery.py new file mode 100644 index 000000000000..765cc07ed20f --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/discovery.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore +from grafeas.grafeas_v1.types import common + + +__protobuf__ = proto.module( + package="grafeas.v1", manifest={"DiscoveryNote", "DiscoveryOccurrence",}, +) + + +class DiscoveryNote(proto.Message): + r"""A note that indicates a type of analysis a provider would perform. + This note exists in a provider's project. A ``Discovery`` occurrence + is created in a consumer's project at the start of analysis. + + Attributes: + analysis_kind (~.common.NoteKind): + Required. Immutable. The kind of analysis + that is handled by this discovery. + """ + + analysis_kind = proto.Field(proto.ENUM, number=1, enum=common.NoteKind,) + + +class DiscoveryOccurrence(proto.Message): + r"""Provides information about the analysis status of a + discovered resource. + + Attributes: + continuous_analysis (~.discovery.DiscoveryOccurrence.ContinuousAnalysis): + Whether the resource is continuously + analyzed. + analysis_status (~.discovery.DiscoveryOccurrence.AnalysisStatus): + The status of discovery for the resource. 
+ analysis_status_error (~.status.Status): + When an error is encountered this will + contain a LocalizedMessage under details to show + to the user. The LocalizedMessage is output only + and populated by the API. + cpe (str): + The CPE of the resource being scanned. + last_scan_time (~.timestamp.Timestamp): + The last time this resource was scanned. + """ + + class ContinuousAnalysis(proto.Enum): + r"""Whether the resource is continuously analyzed.""" + CONTINUOUS_ANALYSIS_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + + class AnalysisStatus(proto.Enum): + r"""Analysis status for a resource. Currently for initial + analysis only (not updated in continuous analysis). + """ + ANALYSIS_STATUS_UNSPECIFIED = 0 + PENDING = 1 + SCANNING = 2 + FINISHED_SUCCESS = 3 + FINISHED_FAILED = 4 + FINISHED_UNSUPPORTED = 5 + + continuous_analysis = proto.Field(proto.ENUM, number=1, enum=ContinuousAnalysis,) + + analysis_status = proto.Field(proto.ENUM, number=2, enum=AnalysisStatus,) + + analysis_status_error = proto.Field(proto.MESSAGE, number=3, message=status.Status,) + + cpe = proto.Field(proto.STRING, number=4) + + last_scan_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py b/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py new file mode 100644 index 000000000000..a3e37623a587 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/grafeas.py @@ -0,0 +1,620 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
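The messages that follow are proto-plus wrappers around the grafeas.v1 request/response protos, so they can be built with plain keyword arguments and passed straight to the generated client. A minimal sketch, assuming a GrafeasClient has already been constructed with one of the gRPC transports added in this PR (the actual constructor lives in services/grafeas/client.py and is not shown in this hunk); the project name below is a placeholder:

    from grafeas.grafeas_v1 import types

    request = types.ListNotesRequest(
        parent="projects/my-project",  # placeholder; any projects/[PROJECT_ID] works
        page_size=100,
    )

    # The generated surface accepts the request object directly; list methods
    # return a pager (see pagers.py in this PR) that follows next_page_token:
    #   for note in client.list_notes(request=request):
    #       print(note.name, note.kind)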
+# + +import proto # type: ignore + + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from grafeas.grafeas_v1.types import attestation as g_attestation +from grafeas.grafeas_v1.types import build as g_build +from grafeas.grafeas_v1.types import common +from grafeas.grafeas_v1.types import deployment as g_deployment +from grafeas.grafeas_v1.types import discovery as g_discovery +from grafeas.grafeas_v1.types import image as g_image +from grafeas.grafeas_v1.types import package as g_package +from grafeas.grafeas_v1.types import upgrade as g_upgrade +from grafeas.grafeas_v1.types import vulnerability as g_vulnerability + + +__protobuf__ = proto.module( + package="grafeas.v1", + manifest={ + "Occurrence", + "Note", + "GetOccurrenceRequest", + "ListOccurrencesRequest", + "ListOccurrencesResponse", + "DeleteOccurrenceRequest", + "CreateOccurrenceRequest", + "UpdateOccurrenceRequest", + "GetNoteRequest", + "GetOccurrenceNoteRequest", + "ListNotesRequest", + "ListNotesResponse", + "DeleteNoteRequest", + "CreateNoteRequest", + "UpdateNoteRequest", + "ListNoteOccurrencesRequest", + "ListNoteOccurrencesResponse", + "BatchCreateNotesRequest", + "BatchCreateNotesResponse", + "BatchCreateOccurrencesRequest", + "BatchCreateOccurrencesResponse", + }, +) + + +class Occurrence(proto.Message): + r"""An instance of an analysis type that has been found on a + resource. + + Attributes: + name (str): + Output only. The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + resource_uri (str): + Required. Immutable. A URI that represents the resource for + which the occurrence applies. For example, + ``https://gcr.io/project/image@sha256:123abc`` for a Docker + image. + note_name (str): + Required. Immutable. The analysis note associated with this + occurrence, in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. This field can + be used as a filter in list requests. + kind (~.common.NoteKind): + Output only. This explicitly denotes which of + the occurrence details are specified. This field + can be used as a filter in list requests. + remediation (str): + A description of actions that can be taken to + remedy the note. + create_time (~.timestamp.Timestamp): + Output only. The time this occurrence was + created. + update_time (~.timestamp.Timestamp): + Output only. The time this occurrence was + last updated. + vulnerability (~.g_vulnerability.VulnerabilityOccurrence): + Describes a security vulnerability. + build (~.g_build.BuildOccurrence): + Describes a verifiable build. + image (~.g_image.ImageOccurrence): + Describes how this resource derives from the + basis in the associated note. + package (~.g_package.PackageOccurrence): + Describes the installation of a package on + the linked resource. + deployment (~.g_deployment.DeploymentOccurrence): + Describes the deployment of an artifact on a + runtime. + discovery (~.g_discovery.DiscoveryOccurrence): + Describes when a resource was discovered. + attestation (~.g_attestation.AttestationOccurrence): + Describes an attestation of an artifact. + upgrade (~.g_upgrade.UpgradeOccurrence): + Describes an available package upgrade on the + linked resource. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + resource_uri = proto.Field(proto.STRING, number=2) + + note_name = proto.Field(proto.STRING, number=3) + + kind = proto.Field(proto.ENUM, number=4, enum=common.NoteKind,) + + remediation = proto.Field(proto.STRING, number=5) + + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + vulnerability = proto.Field( + proto.MESSAGE, + number=8, + oneof="details", + message=g_vulnerability.VulnerabilityOccurrence, + ) + + build = proto.Field( + proto.MESSAGE, number=9, oneof="details", message=g_build.BuildOccurrence, + ) + + image = proto.Field( + proto.MESSAGE, number=10, oneof="details", message=g_image.ImageOccurrence, + ) + + package = proto.Field( + proto.MESSAGE, number=11, oneof="details", message=g_package.PackageOccurrence, + ) + + deployment = proto.Field( + proto.MESSAGE, + number=12, + oneof="details", + message=g_deployment.DeploymentOccurrence, + ) + + discovery = proto.Field( + proto.MESSAGE, + number=13, + oneof="details", + message=g_discovery.DiscoveryOccurrence, + ) + + attestation = proto.Field( + proto.MESSAGE, + number=14, + oneof="details", + message=g_attestation.AttestationOccurrence, + ) + + upgrade = proto.Field( + proto.MESSAGE, number=15, oneof="details", message=g_upgrade.UpgradeOccurrence, + ) + + +class Note(proto.Message): + r"""A type of analysis that can be done for a resource. + + Attributes: + name (str): + Output only. The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + short_description (str): + A one sentence description of this note. + long_description (str): + A detailed description of this note. + kind (~.common.NoteKind): + Output only. The type of analysis. This field + can be used as a filter in list requests. + related_url (Sequence[~.common.RelatedUrl]): + URLs associated with this note. + expiration_time (~.timestamp.Timestamp): + Time of expiration for this note. Empty if + note does not expire. + create_time (~.timestamp.Timestamp): + Output only. The time this note was created. + This field can be used as a filter in list + requests. + update_time (~.timestamp.Timestamp): + Output only. The time this note was last + updated. This field can be used as a filter in + list requests. + related_note_names (Sequence[str]): + Other notes related to this note. + vulnerability (~.g_vulnerability.VulnerabilityNote): + A note describing a package vulnerability. + build (~.g_build.BuildNote): + A note describing build provenance for a + verifiable build. + image (~.g_image.ImageNote): + A note describing a base image. + package (~.g_package.PackageNote): + A note describing a package hosted by various + package managers. + deployment (~.g_deployment.DeploymentNote): + A note describing something that can be + deployed. + discovery (~.g_discovery.DiscoveryNote): + A note describing the initial analysis of a + resource. + attestation (~.g_attestation.AttestationNote): + A note describing an attestation role. + upgrade (~.g_upgrade.UpgradeNote): + A note describing available package upgrades. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + short_description = proto.Field(proto.STRING, number=2) + + long_description = proto.Field(proto.STRING, number=3) + + kind = proto.Field(proto.ENUM, number=4, enum=common.NoteKind,) + + related_url = proto.RepeatedField( + proto.MESSAGE, number=5, message=common.RelatedUrl, + ) + + expiration_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + + related_note_names = proto.RepeatedField(proto.STRING, number=9) + + vulnerability = proto.Field( + proto.MESSAGE, + number=10, + oneof="type", + message=g_vulnerability.VulnerabilityNote, + ) + + build = proto.Field( + proto.MESSAGE, number=11, oneof="type", message=g_build.BuildNote, + ) + + image = proto.Field( + proto.MESSAGE, number=12, oneof="type", message=g_image.ImageNote, + ) + + package = proto.Field( + proto.MESSAGE, number=13, oneof="type", message=g_package.PackageNote, + ) + + deployment = proto.Field( + proto.MESSAGE, number=14, oneof="type", message=g_deployment.DeploymentNote, + ) + + discovery = proto.Field( + proto.MESSAGE, number=15, oneof="type", message=g_discovery.DiscoveryNote, + ) + + attestation = proto.Field( + proto.MESSAGE, number=16, oneof="type", message=g_attestation.AttestationNote, + ) + + upgrade = proto.Field( + proto.MESSAGE, number=17, oneof="type", message=g_upgrade.UpgradeNote, + ) + + +class GetOccurrenceRequest(proto.Message): + r"""Request to get an occurrence. + + Attributes: + name (str): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListOccurrencesRequest(proto.Message): + r"""Request to list occurrences. + + Attributes: + parent (str): + The name of the project to list occurrences for in the form + of ``projects/[PROJECT_ID]``. + filter (str): + The filter expression. + page_size (int): + Number of occurrences to return in the list. + Must be positive. Max allowed page size is 1000. + If not specified, page size defaults to 20. + page_token (str): + Token to provide to skip to a particular spot + in the list. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListOccurrencesResponse(proto.Message): + r"""Response for listing occurrences. + + Attributes: + occurrences (Sequence[~.grafeas.Occurrence]): + The occurrences requested. + next_page_token (str): + The next pagination token in the list response. It should be + used as ``page_token`` for the following request. An empty + value means no more results. + """ + + @property + def raw_page(self): + return self + + occurrences = proto.RepeatedField(proto.MESSAGE, number=1, message=Occurrence,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteOccurrenceRequest(proto.Message): + r"""Request to delete an occurrence. + + Attributes: + name (str): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateOccurrenceRequest(proto.Message): + r"""Request to create a new occurrence. 
+ + Attributes: + parent (str): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the occurrence is to + be created. + occurrence (~.grafeas.Occurrence): + The occurrence to create. + """ + + parent = proto.Field(proto.STRING, number=1) + + occurrence = proto.Field(proto.MESSAGE, number=2, message=Occurrence,) + + +class UpdateOccurrenceRequest(proto.Message): + r"""Request to update an occurrence. + + Attributes: + name (str): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + occurrence (~.grafeas.Occurrence): + The updated occurrence. + update_mask (~.field_mask.FieldMask): + The fields to update. + """ + + name = proto.Field(proto.STRING, number=1) + + occurrence = proto.Field(proto.MESSAGE, number=2, message=Occurrence,) + + update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + + +class GetNoteRequest(proto.Message): + r"""Request to get a note. + + Attributes: + name (str): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class GetOccurrenceNoteRequest(proto.Message): + r"""Request to get the note to which the specified occurrence is + attached. + + Attributes: + name (str): + The name of the occurrence in the form of + ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListNotesRequest(proto.Message): + r"""Request to list notes. + + Attributes: + parent (str): + The name of the project to list notes for in the form of + ``projects/[PROJECT_ID]``. + filter (str): + The filter expression. + page_size (int): + Number of notes to return in the list. Must + be positive. Max allowed page size is 1000. If + not specified, page size defaults to 20. + page_token (str): + Token to provide to skip to a particular spot + in the list. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListNotesResponse(proto.Message): + r"""Response for listing notes. + + Attributes: + notes (Sequence[~.grafeas.Note]): + The notes requested. + next_page_token (str): + The next pagination token in the list response. It should be + used as ``page_token`` for the following request. An empty + value means no more results. + """ + + @property + def raw_page(self): + return self + + notes = proto.RepeatedField(proto.MESSAGE, number=1, message=Note,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteNoteRequest(proto.Message): + r"""Request to delete a note. + + Attributes: + name (str): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateNoteRequest(proto.Message): + r"""Request to create a new note. + + Attributes: + parent (str): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the note is to be + created. + note_id (str): + The ID to use for this note. + note (~.grafeas.Note): + The note to create. + """ + + parent = proto.Field(proto.STRING, number=1) + + note_id = proto.Field(proto.STRING, number=2) + + note = proto.Field(proto.MESSAGE, number=3, message=Note,) + + +class UpdateNoteRequest(proto.Message): + r"""Request to update a note. 
+ + Attributes: + name (str): + The name of the note in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + note (~.grafeas.Note): + The updated note. + update_mask (~.field_mask.FieldMask): + The fields to update. + """ + + name = proto.Field(proto.STRING, number=1) + + note = proto.Field(proto.MESSAGE, number=2, message=Note,) + + update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + + +class ListNoteOccurrencesRequest(proto.Message): + r"""Request to list occurrences for a note. + + Attributes: + name (str): + The name of the note to list occurrences for in the form of + ``projects/[PROVIDER_ID]/notes/[NOTE_ID]``. + filter (str): + The filter expression. + page_size (int): + Number of occurrences to return in the list. + page_token (str): + Token to provide to skip to a particular spot + in the list. + """ + + name = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListNoteOccurrencesResponse(proto.Message): + r"""Response for listing occurrences for a note. + + Attributes: + occurrences (Sequence[~.grafeas.Occurrence]): + The occurrences attached to the specified + note. + next_page_token (str): + Token to provide to skip to a particular spot + in the list. + """ + + @property + def raw_page(self): + return self + + occurrences = proto.RepeatedField(proto.MESSAGE, number=1, message=Occurrence,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class BatchCreateNotesRequest(proto.Message): + r"""Request to create notes in batch. + + Attributes: + parent (str): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the notes are to be + created. + notes (Sequence[~.grafeas.BatchCreateNotesRequest.NotesEntry]): + The notes to create. Max allowed length is + 1000. + """ + + parent = proto.Field(proto.STRING, number=1) + + notes = proto.MapField(proto.STRING, proto.MESSAGE, number=2, message=Note,) + + +class BatchCreateNotesResponse(proto.Message): + r"""Response for creating notes in batch. + + Attributes: + notes (Sequence[~.grafeas.Note]): + The notes that were created. + """ + + notes = proto.RepeatedField(proto.MESSAGE, number=1, message=Note,) + + +class BatchCreateOccurrencesRequest(proto.Message): + r"""Request to create occurrences in batch. + + Attributes: + parent (str): + The name of the project in the form of + ``projects/[PROJECT_ID]``, under which the occurrences are + to be created. + occurrences (Sequence[~.grafeas.Occurrence]): + The occurrences to create. Max allowed length + is 1000. + """ + + parent = proto.Field(proto.STRING, number=1) + + occurrences = proto.RepeatedField(proto.MESSAGE, number=2, message=Occurrence,) + + +class BatchCreateOccurrencesResponse(proto.Message): + r"""Response for creating occurrences in batch. + + Attributes: + occurrences (Sequence[~.grafeas.Occurrence]): + The occurrences that were created. 
+ """ + + occurrences = proto.RepeatedField(proto.MESSAGE, number=1, message=Occurrence,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/image.py b/packages/grafeas/grafeas/grafeas_v1/types/image.py new file mode 100644 index 000000000000..07ab7da77dc2 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/image.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="grafeas.v1", + manifest={"Layer", "Fingerprint", "ImageNote", "ImageOccurrence",}, +) + + +class Layer(proto.Message): + r"""Layer holds metadata specific to a layer of a Docker image. + + Attributes: + directive (str): + Required. The recovered Dockerfile directive + used to construct this layer. See + https://docs.docker.com/engine/reference/builder/ + for more information. + arguments (str): + The recovered arguments to the Dockerfile + directive. + """ + + directive = proto.Field(proto.STRING, number=1) + + arguments = proto.Field(proto.STRING, number=2) + + +class Fingerprint(proto.Message): + r"""A set of properties that uniquely identify a given Docker + image. + + Attributes: + v1_name (str): + Required. The layer ID of the final layer in + the Docker image's v1 representation. + v2_blob (Sequence[str]): + Required. The ordered list of v2 blobs that + represent a given image. + v2_name (str): + Output only. The name of the image's v2 blobs computed via: + [bottom] := v2_blob[bottom] [N] := sha256(v2_blob[N] + " " + + v2_name[N+1]) Only the name of the final blob is kept. + """ + + v1_name = proto.Field(proto.STRING, number=1) + + v2_blob = proto.RepeatedField(proto.STRING, number=2) + + v2_name = proto.Field(proto.STRING, number=3) + + +class ImageNote(proto.Message): + r"""Basis describes the base image portion (Note) of the DockerImage + relationship. Linked occurrences are derived from this or an + equivalent image via: FROM Or an equivalent + reference, e.g., a tag of the resource_url. + + Attributes: + resource_url (str): + Required. Immutable. The resource_url for the resource + representing the basis of associated occurrence images. + fingerprint (~.image.Fingerprint): + Required. Immutable. The fingerprint of the + base image. + """ + + resource_url = proto.Field(proto.STRING, number=1) + + fingerprint = proto.Field(proto.MESSAGE, number=2, message=Fingerprint,) + + +class ImageOccurrence(proto.Message): + r"""Details of the derived image portion of the DockerImage + relationship. This image would be produced from a Dockerfile + with FROM . + + Attributes: + fingerprint (~.image.Fingerprint): + Required. The fingerprint of the derived + image. + distance (int): + Output only. The number of layers by which + this image differs from the associated image + basis. 
+ layer_info (Sequence[~.image.Layer]): + This contains layer-specific metadata, if populated it has + length "distance" and is ordered with [distance] being the + layer immediately following the base image and [1] being the + final layer. + base_resource_url (str): + Output only. This contains the base image URL + for the derived image occurrence. + """ + + fingerprint = proto.Field(proto.MESSAGE, number=1, message=Fingerprint,) + + distance = proto.Field(proto.INT32, number=2) + + layer_info = proto.RepeatedField(proto.MESSAGE, number=3, message=Layer,) + + base_resource_url = proto.Field(proto.STRING, number=4) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/package.py b/packages/grafeas/grafeas/grafeas_v1/types/package.py new file mode 100644 index 000000000000..348872445667 --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/package.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="grafeas.v1", + manifest={ + "Architecture", + "Distribution", + "Location", + "PackageNote", + "PackageOccurrence", + "Version", + }, +) + + +class Architecture(proto.Enum): + r"""Instruction set architectures supported by various package + managers. + """ + ARCHITECTURE_UNSPECIFIED = 0 + X86 = 1 + X64 = 2 + + +class Distribution(proto.Message): + r"""This represents a particular channel of distribution for a + given package. E.g., Debian's jessie-backports dpkg mirror. + + Attributes: + cpe_uri (str): + Required. The cpe_uri in `CPE + format `__ denoting + the package manager version distributing a package. + architecture (~.package.Architecture): + The CPU architecture for which packages in + this distribution channel were built. + latest_version (~.package.Version): + The latest available version of this package + in this distribution channel. + maintainer (str): + A freeform string denoting the maintainer of + this package. + url (str): + The distribution channel-specific homepage + for this package. + description (str): + The distribution channel-specific description + of this package. + """ + + cpe_uri = proto.Field(proto.STRING, number=1) + + architecture = proto.Field(proto.ENUM, number=2, enum="Architecture",) + + latest_version = proto.Field(proto.MESSAGE, number=3, message="Version",) + + maintainer = proto.Field(proto.STRING, number=4) + + url = proto.Field(proto.STRING, number=5) + + description = proto.Field(proto.STRING, number=6) + + +class Location(proto.Message): + r"""An occurrence of a particular package installation found within a + system's filesystem. E.g., glibc was found in + ``/var/lib/dpkg/status``. + + Attributes: + cpe_uri (str): + Required. The CPE URI in `CPE + format `__ denoting + the package manager version distributing a package. + version (~.package.Version): + The version installed at this location. 
+ path (str): + The path from which we gathered that this + package/version is installed. + """ + + cpe_uri = proto.Field(proto.STRING, number=1) + + version = proto.Field(proto.MESSAGE, number=2, message="Version",) + + path = proto.Field(proto.STRING, number=3) + + +class PackageNote(proto.Message): + r"""This represents a particular package that is distributed over + various channels. E.g., glibc (aka libc6) is distributed by + many, at various versions. + + Attributes: + name (str): + Required. Immutable. The name of the package. + distribution (Sequence[~.package.Distribution]): + The various channels by which a package is + distributed. + """ + + name = proto.Field(proto.STRING, number=1) + + distribution = proto.RepeatedField(proto.MESSAGE, number=10, message=Distribution,) + + +class PackageOccurrence(proto.Message): + r"""Details on how a particular software package was installed on + a system. + + Attributes: + name (str): + Output only. The name of the installed + package. + location (Sequence[~.package.Location]): + Required. All of the places within the + filesystem versions of this package have been + found. + """ + + name = proto.Field(proto.STRING, number=1) + + location = proto.RepeatedField(proto.MESSAGE, number=2, message=Location,) + + +class Version(proto.Message): + r"""Version contains structured information about the version of + a package. + + Attributes: + epoch (int): + Used to correct mistakes in the version + numbering scheme. + name (str): + Required only when version kind is NORMAL. + The main part of the version name. + revision (str): + The iteration of the package build from the + above version. + kind (~.package.Version.VersionKind): + Required. Distinguishes between sentinel + MIN/MAX versions and normal versions. + full_name (str): + Human readable version string. This string is + of the form :- and is + only set when kind is NORMAL. + """ + + class VersionKind(proto.Enum): + r"""Whether this is an ordinary package version or a sentinel + MIN/MAX version. + """ + VERSION_KIND_UNSPECIFIED = 0 + NORMAL = 1 + MINIMUM = 2 + MAXIMUM = 3 + + epoch = proto.Field(proto.INT32, number=1) + + name = proto.Field(proto.STRING, number=2) + + revision = proto.Field(proto.STRING, number=3) + + kind = proto.Field(proto.ENUM, number=4, enum=VersionKind,) + + full_name = proto.Field(proto.STRING, number=5) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/provenance.py b/packages/grafeas/grafeas/grafeas_v1/types/provenance.py new file mode 100644 index 000000000000..53079d1943af --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/provenance.py @@ -0,0 +1,414 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
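Nested and repeated messages in this module compose like ordinary Python objects, and map fields such as build_options accept plain dicts. A short sketch of assembling a BuildProvenance and attaching it to the BuildOccurrence type added earlier in this PR; every identifier and value below is a placeholder, not output from a real build:

    from grafeas.grafeas_v1 import types

    provenance = types.BuildProvenance(
        id="build-1234",                      # placeholder build ID
        project_id="my-project",              # placeholder project
        creator="builder@example.com",
        commands=[
            types.Command(name="docker", args=["build", "-t", "app", "."]),
        ],
        built_artifacts=[
            types.Artifact(
                id="gcr.io/my-project/app@sha256:123456",
                names=["app:latest"],
            ),
        ],
        build_options={"machineType": "N1_HIGHCPU_8"},  # free-form provider options
    )

    occurrence_details = types.BuildOccurrence(provenance=provenance)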
+# + +import proto # type: ignore + + +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="grafeas.v1", + manifest={ + "BuildProvenance", + "Source", + "FileHashes", + "Hash", + "Command", + "Artifact", + "SourceContext", + "AliasContext", + "CloudRepoSourceContext", + "GerritSourceContext", + "GitSourceContext", + "RepoId", + "ProjectRepoId", + }, +) + + +class BuildProvenance(proto.Message): + r"""Provenance of a build. Contains all information needed to + verify the full details about the build from source to + completion. + + Attributes: + id (str): + Required. Unique identifier of the build. + project_id (str): + ID of the project. + commands (Sequence[~.provenance.Command]): + Commands requested by the build. + built_artifacts (Sequence[~.provenance.Artifact]): + Output of the build. + create_time (~.timestamp.Timestamp): + Time at which the build was created. + start_time (~.timestamp.Timestamp): + Time at which execution of the build was + started. + end_time (~.timestamp.Timestamp): + Time at which execution of the build was + finished. + creator (str): + E-mail address of the user who initiated this + build. Note that this was the user's e-mail + address at the time the build was initiated; + this address may not represent the same end-user + for all time. + logs_uri (str): + URI where any logs for this provenance were + written. + source_provenance (~.provenance.Source): + Details of the Source input to the build. + trigger_id (str): + Trigger identifier if the build was triggered + automatically; empty if not. + build_options (Sequence[~.provenance.BuildProvenance.BuildOptionsEntry]): + Special options applied to this build. This + is a catch-all field where build providers can + enter any desired additional details. + builder_version (str): + Version string of the builder at the time + this build was executed. + """ + + id = proto.Field(proto.STRING, number=1) + + project_id = proto.Field(proto.STRING, number=2) + + commands = proto.RepeatedField(proto.MESSAGE, number=3, message="Command",) + + built_artifacts = proto.RepeatedField(proto.MESSAGE, number=4, message="Artifact",) + + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + start_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + creator = proto.Field(proto.STRING, number=8) + + logs_uri = proto.Field(proto.STRING, number=9) + + source_provenance = proto.Field(proto.MESSAGE, number=10, message="Source",) + + trigger_id = proto.Field(proto.STRING, number=11) + + build_options = proto.MapField(proto.STRING, proto.STRING, number=12) + + builder_version = proto.Field(proto.STRING, number=13) + + +class Source(proto.Message): + r"""Source describes the location of the source used for the + build. + + Attributes: + artifact_storage_source_uri (str): + If provided, the input binary artifacts for + the build came from this location. + file_hashes (Sequence[~.provenance.Source.FileHashesEntry]): + Hash(es) of the build source, which can be + used to verify that the original source + integrity was maintained in the build. + The keys to this map are file paths used as + build source and the values contain the hash + values for those files. + + If the build source came in a single package + such as a gzipped tarfile (.tar.gz), the + FileHash will be for the single path to that + file. 
+ context (~.provenance.SourceContext): + If provided, the source code used for the + build came from this location. + additional_contexts (Sequence[~.provenance.SourceContext]): + If provided, some of the source code used for + the build may be found in these locations, in + the case where the source repository had + multiple remotes or submodules. This list will + not include the context specified in the context + field. + """ + + artifact_storage_source_uri = proto.Field(proto.STRING, number=1) + + file_hashes = proto.MapField( + proto.STRING, proto.MESSAGE, number=2, message="FileHashes", + ) + + context = proto.Field(proto.MESSAGE, number=3, message="SourceContext",) + + additional_contexts = proto.RepeatedField( + proto.MESSAGE, number=4, message="SourceContext", + ) + + +class FileHashes(proto.Message): + r"""Container message for hashes of byte content of files, used + in source messages to verify integrity of source input to the + build. + + Attributes: + file_hash (Sequence[~.provenance.Hash]): + Required. Collection of file hashes. + """ + + file_hash = proto.RepeatedField(proto.MESSAGE, number=1, message="Hash",) + + +class Hash(proto.Message): + r"""Container message for hash values. + + Attributes: + type (str): + Required. The type of hash that was + performed, e.g. "SHA-256". + value (bytes): + Required. The hash value. + """ + + type = proto.Field(proto.STRING, number=1) + + value = proto.Field(proto.BYTES, number=2) + + +class Command(proto.Message): + r"""Command describes a step performed as part of the build + pipeline. + + Attributes: + name (str): + Required. Name of the command, as presented on the command + line, or if the command is packaged as a Docker container, + as presented to ``docker pull``. + env (Sequence[str]): + Environment variables set before running this + command. + args (Sequence[str]): + Command-line arguments used when executing + this command. + dir (str): + Working directory (relative to project source + root) used when running this command. + id (str): + Optional unique identifier for this command, used in + wait_for to reference this command as a dependency. + wait_for (Sequence[str]): + The ID(s) of the command(s) that this command + depends on. + """ + + name = proto.Field(proto.STRING, number=1) + + env = proto.RepeatedField(proto.STRING, number=2) + + args = proto.RepeatedField(proto.STRING, number=3) + + dir = proto.Field(proto.STRING, number=4) + + id = proto.Field(proto.STRING, number=5) + + wait_for = proto.RepeatedField(proto.STRING, number=6) + + +class Artifact(proto.Message): + r"""Artifact describes a build product. + + Attributes: + checksum (str): + Hash or checksum value of a binary, or Docker + Registry 2.0 digest of a container. + id (str): + Artifact ID, if any; for container images, this will be a + URL by digest like + ``gcr.io/projectID/imagename@sha256:123456``. + names (Sequence[str]): + Related artifact names. This may be the path to a binary or + jar file, or in the case of a container build, the name used + to push the container image to Google Container Registry, as + presented to ``docker push``. Note that a single Artifact ID + can have multiple names, for example if two tags are applied + to one image. + """ + + checksum = proto.Field(proto.STRING, number=1) + + id = proto.Field(proto.STRING, number=2) + + names = proto.RepeatedField(proto.STRING, number=3) + + +class SourceContext(proto.Message): + r"""A SourceContext is a reference to a tree of files. 
A + SourceContext together with a path point to a unique revision of + a single file or directory. + + Attributes: + cloud_repo (~.provenance.CloudRepoSourceContext): + A SourceContext referring to a revision in a + Google Cloud Source Repo. + gerrit (~.provenance.GerritSourceContext): + A SourceContext referring to a Gerrit + project. + git (~.provenance.GitSourceContext): + A SourceContext referring to any third party + Git repo (e.g., GitHub). + labels (Sequence[~.provenance.SourceContext.LabelsEntry]): + Labels with user defined metadata. + """ + + cloud_repo = proto.Field( + proto.MESSAGE, number=1, oneof="context", message="CloudRepoSourceContext", + ) + + gerrit = proto.Field( + proto.MESSAGE, number=2, oneof="context", message="GerritSourceContext", + ) + + git = proto.Field( + proto.MESSAGE, number=3, oneof="context", message="GitSourceContext", + ) + + labels = proto.MapField(proto.STRING, proto.STRING, number=4) + + +class AliasContext(proto.Message): + r"""An alias to a repo revision. + + Attributes: + kind (~.provenance.AliasContext.Kind): + The alias kind. + name (str): + The alias name. + """ + + class Kind(proto.Enum): + r"""The type of an alias.""" + KIND_UNSPECIFIED = 0 + FIXED = 1 + MOVABLE = 2 + OTHER = 4 + + kind = proto.Field(proto.ENUM, number=1, enum=Kind,) + + name = proto.Field(proto.STRING, number=2) + + +class CloudRepoSourceContext(proto.Message): + r"""A CloudRepoSourceContext denotes a particular revision in a + Google Cloud Source Repo. + + Attributes: + repo_id (~.provenance.RepoId): + The ID of the repo. + revision_id (str): + A revision ID. + alias_context (~.provenance.AliasContext): + An alias, which may be a branch or tag. + """ + + repo_id = proto.Field(proto.MESSAGE, number=1, message="RepoId",) + + revision_id = proto.Field(proto.STRING, number=2, oneof="revision") + + alias_context = proto.Field( + proto.MESSAGE, number=3, oneof="revision", message=AliasContext, + ) + + +class GerritSourceContext(proto.Message): + r"""A SourceContext referring to a Gerrit project. + + Attributes: + host_uri (str): + The URI of a running Gerrit instance. + gerrit_project (str): + The full project name within the host. + Projects may be nested, so "project/subproject" + is a valid project name. The "repo name" is the + hostURI/project. + revision_id (str): + A revision (commit) ID. + alias_context (~.provenance.AliasContext): + An alias, which may be a branch or tag. + """ + + host_uri = proto.Field(proto.STRING, number=1) + + gerrit_project = proto.Field(proto.STRING, number=2) + + revision_id = proto.Field(proto.STRING, number=3, oneof="revision") + + alias_context = proto.Field( + proto.MESSAGE, number=4, oneof="revision", message=AliasContext, + ) + + +class GitSourceContext(proto.Message): + r"""A GitSourceContext denotes a particular revision in a third + party Git repository (e.g., GitHub). + + Attributes: + url (str): + Git repository URL. + revision_id (str): + Git commit hash. + """ + + url = proto.Field(proto.STRING, number=1) + + revision_id = proto.Field(proto.STRING, number=2) + + +class RepoId(proto.Message): + r"""A unique identifier for a Cloud Repo. + + Attributes: + project_repo_id (~.provenance.ProjectRepoId): + A combination of a project ID and a repo + name. + uid (str): + A server-assigned, globally unique + identifier. 
+ """ + + project_repo_id = proto.Field( + proto.MESSAGE, number=1, oneof="id", message="ProjectRepoId", + ) + + uid = proto.Field(proto.STRING, number=2, oneof="id") + + +class ProjectRepoId(proto.Message): + r"""Selects a repo using a Google Cloud Platform project ID + (e.g., winged-cargo-31) and a repo name within that project. + + Attributes: + project_id (str): + The ID of the project. + repo_name (str): + The name of the repo. Leave empty for the + default repo. + """ + + project_id = proto.Field(proto.STRING, number=1) + + repo_name = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/grafeas/grafeas_v1/types/upgrade.py b/packages/grafeas/grafeas/grafeas_v1/types/upgrade.py new file mode 100644 index 000000000000..0f3308d57d0e --- /dev/null +++ b/packages/grafeas/grafeas/grafeas_v1/types/upgrade.py @@ -0,0 +1,209 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from grafeas.grafeas_v1.types import package as g_package + + +__protobuf__ = proto.module( + package="grafeas.v1", + manifest={ + "UpgradeNote", + "UpgradeDistribution", + "WindowsUpdate", + "UpgradeOccurrence", + }, +) + + +class UpgradeNote(proto.Message): + r"""An Upgrade Note represents a potential upgrade of a package to a + given version. For each package version combination (i.e. bash 4.0, + bash 4.1, bash 4.1.2), there will be an Upgrade Note. For Windows, + windows_update field represents the information related to the + update. + + Attributes: + package (str): + Required for non-Windows OS. The package this + Upgrade is for. + version (~.g_package.Version): + Required for non-Windows OS. The version of + the package in machine + human readable form. + distributions (Sequence[~.upgrade.UpgradeDistribution]): + Metadata about the upgrade for each specific + operating system. + windows_update (~.upgrade.WindowsUpdate): + Required for Windows OS. Represents the + metadata about the Windows update. + """ + + package = proto.Field(proto.STRING, number=1) + + version = proto.Field(proto.MESSAGE, number=2, message=g_package.Version,) + + distributions = proto.RepeatedField( + proto.MESSAGE, number=3, message="UpgradeDistribution", + ) + + windows_update = proto.Field(proto.MESSAGE, number=4, message="WindowsUpdate",) + + +class UpgradeDistribution(proto.Message): + r"""The Upgrade Distribution represents metadata about the + Upgrade for each operating system (CPE). Some distributions have + additional metadata around updates, classifying them into + various categories and severities. + + Attributes: + cpe_uri (str): + Required - The specific operating system this + metadata applies to. See + https://cpe.mitre.org/specification/. + classification (str): + The operating system classification of this Upgrade, as + specified by the upstream operating system upgrade feed. 
For
+            Windows the classification is one of the category_ids listed
+            at
+            https://docs.microsoft.com/en-us/previous-versions/windows/desktop/ff357803(v=vs.85)
+        severity (str):
+            The severity as specified by the upstream
+            operating system.
+        cve (Sequence[str]):
+            The cve tied to this Upgrade.
+    """
+
+    cpe_uri = proto.Field(proto.STRING, number=1)
+
+    classification = proto.Field(proto.STRING, number=2)
+
+    severity = proto.Field(proto.STRING, number=3)
+
+    cve = proto.RepeatedField(proto.STRING, number=4)
+
+
+class WindowsUpdate(proto.Message):
+    r"""Windows Update represents the metadata about the update for
+    the Windows operating system. The fields in this message come
+    from the Windows Update API documented at
+    https://docs.microsoft.com/en-us/windows/win32/api/wuapi/nn-wuapi-iupdate.
+
+    Attributes:
+        identity (~.upgrade.WindowsUpdate.Identity):
+            Required - The unique identifier for the
+            update.
+        title (str):
+            The localized title of the update.
+        description (str):
+            The localized description of the update.
+        categories (Sequence[~.upgrade.WindowsUpdate.Category]):
+            The list of categories to which the update
+            belongs.
+        kb_article_ids (Sequence[str]):
+            The Microsoft Knowledge Base article IDs that
+            are associated with the update.
+        support_url (str):
+            The hyperlink to the support information for
+            the update.
+        last_published_timestamp (~.timestamp.Timestamp):
+            The last published timestamp of the update.
+    """
+
+    class Identity(proto.Message):
+        r"""The unique identifier of the update.
+
+        Attributes:
+            update_id (str):
+                The revision independent identifier of the
+                update.
+            revision (int):
+                The revision number of the update.
+        """
+
+        update_id = proto.Field(proto.STRING, number=1)
+
+        revision = proto.Field(proto.INT32, number=2)
+
+    class Category(proto.Message):
+        r"""The category to which the update belongs.
+
+        Attributes:
+            category_id (str):
+                The identifier of the category.
+            name (str):
+                The localized name of the category.
+        """
+
+        category_id = proto.Field(proto.STRING, number=1)
+
+        name = proto.Field(proto.STRING, number=2)
+
+    identity = proto.Field(proto.MESSAGE, number=1, message=Identity,)
+
+    title = proto.Field(proto.STRING, number=2)
+
+    description = proto.Field(proto.STRING, number=3)
+
+    categories = proto.RepeatedField(proto.MESSAGE, number=4, message=Category,)
+
+    kb_article_ids = proto.RepeatedField(proto.STRING, number=5)
+
+    support_url = proto.Field(proto.STRING, number=6)
+
+    last_published_timestamp = proto.Field(
+        proto.MESSAGE, number=7, message=timestamp.Timestamp,
+    )
+
+
+class UpgradeOccurrence(proto.Message):
+    r"""An Upgrade Occurrence represents that a specific resource_url could
+    install a specific upgrade. This presence is supplied via local
+    sources (i.e. it is present in the mirror and the running system has
+    noticed its availability). For Windows, both distribution and
+    windows_update contain information for the Windows update.
+
+    Attributes:
+        package (str):
+            Required for non-Windows OS. The package this
+            Upgrade is for.
+        parsed_version (~.g_package.Version):
+            Required for non-Windows OS. The version of
+            the package in a machine + human readable form.
+        distribution (~.upgrade.UpgradeDistribution):
+            Metadata about the upgrade available for the specific
+            operating system for the resource_url. This allows efficient
+            filtering, as well as making it easier to use the
+            occurrence.
+        windows_update (~.upgrade.WindowsUpdate):
+            Required for Windows OS. Represents the
+            metadata about the Windows update.
+    """
+
+    package = proto.Field(proto.STRING, number=1)
+
+    parsed_version = proto.Field(proto.MESSAGE, number=3, message=g_package.Version,)
+
+    distribution = proto.Field(proto.MESSAGE, number=4, message=UpgradeDistribution,)
+
+    windows_update = proto.Field(proto.MESSAGE, number=5, message=WindowsUpdate,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py b/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py
new file mode 100644
index 000000000000..12626ab039cc
--- /dev/null
+++ b/packages/grafeas/grafeas/grafeas_v1/types/vulnerability.py
@@ -0,0 +1,343 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto  # type: ignore
+
+
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+from grafeas.grafeas_v1.types import common
+from grafeas.grafeas_v1.types import cvss
+from grafeas.grafeas_v1.types import package
+
+
+__protobuf__ = proto.module(
+    package="grafeas.v1",
+    manifest={"Severity", "VulnerabilityNote", "VulnerabilityOccurrence",},
+)
+
+
+class Severity(proto.Enum):
+    r"""Note provider assigned severity/impact ranking."""
+    SEVERITY_UNSPECIFIED = 0
+    MINIMAL = 1
+    LOW = 2
+    MEDIUM = 3
+    HIGH = 4
+    CRITICAL = 5
+
+
+class VulnerabilityNote(proto.Message):
+    r"""A security vulnerability that can be found in resources.
+
+    Attributes:
+        cvss_score (float):
+            The CVSS score of this vulnerability. CVSS
+            score is on a scale of 0 - 10 where 0 indicates
+            low severity and 10 indicates high severity.
+        severity (~.vulnerability.Severity):
+            The note provider assigned severity of this
+            vulnerability.
+        details (Sequence[~.vulnerability.VulnerabilityNote.Detail]):
+            Details of all known distros and packages
+            affected by this vulnerability.
+        cvss_v3 (~.cvss.CVSSv3):
+            The full description of the CVSSv3 for this
+            vulnerability.
+        windows_details (Sequence[~.vulnerability.VulnerabilityNote.WindowsDetail]):
+            Windows details get their own format because
+            the information format and model don't match a
+            normal detail. Specifically Windows updates are
+            done as patches, thus Windows vulnerabilities
+            really are a missing package, rather than a
+            package being at an incorrect version.
+        source_update_time (~.timestamp.Timestamp):
+            The time this information was last changed at
+            the source. This is an upstream timestamp from
+            the underlying information source - e.g. Ubuntu
+            security tracker.
+    """
+
+    class Detail(proto.Message):
+        r"""A detail for a distro and package affected by this
+        vulnerability and its associated fix (if one is available).
+
+        Attributes:
+            severity_name (str):
+                The distro assigned severity of this
+                vulnerability.
+            description (str):
+                A vendor-specific description of this
+                vulnerability.
+            package_type (str):
+                The type of package; whether native or non
+                native (e.g., ruby gems, node.js packages,
+                etc.).
+            affected_cpe_uri (str):
+                Required. The `CPE
+                URI <https://cpe.mitre.org/specification/>`__ this
+                vulnerability affects.
+            affected_package (str):
+                Required. The package this vulnerability
+                affects.
+            affected_version_start (~.package.Version):
+                The version number at the start of an interval in which this
+                vulnerability exists. A vulnerability can affect a package
+                between version numbers that are disjoint sets of intervals
+                (example: [1.0.0-1.1.0], [2.4.6-2.4.8] and [4.5.6-4.6.8])
+                each of which will be represented in its own Detail. If a
+                specific affected version is provided by a vulnerability
+                database, affected_version_start and affected_version_end
+                will be the same in that Detail.
+            affected_version_end (~.package.Version):
+                The version number at the end of an interval in which this
+                vulnerability exists. A vulnerability can affect a package
+                between version numbers that are disjoint sets of intervals
+                (example: [1.0.0-1.1.0], [2.4.6-2.4.8] and [4.5.6-4.6.8])
+                each of which will be represented in its own Detail. If a
+                specific affected version is provided by a vulnerability
+                database, affected_version_start and affected_version_end
+                will be the same in that Detail.
+            fixed_cpe_uri (str):
+                The distro recommended `CPE
+                URI <https://cpe.mitre.org/specification/>`__ to update to
+                that contains a fix for this vulnerability. It is possible
+                for this to be different from the affected_cpe_uri.
+            fixed_package (str):
+                The distro recommended package to update to that contains a
+                fix for this vulnerability. It is possible for this to be
+                different from the affected_package.
+            fixed_version (~.package.Version):
+                The distro recommended version to update to
+                that contains a fix for this vulnerability.
+                Setting this to VersionKind.MAXIMUM means no
+                such version is yet available.
+            is_obsolete (bool):
+                Whether this detail is obsolete. Occurrences
+                are expected not to point to obsolete details.
+            source_update_time (~.timestamp.Timestamp):
+                The time this information was last changed at
+                the source. This is an upstream timestamp from
+                the underlying information source - e.g. Ubuntu
+                security tracker.
+        """
+
+        severity_name = proto.Field(proto.STRING, number=1)
+
+        description = proto.Field(proto.STRING, number=2)
+
+        package_type = proto.Field(proto.STRING, number=3)
+
+        affected_cpe_uri = proto.Field(proto.STRING, number=4)
+
+        affected_package = proto.Field(proto.STRING, number=5)
+
+        affected_version_start = proto.Field(
+            proto.MESSAGE, number=6, message=package.Version,
+        )
+
+        affected_version_end = proto.Field(
+            proto.MESSAGE, number=7, message=package.Version,
+        )
+
+        fixed_cpe_uri = proto.Field(proto.STRING, number=8)
+
+        fixed_package = proto.Field(proto.STRING, number=9)
+
+        fixed_version = proto.Field(proto.MESSAGE, number=10, message=package.Version,)
+
+        is_obsolete = proto.Field(proto.BOOL, number=11)
+
+        source_update_time = proto.Field(
+            proto.MESSAGE, number=12, message=timestamp.Timestamp,
+        )
+
+    class WindowsDetail(proto.Message):
+        r"""
+
+        Attributes:
+            cpe_uri (str):
+                Required. The `CPE
+                URI <https://cpe.mitre.org/specification/>`__ this
+                vulnerability affects.
+            name (str):
+                Required. The name of this vulnerability.
+            description (str):
+                The description of this vulnerability.
+            fixing_kbs (Sequence[~.vulnerability.VulnerabilityNote.WindowsDetail.KnowledgeBase]):
+                Required. The names of the KBs which have
+                hotfixes to mitigate this vulnerability. Note
+                that there may be multiple hotfixes (and thus
+                multiple KBs) that mitigate a given
+                vulnerability. Currently any listed KB's presence
+                is considered a fix.
+        """
+
+        class KnowledgeBase(proto.Message):
+            r"""
+
+            Attributes:
+                name (str):
+                    The KB name (generally of the form KB[0-9]+ (e.g.,
+                    KB123456)).
+                url (str):
+                    A link to the KB in the [Windows update catalog]
+                    (https://www.catalog.update.microsoft.com/).
+            """
+
+            name = proto.Field(proto.STRING, number=1)
+
+            url = proto.Field(proto.STRING, number=2)
+
+        cpe_uri = proto.Field(proto.STRING, number=1)
+
+        name = proto.Field(proto.STRING, number=2)
+
+        description = proto.Field(proto.STRING, number=3)
+
+        fixing_kbs = proto.RepeatedField(
+            proto.MESSAGE,
+            number=4,
+            message="VulnerabilityNote.WindowsDetail.KnowledgeBase",
+        )
+
+    cvss_score = proto.Field(proto.FLOAT, number=1)
+
+    severity = proto.Field(proto.ENUM, number=2, enum="Severity",)
+
+    details = proto.RepeatedField(proto.MESSAGE, number=3, message=Detail,)
+
+    cvss_v3 = proto.Field(proto.MESSAGE, number=4, message=cvss.CVSSv3,)
+
+    windows_details = proto.RepeatedField(
+        proto.MESSAGE, number=5, message=WindowsDetail,
+    )
+
+    source_update_time = proto.Field(
+        proto.MESSAGE, number=6, message=timestamp.Timestamp,
+    )
+
+
+class VulnerabilityOccurrence(proto.Message):
+    r"""An occurrence of a severity vulnerability on a resource.
+
+    Attributes:
+        type (str):
+            The type of package; whether native or non
+            native (e.g., ruby gems, node.js packages,
+            etc.).
+        severity (~.vulnerability.Severity):
+            Output only. The note provider assigned
+            severity of this vulnerability.
+        cvss_score (float):
+            Output only. The CVSS score of this
+            vulnerability. CVSS score is on a scale of 0 -
+            10 where 0 indicates low severity and 10
+            indicates high severity.
+        package_issue (Sequence[~.vulnerability.VulnerabilityOccurrence.PackageIssue]):
+            Required. The set of affected locations and
+            their fixes (if available) within the associated
+            resource.
+        short_description (str):
+            Output only. A one sentence description of
+            this vulnerability.
+        long_description (str):
+            Output only. A detailed description of this
+            vulnerability.
+        related_urls (Sequence[~.common.RelatedUrl]):
+            Output only. URLs related to this
+            vulnerability.
+        effective_severity (~.vulnerability.Severity):
+            The distro assigned severity for this
+            vulnerability when it is available, otherwise
+            this is the note provider assigned severity.
+        fix_available (bool):
+            Output only. Whether at least one of the
+            affected packages has a fix available.
+    """
+
+    class PackageIssue(proto.Message):
+        r"""A detail for a distro and package this vulnerability
+        occurrence was found in and its associated fix (if one is
+        available).
+
+        Attributes:
+            affected_cpe_uri (str):
+                Required. The `CPE
+                URI <https://cpe.mitre.org/specification/>`__ this
+                vulnerability was found in.
+            affected_package (str):
+                Required. The package this vulnerability was
+                found in.
+            affected_version (~.package.Version):
+                Required. The version of the package that is
+                installed on the resource affected by this
+                vulnerability.
+            fixed_cpe_uri (str):
+                The `CPE URI <https://cpe.mitre.org/specification/>`__ this
+                vulnerability was fixed in. It is possible for this to be
+                different from the affected_cpe_uri.
+            fixed_package (str):
+                The package this vulnerability was fixed in. It is possible
+                for this to be different from the affected_package.
+            fixed_version (~.package.Version):
+                Required. The version of the package this
+                vulnerability was fixed in. Setting this to
+                VersionKind.MAXIMUM means no fix is yet
+                available.
+            fix_available (bool):
+                Output only. Whether a fix is available for
+                this package.
+ """ + + affected_cpe_uri = proto.Field(proto.STRING, number=1) + + affected_package = proto.Field(proto.STRING, number=2) + + affected_version = proto.Field( + proto.MESSAGE, number=3, message=package.Version, + ) + + fixed_cpe_uri = proto.Field(proto.STRING, number=4) + + fixed_package = proto.Field(proto.STRING, number=5) + + fixed_version = proto.Field(proto.MESSAGE, number=6, message=package.Version,) + + fix_available = proto.Field(proto.BOOL, number=7) + + type = proto.Field(proto.STRING, number=1) + + severity = proto.Field(proto.ENUM, number=2, enum="Severity",) + + cvss_score = proto.Field(proto.FLOAT, number=3) + + package_issue = proto.RepeatedField(proto.MESSAGE, number=4, message=PackageIssue,) + + short_description = proto.Field(proto.STRING, number=5) + + long_description = proto.Field(proto.STRING, number=6) + + related_urls = proto.RepeatedField( + proto.MESSAGE, number=7, message=common.RelatedUrl, + ) + + effective_severity = proto.Field(proto.ENUM, number=8, enum="Severity",) + + fix_available = proto.Field(proto.BOOL, number=9) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/grafeas/mypy.ini b/packages/grafeas/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/grafeas/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/grafeas/noxfile.py b/packages/grafeas/noxfile.py index 4cccb7aab04f..c7768a840a89 100644 --- a/packages/grafeas/noxfile.py +++ b/packages/grafeas/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "grafeas", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -99,6 +101,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -134,7 +140,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=76") + session.run("coverage", "report", "--show-missing", "--fail-under=90") session.run("coverage", "erase") @@ -144,7 +150,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -159,3 +165,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/grafeas/samples/AUTHORING_GUIDE.md b/packages/grafeas/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/grafeas/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/grafeas/samples/CONTRIBUTING.md b/packages/grafeas/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/grafeas/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/grafeas/scripts/fixup_grafeas_v1_keywords.py b/packages/grafeas/scripts/fixup_grafeas_v1_keywords.py new file mode 100644 index 000000000000..8a782dc6e362 --- /dev/null +++ b/packages/grafeas/scripts/fixup_grafeas_v1_keywords.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class grafeasCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_notes': ('parent', 'notes', ), + 'batch_create_occurrences': ('parent', 'occurrences', ), + 'create_note': ('parent', 'note_id', 'note', ), + 'create_occurrence': ('parent', 'occurrence', ), + 'delete_note': ('name', ), + 'delete_occurrence': ('name', ), + 'get_note': ('name', ), + 'get_occurrence': ('name', ), + 'get_occurrence_note': ('name', ), + 'list_note_occurrences': ('name', 'filter', 'page_size', 'page_token', ), + 'list_notes': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_occurrences': ('parent', 'filter', 'page_size', 'page_token', ), + 'update_note': ('name', 'note', 'update_mask', ), + 'update_occurrence': ('name', 'occurrence', 'update_mask', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=grafeasCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the grafeas client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/grafeas/setup.py b/packages/grafeas/setup.py index b3a69bc590bf..d3a6570d9ea6 100644 --- a/packages/grafeas/setup.py +++ b/packages/grafeas/setup.py @@ -24,8 +24,9 @@ version = "0.4.1" release_status = "Development Status :: 3 - Alpha" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34; python_version < "3.4"', + "google-api-core[grpc] >= 1.21.0, < 2.0.0dev", + "proto-plus >= 1.4.0", + "libcst >= 0.2.5", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -35,7 +36,9 @@ readme = readme_file.read() packages = [ - package for package in setuptools.find_packages() if package.startswith("grafeas") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("grafeas") ] namespaces = ["grafeas"] @@ -54,12 +57,9 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -67,7 +67,8 @@ packages=packages, namespace_packages=namespaces, install_requires=dependencies, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=["scripts/fixup_grafeas_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/packages/grafeas/synth.metadata b/packages/grafeas/synth.metadata index 
9f63e42474c2..8051dc58a281 100644 --- a/packages/grafeas/synth.metadata +++ b/packages/grafeas/synth.metadata @@ -3,23 +3,23 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-grafeas.git", - "sha": "8eac6101e9cf3af0de347495254c4bf9f50e079e" + "remote": "git@github.com:googleapis/python-grafeas", + "sha": "fb7debad7a8793929b91d38f999941b2708d84bd" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b882b8e6bfcd708042ff00f7adc67ce750817dd0", - "internalRef": "318028816" + "sha": "96ef637adf148d54236ad83fefa30c7f75f29737", + "internalRef": "325052820" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "4530cc6ff080ef8aca258c1ec92c4db10a1bbfb4" } } ], diff --git a/packages/grafeas/synth.py b/packages/grafeas/synth.py index 041670a52d95..178df608f7bb 100644 --- a/packages/grafeas/synth.py +++ b/packages/grafeas/synth.py @@ -13,6 +13,7 @@ # limitations under the License. """This script is used to synthesize generated parts of this library.""" +import re import synthtool as s import synthtool.gcp as gcp @@ -31,344 +32,182 @@ version="v1", bazel_target="//grafeas/v1:grafeas-v1-py", proto_output_path="grafeas/grafeas_v1/proto", - include_protos=True + include_protos=True, ) -excludes = ["README.rst", "nox.py", "setup.py", "docs/index.rst"] - -# Make 'grafeas' a namespace -s.move(library / "grafeas", excludes=["__init__.py"]) -s.move(library / "docs", excludes=["conf.py", "index.rst"]) -s.move( - library / "google/cloud/grafeas_v1/proto", - "grafeas/grafeas_v1/proto", - excludes=excludes, -) -s.move(library / "tests") +excludes = ["README.rst", "setup.py", "docs/index.rst"] +s.move(library, excludes=excludes) -# Fix proto imports -s.replace( - ["grafeas/**/*.py", "tests/**/*.py"], - "from grafeas\.v1( import \w*_pb2)", - "from grafeas.grafeas_v1.proto\g<1>", -) -s.replace( - "grafeas/**/*_pb2.py", - "from grafeas_v1\.proto( import \w*_pb2)", - "from grafeas.grafeas_v1.proto\g<1>", -) -s.replace( - "grafeas/**/grafeas_pb2_grpc.py", - "from grafeas_v1\.proto", - "from grafeas.grafeas_v1.proto", -) # Make package name 'grafeas' -s.replace( - "grafeas/grafeas_v1/gapic/grafeas_client.py", "google-cloud-grafeas", "grafeas" -) - -# Fix docstrings with no summary lines -s.replace( - "grafeas/grafeas_v1/proto/vulnerability_pb2.py", - r"""(\s+)["']__doc__["']: \"\"\"Attributes:""", - """\g<1>"__doc__": \"\"\" - Attributes:""", -) - -# Replace mentions of 'Container Analysis' with 'Grafeas' in the docs -s.replace("docs/**/v*/*.rst", "Container Analysis", "Grafeas") - +s.replace("grafeas/**/*.py", "grafeas-grafeas", "grafeas") # ---------------------------------------------------------------------------- # Remove google-specific portions of library # ---------------------------------------------------------------------------- -# Please see this PR https://github.com/googleapis/google-cloud-python/pull/8186/ +# Please see this PR for more context +# https://github.com/googleapis/google-cloud-python/pull/8186/ # Remove default service address, default scopes, default credentials -# Update tests and code in docstrings showing client instantiation. 
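+# Illustrative sketch only (not generated output): with the defaults removed,
+# a client is built with an explicit transport, roughly as below. The transport
+# class name and host are assumptions; this diff itself only shows
+# GrafeasClient(transport=...).
+#
+#     from grafeas import grafeas_v1
+#     from grafeas.grafeas_v1.services.grafeas.transports import GrafeasGrpcTransport
+#
+#     transport = GrafeasGrpcTransport(host="my-grafeas.example.com:443")
+#     client = grafeas_v1.GrafeasClient(transport=transport)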
+# Users must pass a transport to the client constructor +# Remove default endpoint s.replace( - "grafeas/**/grafeas_client.py", - r""" SERVICE_ADDRESS = 'containeranalysis\.googleapis\.com:443' - \"\"\"The default address of the service\.\"\"\"""", + "grafeas/**/*client.py", + r"""\s+DEFAULT_ENDPOINT\s+=\s+"containeranalysis\.googleapis\.com" +\s+DEFAULT_MTLS_ENDPOINT\s+=\s+_get_default_mtls_endpoint\.__func__\( # type: ignore +\s+DEFAULT_ENDPOINT +\s+\)""", "", ) s.replace( - "grafeas/**/grafeas_client.py", - r""" def __init__\(self, transport=None, channel=None, credentials=None, - client_config=None, client_info=None, client_options=None\):""", - " def __init__(self, transport, client_config=None, client_info=None):", + "grafeas/**/transports/*.py", r"""'containeranalysis\.googleapis\.com'""", """''""", ) -s.replace( - "grafeas/**/grafeas_client.py", - r"""Union\[~\.GrafeasGrpcTransport, - Callable\[\[~\.Credentials, type], ~\.GrafeasGrpcTransport\]""", - """~.GrafeasGrpcTransport""", -) +# Remove 'from_service_account_file' method s.replace( - "grafeas/**/grafeas_client.py", - r""" channel \(grpc\.Channel\): DEPRECATED\. A ``Channel`` instance - through which to make calls\. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception\. - credentials \(google\.auth\.credentials\.Credentials\): The - authorization credentials to attach to requests\. These - credentials identify this application to the service\. If none - are specified, the client will attempt to ascertain the - credentials from the environment\. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception\.""", + "grafeas/**/*client.py", + """@classmethod + def from_service_account_file.* + from_service_account_json = from_service_account_file""", "", + flags=re.MULTILINE | re.DOTALL, ) -# Remove client_options -# api_endpoint is currently the only option and doesn't make sense for Grafeas. -s.replace("grafeas/**/grafeas_client.py", "import google.api_core.client_options\n", "") s.replace( - "grafeas/**/grafeas_client.py", - r""" client_options \(Union\[dict, google\.api_core\.client_options\.ClientOptions\]\): - Client options used to set user options on the client\. API Endpoint - should be set through client_options\. - \"\"\"""", - " \"\"\"" + "grafeas/**/async_client.py", + """\s+from_service_account_file = GrafeasClient\.from_service_account_file +\s+from_service_account_json = from_service_account_file""", + "", ) +# Remove credentials and client options from the service celint +# A transport must be used to initialize the client s.replace( - "grafeas/**/grafeas_client.py", - r"""if channel: - warnings\.warn\('The `channel` argument is deprecated; use ' - '`transport` instead\.', - PendingDeprecationWarning, stacklevel=2\) - - api_endpoint = self\.SERVICE_ADDRESS - if client_options: - if type\(client_options\) == dict: - client_options = google\.api_core\.client_options\.from_dict\(client_options\) - if client_options\.api_endpoint: - api_endpoint = client_options\.api_endpoint - - \# Instantiate the transport\. - \# The transport is responsible for handling serialization and - \# deserialization and actually sending data to the service\. 
- if transport: - if callable\(transport\): - self\.transport = transport\( - credentials=credentials, - default_class=grafeas_grpc_transport\.GrafeasGrpcTransport, - address=api_endpoint, - \) - else: - if credentials: - raise ValueError\( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive\.' - \) - self\.transport = transport - else: - self\.transport = grafeas_grpc_transport\.GrafeasGrpcTransport\( - address=api_endpoint, - channel=channel, - credentials=credentials, - \)""", - """# Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - self.transport = transport""", -) + "grafeas/**/client.py", + """(\s+)def __init__\(self.*?def """, + '''\g<1>def __init__(self, *, + transport: Union[str, GrafeasTransport] = None, + ) -> None: + """Instantiate the grafeas client. -s.replace( - "grafeas/**/grafeas_client.py", - r""" Example: - >>> from grafeas import grafeas_v1 - >>> - >>> client = grafeas_v1\.GrafeasClient\(\)""", - """ Example: - >>> from grafeas import grafeas_v1 - >>> from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - >>> - >>> address = "[SERVICE_ADDRESS]" - >>> scopes = ("[SCOPE]") - >>> transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - >>> client = grafeas_v1.GrafeasClient(transport)""", + Args: + transport (Union[str, ~.GrafeasTransport]): The + transport to use. + + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + + if isinstance(transport, GrafeasTransport): + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport() +\g<1>def ''', + flags=re.MULTILINE | re.DOTALL, ) +# do the same for async s.replace( - "grafeas/**/grafeas_client.py", - r''' @classmethod - def from_service_account_file\(cls, filename, \*args, \*\*kwargs\): - """Creates an instance of this client using the provided credentials - file\. + "grafeas/**/async_client.py", + """(\s+)def __init__\(self.*?async def """, + '''\g<1>def __init__(self, *, + transport: Union[str, GrafeasTransport] = 'grpc_asyncio', + ) -> None: + """Instantiate the grafeas client. Args: - filename \(str\): The path to the service account private key json - file\. - args: Additional arguments to pass to the constructor\. - kwargs: Additional arguments to pass to the constructor\. + transport (Union[str, ~.GrafeasTransport]): The + transport to use. - Returns: - GrafeasClient: The constructed client\. + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. """ - credentials = service_account\.Credentials\.from_service_account_file\( - filename\) - kwargs\['credentials'\] = credentials - return cls\(\*args, \*\*kwargs\) - from_service_account_json = from_service_account_file''', - "") - -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" \# The scopes needed to make gRPC calls to all of the methods defined - \# in this service\. 
- _OAUTH_SCOPES = \( - 'https://www\.googleapis\.com/auth/cloud-platform', - \)""", - "", + self._client = GrafeasClient( + transport=transport, + ) +\g<1>async def ''', + flags=re.MULTILINE | re.DOTALL, ) -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" def __init__\(self, channel=None, credentials=None, - address='containeranalysis\.googleapis\.com:443'\):""", - """ def __init__(self, address, scopes, channel=None, credentials=None):""", -) -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" \# Create the channel\. - if channel is None: - channel = self\.create_channel\( - address=address, - credentials=credentials, -""", - """ # Create the channel. - if channel is None: - channel = self.create_channel( - address, - scopes, - credentials=credentials, -""", -) +# Changes tests -s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" def create_channel\( - cls, - address='containeranalysis\.googleapis\.com:443', - credentials=None, - \*\*kwargs\):""", - """ def create_channel( - cls, - address, - scopes, - credentials=None, - **kwargs):""", -) +# remove use of credentials +s.replace("tests/**/test_grafeas.py", """credentials=credentials.*?,""", "") +# remove client_options +s.replace("tests/**/test_grafeas.py", """client_options=\{.*?\},""", "") +s.replace("tests/**/test_grafeas.py", """client_options=options,""", "") s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" Args: - address \(str\): The host for the channel to use\. - credentials \(~\.Credentials\): The - authorization credentials to attach to requests\. These - credentials identify this application to the service\. If - none are specified, the client will attempt to ascertain - the credentials from the environment\.""", - """ Args: - address (str): The host for the channel to use. - scopes (Sequence[str]): The scopes needed to make gRPC calls. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment.""", + "tests/**/test_grafeas.py", + """client_options=client_options.ClientOptions(.*?),""", + "", ) +# Delete irrelevant tests + +# client options tests s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" return google\.api_core\.grpc_helpers\.create_channel\( - address, - credentials=credentials, - scopes=cls\._OAUTH_SCOPES, - \*\*kwargs - \)""", - """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=scopes, - **kwargs - )""", + "tests/**/test_grafeas.py", + """def client_cert_source_callback.*?def test_get_occurrence""", + """def test_get_occurrence""", + flags=re.MULTILINE | re.DOTALL, ) +# default endpoint test s.replace( - "grafeas/**/grafeas_grpc_transport.py", - r""" \"\"\"Instantiate the transport class\. - - Args: - channel \(grpc\.Channel\): A ``Channel`` instance through - which to make calls\. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception\. - credentials \(google\.auth\.credentials\.Credentials\): The - authorization credentials to attach to requests\. These - credentials identify this application to the service\. If none - are specified, the client will attempt to ascertain the - credentials from the environment\. - address \(str\): The address where the service is hosted\.""", - ''' """Instantiate the transport class. - - Args: - address (str): The address where the service is hosted. 
- scopes (Sequence[str]): The scopes needed to make gRPC calls. - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - ''', + "tests/**/test_grafeas.py", + """def test_grafeas_host_no_port.*?def test_grafeas_grpc_transport_channel""", + """def test_grafeas_grpc_transport_channel""", + flags=re.MULTILINE | re.DOTALL, ) +# duplicate credentials tests s.replace( - "tests/**/test_grafeas_client_v1.py", - r"""from grafeas\.grafeas_v1\.proto import grafeas_pb2""", - r"""from grafeas.grafeas_v1.proto import grafeas_pb2 -from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport""", + "tests/**/test_grafeas.py", + """def test_credentials_transport_error.*?def test_transport_instance""", + """def test_transport_instance""", + flags=re.MULTILINE | re.DOTALL, ) s.replace( - "tests/**/test_grafeas_client_v1.py", - r"(\s+)client = grafeas_v1\.GrafeasClient\(\)", - r"""\g<1>address = "[SERVICE_ADDRESS]" -\g<1>scopes = ("SCOPE") -\g<1>transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) -\g<1>client=grafeas_v1.GrafeasClient(transport)""", + "tests/**/test_grafeas.py", + """def test_grafeas_base_transport_error.*?def test_grafeas_base_transport""", + """def test_grafeas_base_transport""", + flags=re.MULTILINE | re.DOTALL, ) - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=76) -s.move(templated_files) +templated_files = common.py_library( + samples=False, microgenerator=True, # set to True only if there are samples + cov_level=90 # some coverage is missing due to manual alterations +) + +s.move( + templated_files, excludes=[".coveragerc"] +) # microgenerator has a good .coveragerc file -# TODO(busunkim): Use latest sphinx after microgenerator transition -s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') # Library code is in "grafeas" instead of "google" s.replace("noxfile.py", """['"]google['"]""", '''"grafeas"''') -s.replace("noxfile.py", - '''"--cov=google.cloud.grafeas", - \s+"--cov=google.cloud",''', - '''"--cov=grafeas",''' +s.replace( + "noxfile.py", + """"--cov=google.cloud.grafeas", + \s+"--cov=google.cloud",""", + """"--cov=grafeas",""", ) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/grafeas/tests/unit/gapic/grafeas_v1/__init__.py b/packages/grafeas/tests/unit/gapic/grafeas_v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/packages/grafeas/tests/unit/gapic/grafeas_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py new file mode 100644 index 000000000000..3ad70a5e3082 --- /dev/null +++ b/packages/grafeas/tests/unit/gapic/grafeas_v1/test_grafeas.py @@ -0,0 +1,3549 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore +from grafeas.grafeas_v1.services.grafeas import GrafeasAsyncClient +from grafeas.grafeas_v1.services.grafeas import GrafeasClient +from grafeas.grafeas_v1.services.grafeas import pagers +from grafeas.grafeas_v1.services.grafeas import transports +from grafeas.grafeas_v1.types import attestation +from grafeas.grafeas_v1.types import build +from grafeas.grafeas_v1.types import common +from grafeas.grafeas_v1.types import cvss +from grafeas.grafeas_v1.types import deployment +from grafeas.grafeas_v1.types import discovery +from grafeas.grafeas_v1.types import grafeas +from grafeas.grafeas_v1.types import image +from grafeas.grafeas_v1.types import package +from grafeas.grafeas_v1.types import package as g_package +from grafeas.grafeas_v1.types import provenance +from grafeas.grafeas_v1.types import provenance as g_provenance +from grafeas.grafeas_v1.types import upgrade +from grafeas.grafeas_v1.types import vulnerability + + +def test_get_occurrence( + transport: str = "grpc", request_type=grafeas.GetOccurrenceRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_occurrence), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence( + name="name_value", + resource_uri="resource_uri_value", + note_name="note_name_value", + kind=common.NoteKind.VULNERABILITY, + remediation="remediation_value", + vulnerability=vulnerability.VulnerabilityOccurrence(type="type_value"), + ) + + response = client.get_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.GetOccurrenceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, grafeas.Occurrence) + + assert response.name == "name_value" + + assert response.resource_uri == "resource_uri_value" + + assert response.note_name == "note_name_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.remediation == "remediation_value" + + +def test_get_occurrence_from_dict(): + test_get_occurrence(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_occurrence_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.GetOccurrenceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Occurrence( + name="name_value", + resource_uri="resource_uri_value", + note_name="note_name_value", + kind=common.NoteKind.VULNERABILITY, + remediation="remediation_value", + ) + ) + + response = await client.get_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Occurrence) + + assert response.name == "name_value" + + assert response.resource_uri == "resource_uri_value" + + assert response.note_name == "note_name_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.remediation == "remediation_value" + + +def test_get_occurrence_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.GetOccurrenceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_occurrence), "__call__") as call: + call.return_value = grafeas.Occurrence() + + client.get_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_occurrence_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.GetOccurrenceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_occurrence), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence()) + + await client.get_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_occurrence_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_occurrence), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_occurrence(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_occurrence_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_occurrence( + grafeas.GetOccurrenceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_occurrence_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_occurrence(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_occurrence_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_occurrence( + grafeas.GetOccurrenceRequest(), name="name_value", + ) + + +def test_list_occurrences( + transport: str = "grpc", request_type=grafeas.ListOccurrencesRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListOccurrencesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.ListOccurrencesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListOccurrencesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_occurrences_from_dict(): + test_list_occurrences(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_occurrences_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.ListOccurrencesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListOccurrencesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOccurrencesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_occurrences_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.ListOccurrencesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_occurrences), "__call__" + ) as call: + call.return_value = grafeas.ListOccurrencesResponse() + + client.list_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_occurrences_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.ListOccurrencesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_occurrences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListOccurrencesResponse() + ) + + await client.list_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_occurrences_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grafeas.ListOccurrencesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_occurrences( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_occurrences_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_occurrences( + grafeas.ListOccurrencesRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_occurrences_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListOccurrencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListOccurrencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_occurrences( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_occurrences_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_occurrences( + grafeas.ListOccurrencesRequest(), + parent="parent_value", + filter="filter_value", + ) + + +def test_list_occurrences_pager(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_occurrences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_occurrences(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, grafeas.Occurrence) for i in results) + + +def test_list_occurrences_pages(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_occurrences), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + grafeas.ListOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + pages = list(client.list_occurrences(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_occurrences_async_pager(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_occurrences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + async_pager = await client.list_occurrences(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, grafeas.Occurrence) for i in responses) + + +@pytest.mark.asyncio +async def test_list_occurrences_async_pages(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_occurrences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_occurrences(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_occurrence( + transport: str = "grpc", request_type=grafeas.DeleteOccurrenceRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_occurrence(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.DeleteOccurrenceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_occurrence_from_dict(): + test_delete_occurrence(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_occurrence_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.DeleteOccurrenceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_occurrence_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.DeleteOccurrenceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_occurrence), "__call__" + ) as call: + call.return_value = None + + client.delete_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_occurrence_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.DeleteOccurrenceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_occurrence), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_occurrence_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_occurrence(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_occurrence_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_occurrence( + grafeas.DeleteOccurrenceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_occurrence_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_occurrence(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_occurrence_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_occurrence( + grafeas.DeleteOccurrenceRequest(), name="name_value", + ) + + +def test_create_occurrence( + transport: str = "grpc", request_type=grafeas.CreateOccurrenceRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence( + name="name_value", + resource_uri="resource_uri_value", + note_name="note_name_value", + kind=common.NoteKind.VULNERABILITY, + remediation="remediation_value", + vulnerability=vulnerability.VulnerabilityOccurrence(type="type_value"), + ) + + response = client.create_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.CreateOccurrenceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Occurrence) + + assert response.name == "name_value" + + assert response.resource_uri == "resource_uri_value" + + assert response.note_name == "note_name_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.remediation == "remediation_value" + + +def test_create_occurrence_from_dict(): + test_create_occurrence(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_occurrence_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.CreateOccurrenceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Occurrence( + name="name_value", + resource_uri="resource_uri_value", + note_name="note_name_value", + kind=common.NoteKind.VULNERABILITY, + remediation="remediation_value", + ) + ) + + response = await client.create_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Occurrence) + + assert response.name == "name_value" + + assert response.resource_uri == "resource_uri_value" + + assert response.note_name == "note_name_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.remediation == "remediation_value" + + +def test_create_occurrence_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.CreateOccurrenceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_occurrence), "__call__" + ) as call: + call.return_value = grafeas.Occurrence() + + client.create_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_occurrence_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.CreateOccurrenceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_occurrence), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence()) + + await client.create_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_occurrence_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_occurrence( + parent="parent_value", occurrence=grafeas.Occurrence(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].occurrence == grafeas.Occurrence(name="name_value") + + +def test_create_occurrence_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_occurrence( + grafeas.CreateOccurrenceRequest(), + parent="parent_value", + occurrence=grafeas.Occurrence(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_occurrence_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_occurrence( + parent="parent_value", occurrence=grafeas.Occurrence(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].occurrence == grafeas.Occurrence(name="name_value") + + +@pytest.mark.asyncio +async def test_create_occurrence_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_occurrence( + grafeas.CreateOccurrenceRequest(), + parent="parent_value", + occurrence=grafeas.Occurrence(name="name_value"), + ) + + +def test_batch_create_occurrences( + transport: str = "grpc", request_type=grafeas.BatchCreateOccurrencesRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.batch_create_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.BatchCreateOccurrencesResponse() + + response = client.batch_create_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.BatchCreateOccurrencesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.BatchCreateOccurrencesResponse) + + +def test_batch_create_occurrences_from_dict(): + test_batch_create_occurrences(request_type=dict) + + +@pytest.mark.asyncio +async def test_batch_create_occurrences_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.BatchCreateOccurrencesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.batch_create_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.BatchCreateOccurrencesResponse() + ) + + response = await client.batch_create_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.BatchCreateOccurrencesResponse) + + +def test_batch_create_occurrences_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.BatchCreateOccurrencesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.batch_create_occurrences), "__call__" + ) as call: + call.return_value = grafeas.BatchCreateOccurrencesResponse() + + client.batch_create_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_occurrences_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.BatchCreateOccurrencesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.batch_create_occurrences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.BatchCreateOccurrencesResponse() + ) + + await client.batch_create_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_batch_create_occurrences_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.batch_create_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.BatchCreateOccurrencesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_occurrences( + parent="parent_value", occurrences=[grafeas.Occurrence(name="name_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].occurrences == [grafeas.Occurrence(name="name_value")] + + +def test_batch_create_occurrences_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_occurrences( + grafeas.BatchCreateOccurrencesRequest(), + parent="parent_value", + occurrences=[grafeas.Occurrence(name="name_value")], + ) + + +@pytest.mark.asyncio +async def test_batch_create_occurrences_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.batch_create_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.BatchCreateOccurrencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.BatchCreateOccurrencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_create_occurrences( + parent="parent_value", occurrences=[grafeas.Occurrence(name="name_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].occurrences == [grafeas.Occurrence(name="name_value")] + + +@pytest.mark.asyncio +async def test_batch_create_occurrences_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_create_occurrences( + grafeas.BatchCreateOccurrencesRequest(), + parent="parent_value", + occurrences=[grafeas.Occurrence(name="name_value")], + ) + + +def test_update_occurrence( + transport: str = "grpc", request_type=grafeas.UpdateOccurrenceRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence( + name="name_value", + resource_uri="resource_uri_value", + note_name="note_name_value", + kind=common.NoteKind.VULNERABILITY, + remediation="remediation_value", + vulnerability=vulnerability.VulnerabilityOccurrence(type="type_value"), + ) + + response = client.update_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.UpdateOccurrenceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, grafeas.Occurrence) + + assert response.name == "name_value" + + assert response.resource_uri == "resource_uri_value" + + assert response.note_name == "note_name_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.remediation == "remediation_value" + + +def test_update_occurrence_from_dict(): + test_update_occurrence(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_occurrence_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.UpdateOccurrenceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Occurrence( + name="name_value", + resource_uri="resource_uri_value", + note_name="note_name_value", + kind=common.NoteKind.VULNERABILITY, + remediation="remediation_value", + ) + ) + + response = await client.update_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Occurrence) + + assert response.name == "name_value" + + assert response.resource_uri == "resource_uri_value" + + assert response.note_name == "note_name_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.remediation == "remediation_value" + + +def test_update_occurrence_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.UpdateOccurrenceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_occurrence), "__call__" + ) as call: + call.return_value = grafeas.Occurrence() + + client.update_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_occurrence_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.UpdateOccurrenceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_occurrence), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence()) + + await client.update_occurrence(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_occurrence_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_occurrence( + name="name_value", + occurrence=grafeas.Occurrence(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].occurrence == grafeas.Occurrence(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_occurrence_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_occurrence( + grafeas.UpdateOccurrenceRequest(), + name="name_value", + occurrence=grafeas.Occurrence(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_occurrence_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_occurrence), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Occurrence() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Occurrence()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_occurrence( + name="name_value", + occurrence=grafeas.Occurrence(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].occurrence == grafeas.Occurrence(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_occurrence_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_occurrence( + grafeas.UpdateOccurrenceRequest(), + name="name_value", + occurrence=grafeas.Occurrence(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_get_occurrence_note( + transport: str = "grpc", request_type=grafeas.GetOccurrenceNoteRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.get_occurrence_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082), + ) + + response = client.get_occurrence_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.GetOccurrenceNoteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_get_occurrence_note_from_dict(): + test_get_occurrence_note(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_occurrence_note_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.GetOccurrenceNoteRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_occurrence_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + ) + ) + + response = await client.get_occurrence_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_get_occurrence_note_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.GetOccurrenceNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_occurrence_note), "__call__" + ) as call: + call.return_value = grafeas.Note() + + client.get_occurrence_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_occurrence_note_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.GetOccurrenceNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_occurrence_note), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + + await client.get_occurrence_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_occurrence_note_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_occurrence_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_occurrence_note(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_occurrence_note_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_occurrence_note( + grafeas.GetOccurrenceNoteRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_occurrence_note_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_occurrence_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_occurrence_note(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_occurrence_note_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_occurrence_note( + grafeas.GetOccurrenceNoteRequest(), name="name_value", + ) + + +def test_get_note(transport: str = "grpc", request_type=grafeas.GetNoteRequest): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082), + ) + + response = client.get_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.GetNoteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_get_note_from_dict(): + test_get_note(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_note_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.GetNoteRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + ) + ) + + response = await client.get_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_get_note_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.GetNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_note), "__call__") as call: + call.return_value = grafeas.Note() + + client.get_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_note_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.GetNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_note), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + + await client.get_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_note_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_note(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_note_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_note( + grafeas.GetNoteRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_note_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_note(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_note_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_note( + grafeas.GetNoteRequest(), name="name_value", + ) + + +def test_list_notes(transport: str = "grpc", request_type=grafeas.ListNotesRequest): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_notes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListNotesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.ListNotesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNotesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_notes_from_dict(): + test_list_notes(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_notes_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.ListNotesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_notes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListNotesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNotesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_notes_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.ListNotesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_notes), "__call__") as call: + call.return_value = grafeas.ListNotesResponse() + + client.list_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_notes_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.ListNotesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_notes), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListNotesResponse() + ) + + await client.list_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_notes_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_notes), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListNotesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_notes( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_notes_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_notes( + grafeas.ListNotesRequest(), parent="parent_value", filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_notes_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_notes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListNotesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListNotesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_notes( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_notes_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_notes( + grafeas.ListNotesRequest(), parent="parent_value", filter="filter_value", + ) + + +def test_list_notes_pager(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_notes), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNotesResponse( + notes=[grafeas.Note(), grafeas.Note(), grafeas.Note(),], + next_page_token="abc", + ), + grafeas.ListNotesResponse(notes=[], next_page_token="def",), + grafeas.ListNotesResponse(notes=[grafeas.Note(),], next_page_token="ghi",), + grafeas.ListNotesResponse(notes=[grafeas.Note(), grafeas.Note(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_notes(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, grafeas.Note) for i in results) + + +def test_list_notes_pages(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_notes), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNotesResponse( + notes=[grafeas.Note(), grafeas.Note(), grafeas.Note(),], + next_page_token="abc", + ), + grafeas.ListNotesResponse(notes=[], next_page_token="def",), + grafeas.ListNotesResponse(notes=[grafeas.Note(),], next_page_token="ghi",), + grafeas.ListNotesResponse(notes=[grafeas.Note(), grafeas.Note(),],), + RuntimeError, + ) + pages = list(client.list_notes(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_notes_async_pager(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_notes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNotesResponse( + notes=[grafeas.Note(), grafeas.Note(), grafeas.Note(),], + next_page_token="abc", + ), + grafeas.ListNotesResponse(notes=[], next_page_token="def",), + grafeas.ListNotesResponse(notes=[grafeas.Note(),], next_page_token="ghi",), + grafeas.ListNotesResponse(notes=[grafeas.Note(), grafeas.Note(),],), + RuntimeError, + ) + async_pager = await client.list_notes(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, grafeas.Note) for i in responses) + + +@pytest.mark.asyncio +async def test_list_notes_async_pages(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_notes), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNotesResponse( + notes=[grafeas.Note(), grafeas.Note(), grafeas.Note(),], + next_page_token="abc", + ), + grafeas.ListNotesResponse(notes=[], next_page_token="def",), + grafeas.ListNotesResponse(notes=[grafeas.Note(),], next_page_token="ghi",), + grafeas.ListNotesResponse(notes=[grafeas.Note(), grafeas.Note(),],), + RuntimeError, + ) + pages = [] + async for page in (await client.list_notes(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_note(transport: str = "grpc", request_type=grafeas.DeleteNoteRequest): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.DeleteNoteRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_note_from_dict(): + test_delete_note(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_note_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.DeleteNoteRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_note_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.DeleteNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_note), "__call__") as call: + call.return_value = None + + client.delete_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_note_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.DeleteNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_note), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_note_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_note(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_note_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_note( + grafeas.DeleteNoteRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_note_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_note(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_note_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_note( + grafeas.DeleteNoteRequest(), name="name_value", + ) + + +def test_create_note(transport: str = "grpc", request_type=grafeas.CreateNoteRequest): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082), + ) + + response = client.create_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.CreateNoteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_create_note_from_dict(): + test_create_note(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_note_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.CreateNoteRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + ) + ) + + response = await client.create_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_create_note_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.CreateNoteRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_note), "__call__") as call: + call.return_value = grafeas.Note() + + client.create_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_note_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.CreateNoteRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_note), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + + await client.create_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_note_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_note( + parent="parent_value", + note_id="note_id_value", + note=grafeas.Note(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].note_id == "note_id_value" + + assert args[0].note == grafeas.Note(name="name_value") + + +def test_create_note_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_note( + grafeas.CreateNoteRequest(), + parent="parent_value", + note_id="note_id_value", + note=grafeas.Note(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_note_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_note( + parent="parent_value", + note_id="note_id_value", + note=grafeas.Note(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].note_id == "note_id_value" + + assert args[0].note == grafeas.Note(name="name_value") + + +@pytest.mark.asyncio +async def test_create_note_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_note( + grafeas.CreateNoteRequest(), + parent="parent_value", + note_id="note_id_value", + note=grafeas.Note(name="name_value"), + ) + + +def test_batch_create_notes( + transport: str = "grpc", request_type=grafeas.BatchCreateNotesRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.batch_create_notes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.BatchCreateNotesResponse() + + response = client.batch_create_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.BatchCreateNotesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.BatchCreateNotesResponse) + + +def test_batch_create_notes_from_dict(): + test_batch_create_notes(request_type=dict) + + +@pytest.mark.asyncio +async def test_batch_create_notes_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.BatchCreateNotesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.batch_create_notes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.BatchCreateNotesResponse() + ) + + response = await client.batch_create_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.BatchCreateNotesResponse) + + +def test_batch_create_notes_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.BatchCreateNotesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.batch_create_notes), "__call__" + ) as call: + call.return_value = grafeas.BatchCreateNotesResponse() + + client.batch_create_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_notes_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.BatchCreateNotesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.batch_create_notes), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.BatchCreateNotesResponse() + ) + + await client.batch_create_notes(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_batch_create_notes_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.batch_create_notes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.BatchCreateNotesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_notes( + parent="parent_value", notes={"key_value": grafeas.Note(name="name_value")}, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].notes == {"key_value": grafeas.Note(name="name_value")} + + +def test_batch_create_notes_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_create_notes( + grafeas.BatchCreateNotesRequest(), + parent="parent_value", + notes={"key_value": grafeas.Note(name="name_value")}, + ) + + +@pytest.mark.asyncio +async def test_batch_create_notes_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.batch_create_notes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.BatchCreateNotesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.BatchCreateNotesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_create_notes( + parent="parent_value", notes={"key_value": grafeas.Note(name="name_value")}, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].notes == {"key_value": grafeas.Note(name="name_value")} + + +@pytest.mark.asyncio +async def test_batch_create_notes_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_create_notes( + grafeas.BatchCreateNotesRequest(), + parent="parent_value", + notes={"key_value": grafeas.Note(name="name_value")}, + ) + + +def test_update_note(transport: str = "grpc", request_type=grafeas.UpdateNoteRequest): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + vulnerability=vulnerability.VulnerabilityNote(cvss_score=0.1082), + ) + + response = client.update_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.UpdateNoteRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_update_note_from_dict(): + test_update_note(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_note_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = grafeas.UpdateNoteRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.Note( + name="name_value", + short_description="short_description_value", + long_description="long_description_value", + kind=common.NoteKind.VULNERABILITY, + related_note_names=["related_note_names_value"], + ) + ) + + response = await client.update_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, grafeas.Note) + + assert response.name == "name_value" + + assert response.short_description == "short_description_value" + + assert response.long_description == "long_description_value" + + assert response.kind == common.NoteKind.VULNERABILITY + + assert response.related_note_names == ["related_note_names_value"] + + +def test_update_note_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.UpdateNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_note), "__call__") as call: + call.return_value = grafeas.Note() + + client.update_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_note_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.UpdateNoteRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_note), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + + await client.update_note(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_note_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_note), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_note( + name="name_value", + note=grafeas.Note(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].note == grafeas.Note(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_note_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_note( + grafeas.UpdateNoteRequest(), + name="name_value", + note=grafeas.Note(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_note_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_note), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.Note() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(grafeas.Note()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_note( + name="name_value", + note=grafeas.Note(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].note == grafeas.Note(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_note_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_note( + grafeas.UpdateNoteRequest(), + name="name_value", + note=grafeas.Note(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_list_note_occurrences( + transport: str = "grpc", request_type=grafeas.ListNoteOccurrencesRequest +): + client = GrafeasClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_note_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListNoteOccurrencesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_note_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == grafeas.ListNoteOccurrencesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListNoteOccurrencesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_note_occurrences_from_dict(): + test_list_note_occurrences(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_note_occurrences_async(transport: str = "grpc_asyncio"): + client = GrafeasAsyncClient(transport=transport,) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = grafeas.ListNoteOccurrencesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_note_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListNoteOccurrencesResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_note_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListNoteOccurrencesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_note_occurrences_field_headers(): + client = GrafeasClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.ListNoteOccurrencesRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_note_occurrences), "__call__" + ) as call: + call.return_value = grafeas.ListNoteOccurrencesResponse() + + client.list_note_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_note_occurrences_field_headers_async(): + client = GrafeasAsyncClient() + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = grafeas.ListNoteOccurrencesRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_note_occurrences), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListNoteOccurrencesResponse() + ) + + await client.list_note_occurrences(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_list_note_occurrences_flattened(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_note_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grafeas.ListNoteOccurrencesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_note_occurrences( + name="name_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].filter == "filter_value" + + +def test_list_note_occurrences_flattened_error(): + client = GrafeasClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_note_occurrences( + grafeas.ListNoteOccurrencesRequest(), + name="name_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_note_occurrences_flattened_async(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_note_occurrences), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grafeas.ListNoteOccurrencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + grafeas.ListNoteOccurrencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_note_occurrences( + name="name_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_note_occurrences_flattened_error_async(): + client = GrafeasAsyncClient() + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_note_occurrences( + grafeas.ListNoteOccurrencesRequest(), + name="name_value", + filter="filter_value", + ) + + +def test_list_note_occurrences_pager(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_note_occurrences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNoteOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListNoteOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), + ) + pager = client.list_note_occurrences(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, grafeas.Occurrence) for i in results) + + +def test_list_note_occurrences_pages(): + client = GrafeasClient() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_note_occurrences), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNoteOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListNoteOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + pages = list(client.list_note_occurrences(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_note_occurrences_async_pager(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_note_occurrences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNoteOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListNoteOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + async_pager = await client.list_note_occurrences(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, grafeas.Occurrence) for i in responses) + + +@pytest.mark.asyncio +async def test_list_note_occurrences_async_pages(): + client = GrafeasAsyncClient() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_note_occurrences), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + grafeas.ListNoteOccurrencesResponse( + occurrences=[ + grafeas.Occurrence(), + grafeas.Occurrence(), + grafeas.Occurrence(), + ], + next_page_token="abc", + ), + grafeas.ListNoteOccurrencesResponse(occurrences=[], next_page_token="def",), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(),], next_page_token="ghi", + ), + grafeas.ListNoteOccurrencesResponse( + occurrences=[grafeas.Occurrence(), grafeas.Occurrence(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_note_occurrences(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GrafeasGrpcTransport() + client = GrafeasClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.GrafeasGrpcTransport() + channel = transport.grpc_channel + assert channel + + transport = transports.GrafeasGrpcAsyncIOTransport() + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GrafeasClient() + assert isinstance(client._transport, transports.GrafeasGrpcTransport,) + + +def test_grafeas_base_transport(): + # Instantiate the base transport. + with mock.patch( + "grafeas.grafeas_v1.services.grafeas.transports.GrafeasTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GrafeasTransport() + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_occurrence", + "list_occurrences", + "delete_occurrence", + "create_occurrence", + "batch_create_occurrences", + "update_occurrence", + "get_occurrence_note", + "get_note", + "list_notes", + "delete_note", + "create_note", + "batch_create_notes", + "update_note", + "list_note_occurrences", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_grafeas_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "grafeas.grafeas_v1.services.grafeas.transports.GrafeasTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.GrafeasTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", scopes=(), quota_project_id="octopus", + ) + + +def test_grafeas_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + GrafeasClient() + adc.assert_called_once_with( + scopes=(), quota_project_id=None, + ) + + +def test_grafeas_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.GrafeasGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=(), quota_project_id="octopus", + ) + + +def test_grafeas_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.GrafeasGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_grafeas_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.GrafeasGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_occurrence_path(): + project = "squid" + occurrence = "clam" + + expected = "projects/{project}/occurrences/{occurrence}".format( + project=project, occurrence=occurrence, + ) + actual = GrafeasClient.occurrence_path(project, occurrence) + assert expected == actual + + +def test_parse_occurrence_path(): + expected = { + "project": "whelk", + "occurrence": "octopus", + } + path = GrafeasClient.occurrence_path(**expected) + + # Check that the path construction is reversible. + actual = GrafeasClient.parse_occurrence_path(path) + assert expected == actual + + +def test_note_path(): + project = "squid" + note = "clam" + + expected = "projects/{project}/notes/{note}".format(project=project, note=note,) + actual = GrafeasClient.note_path(project, note) + assert expected == actual + + +def test_parse_note_path(): + expected = { + "project": "whelk", + "note": "octopus", + } + path = GrafeasClient.note_path(**expected) + + # Check that the path construction is reversible. + actual = GrafeasClient.parse_note_path(path) + assert expected == actual diff --git a/packages/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py b/packages/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py deleted file mode 100644 index 5ac0efb61c95..000000000000 --- a/packages/grafeas/tests/unit/gapic/v1/test_grafeas_client_v1.py +++ /dev/null @@ -1,842 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.protobuf import empty_pb2 -from grafeas import grafeas_v1 -from grafeas.grafeas_v1.proto import grafeas_pb2 -from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestGrafeasClient(object): - def test_get_occurrence(self): - # Setup Expected Response - name_2 = "name2-1052831874" - resource_uri = "resourceUri-384040517" - note_name = "noteName1780787896" - remediation = "remediation779381797" - expected_response = { - "name": name_2, - "resource_uri": resource_uri, - "note_name": note_name, - "remediation": remediation, - } - expected_response = grafeas_pb2.Occurrence(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - response = client.get_occurrence(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.GetOccurrenceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - with pytest.raises(CustomException): - client.get_occurrence(name) - - def test_list_occurrences(self): - # Setup Expected Response - next_page_token = "" - occurrences_element = {} - occurrences = [occurrences_element] - expected_response = { - "next_page_token": next_page_token, - "occurrences": occurrences, - } - expected_response = grafeas_pb2.ListOccurrencesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = 
grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_occurrences(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.occurrences[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.ListOccurrencesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_occurrences_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_occurrences(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_occurrence(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - client.delete_occurrence(name) - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.DeleteOccurrenceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - with pytest.raises(CustomException): - client.delete_occurrence(name) - - def test_create_occurrence(self): - # Setup Expected Response - name = "name3373707" - resource_uri = "resourceUri-384040517" - note_name = "noteName1780787896" - remediation = "remediation779381797" - expected_response = { - "name": name, - "resource_uri": resource_uri, - "note_name": note_name, - "remediation": remediation, - } - expected_response = grafeas_pb2.Occurrence(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - occurrence = {} - - response = client.create_occurrence(parent, occurrence) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.CreateOccurrenceRequest( - 
parent=parent, occurrence=occurrence - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - occurrence = {} - - with pytest.raises(CustomException): - client.create_occurrence(parent, occurrence) - - def test_batch_create_occurrences(self): - # Setup Expected Response - expected_response = {} - expected_response = grafeas_pb2.BatchCreateOccurrencesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - occurrences = [] - - response = client.batch_create_occurrences(parent, occurrences) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.BatchCreateOccurrencesRequest( - parent=parent, occurrences=occurrences - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_batch_create_occurrences_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - occurrences = [] - - with pytest.raises(CustomException): - client.batch_create_occurrences(parent, occurrences) - - def test_update_occurrence(self): - # Setup Expected Response - name_2 = "name2-1052831874" - resource_uri = "resourceUri-384040517" - note_name = "noteName1780787896" - remediation = "remediation779381797" - expected_response = { - "name": name_2, - "resource_uri": resource_uri, - "note_name": note_name, - "remediation": remediation, - } - expected_response = grafeas_pb2.Occurrence(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - occurrence = {} - - response = client.update_occurrence(name, occurrence) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.UpdateOccurrenceRequest( - name=name, occurrence=occurrence - ) - actual_request 
= channel.requests[0][1] - assert expected_request == actual_request - - def test_update_occurrence_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - occurrence = {} - - with pytest.raises(CustomException): - client.update_occurrence(name, occurrence) - - def test_get_occurrence_note(self): - # Setup Expected Response - name_2 = "name2-1052831874" - short_description = "shortDescription-235369287" - long_description = "longDescription-1747792199" - expected_response = { - "name": name_2, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - response = client.get_occurrence_note(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.GetOccurrenceNoteRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_occurrence_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.occurrence_path("[PROJECT]", "[OCCURRENCE]") - - with pytest.raises(CustomException): - client.get_occurrence_note(name) - - def test_get_note(self): - # Setup Expected Response - name_2 = "name2-1052831874" - short_description = "shortDescription-235369287" - long_description = "longDescription-1747792199" - expected_response = { - "name": name_2, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - - response = client.get_note(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.GetNoteRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == 
actual_request - - def test_get_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - - with pytest.raises(CustomException): - client.get_note(name) - - def test_list_notes(self): - # Setup Expected Response - next_page_token = "" - notes_element = {} - notes = [notes_element] - expected_response = {"next_page_token": next_page_token, "notes": notes} - expected_response = grafeas_pb2.ListNotesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_notes(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.notes[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.ListNotesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_notes_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_notes(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_note(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - - client.delete_note(name) - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.DeleteNoteRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - - with pytest.raises(CustomException): - client.delete_note(name) - - def 
test_create_note(self): - # Setup Expected Response - name = "name3373707" - short_description = "shortDescription-235369287" - long_description = "longDescription-1747792199" - expected_response = { - "name": name, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - note_id = "noteId2129224840" - note = {} - - response = client.create_note(parent, note_id, note) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.CreateNoteRequest( - parent=parent, note_id=note_id, note=note - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - note_id = "noteId2129224840" - note = {} - - with pytest.raises(CustomException): - client.create_note(parent, note_id, note) - - def test_batch_create_notes(self): - # Setup Expected Response - expected_response = {} - expected_response = grafeas_pb2.BatchCreateNotesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - parent = client.project_path("[PROJECT]") - notes = {} - - response = client.batch_create_notes(parent, notes) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.BatchCreateNotesRequest( - parent=parent, notes=notes - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_batch_create_notes_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - parent = client.project_path("[PROJECT]") - notes = {} - - with pytest.raises(CustomException): - client.batch_create_notes(parent, notes) - - def test_update_note(self): - # Setup Expected Response - name_2 = "name2-1052831874" - short_description = "shortDescription-235369287" - 
long_description = "longDescription-1747792199" - expected_response = { - "name": name_2, - "short_description": short_description, - "long_description": long_description, - } - expected_response = grafeas_pb2.Note(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - note = {} - - response = client.update_note(name, note) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.UpdateNoteRequest(name=name, note=note) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_note_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - note = {} - - with pytest.raises(CustomException): - client.update_note(name, note) - - def test_list_note_occurrences(self): - # Setup Expected Response - next_page_token = "" - occurrences_element = {} - occurrences = [occurrences_element] - expected_response = { - "next_page_token": next_page_token, - "occurrences": occurrences, - } - expected_response = grafeas_pb2.ListNoteOccurrencesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup Request - name = client.note_path("[PROJECT]", "[NOTE]") - - paged_list_response = client.list_note_occurrences(name) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.occurrences[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = grafeas_pb2.ListNoteOccurrencesRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_note_occurrences_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - address = "[SERVICE_ADDRESS]" - - scopes = "SCOPE" - - transport = grafeas_grpc_transport.GrafeasGrpcTransport(address, scopes) - - client = grafeas_v1.GrafeasClient(transport) - - # Setup request - name = client.note_path("[PROJECT]", "[NOTE]") - - paged_list_response = client.list_note_occurrences(name) - with pytest.raises(CustomException): - list(paged_list_response)
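
For context, a minimal sketch of how the first check above can be exercised against the
microgenerator-style client: the hand-rolled ChannelStub is replaced by patching the
transport's RPC callable. This is an illustrative assumption about the standard generated
surface, not the content of the generated replacement tests; the resource name
"projects/p/occurrences/o" and the use of the client's internal _transport attribute are
placeholders.

    import mock

    from google.auth import credentials
    from grafeas import grafeas_v1


    def test_get_occurrence_sketch():
        # Anonymous credentials and a lazily created gRPC channel: no network I/O.
        client = grafeas_v1.GrafeasClient(
            credentials=credentials.AnonymousCredentials(), transport="grpc"
        )
        expected = grafeas_v1.Occurrence(name="projects/p/occurrences/o")

        # Patch the underlying unary-unary callable so the stubbed response is
        # returned; client._transport is an assumption about the generated layout.
        with mock.patch.object(
            type(client._transport.get_occurrence), "__call__", return_value=expected
        ):
            # The new surface takes request objects or keyword arguments instead
            # of the flattened positional signature used in the deleted tests.
            response = client.get_occurrence(name="projects/p/occurrences/o")

        assert response.name == expected.name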