diff --git a/packages/google-cloud-monitoring/.coveragerc b/packages/google-cloud-monitoring/.coveragerc
index dd39c8546c41..0d8e6297dc9c 100644
--- a/packages/google-cloud-monitoring/.coveragerc
+++ b/packages/google-cloud-monitoring/.coveragerc
@@ -17,6 +17,8 @@
# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
+omit =
+ google/cloud/__init__.py
[report]
fail_under = 100
@@ -32,4 +34,5 @@ omit =
*/gapic/*.py
*/proto/*.py
*/core/*.py
- */site-packages/*.py
\ No newline at end of file
+ */site-packages/*.py
+ google/cloud/__init__.py
diff --git a/packages/google-cloud-monitoring/.github/snippet-bot.yml b/packages/google-cloud-monitoring/.github/snippet-bot.yml
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-monitoring/.gitignore b/packages/google-cloud-monitoring/.gitignore
index b87e1ed580d9..b9daa52f118d 100644
--- a/packages/google-cloud-monitoring/.gitignore
+++ b/packages/google-cloud-monitoring/.gitignore
@@ -46,6 +46,7 @@ pip-log.txt
# Built documentation
docs/_build
bigquery/docs/generated
+docs.metadata
# Virtual environment
env/
@@ -57,4 +58,4 @@ system_tests/local_test_setup
# Make sure a generated file isn't accidentally committed.
pylintrc
-pylintrc.test
\ No newline at end of file
+pylintrc.test
diff --git a/packages/google-cloud-monitoring/.kokoro/build.sh b/packages/google-cloud-monitoring/.kokoro/build.sh
index 59c67f7090d7..712144332886 100755
--- a/packages/google-cloud-monitoring/.kokoro/build.sh
+++ b/packages/google-cloud-monitoring/.kokoro/build.sh
@@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation
python3.6 -m pip install --upgrade --quiet nox
python3.6 -m nox --version
-python3.6 -m nox
+# If NOX_SESSION is set, it only runs the specified session,
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3.6 -m nox -s "${NOX_SESSION:-}"
+else
+ python3.6 -m nox
+fi
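+
+# For example (session name is an assumption; see noxfile.py for the list):
+#   NOX_SESSION=lint ./.kokoro/build.sh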
diff --git a/packages/google-cloud-monitoring/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-monitoring/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 000000000000..412b0b56a921
--- /dev/null
+++ b/packages/google-cloud-monitoring/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,98 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+ && export GNUPGHOME="$(mktemp -d)" \
+ && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+ && /tmp/fetch_gpg_keys.sh \
+ && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+ wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+ && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+ && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+ && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+ && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+ && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+ && rm python-${PYTHON_VERSION}.tar.xz \
+ && cd /usr/src/python-${PYTHON_VERSION} \
+ && ./configure \
+ --enable-shared \
+ # This works only on Python 2.7 and throws a warning on every other
+ # version, but seems otherwise harmless.
+ --enable-unicode=ucs4 \
+ --with-system-ffi \
+ --without-ensurepip \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
+ ; done \
+ && rm -rf "${GNUPGHOME}" \
+ && rm -rf /usr/src/python* \
+ && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.7 /tmp/get-pip.py \
+ && python3.8 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
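+
+# A minimal local sanity check (the image tag is hypothetical; trampoline_v2.sh
+# normally builds and runs this image for you):
+#   docker build -t docs-image .kokoro/docker/docs
+#   docker run --rm docs-image python3.7 --version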
diff --git a/packages/google-cloud-monitoring/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-monitoring/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 000000000000..d653dd868e4b
--- /dev/null
+++ b/packages/google-cloud-monitoring/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+ if [[ "${#}" -le 1 ]]; then
+ echo "Usage: ${0} retry_count commands.."
+ exit 1
+ fi
+ local retries=${1}
+ local command="${@:2}"
+ until [[ "${retries}" -le 0 ]]; do
+ $command && return 0
+ if [[ $? -ne 0 ]]; then
+ echo "command failed, retrying"
+ ((retries--))
+ fi
+ done
+ return 1
+}
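+# For example, `retry 3 gpg --version` re-runs the command up to 3 times and
+# returns 0 on the first success, or 1 once the retries are exhausted.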
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Łukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/packages/google-cloud-monitoring/.kokoro/docs/common.cfg b/packages/google-cloud-monitoring/.kokoro/docs/common.cfg
index f8e61870db67..bad43f76aca8 100644
--- a/packages/google-cloud-monitoring/.kokoro/docs/common.cfg
+++ b/packages/google-cloud-monitoring/.kokoro/docs/common.cfg
@@ -11,12 +11,12 @@ action {
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-monitoring/.kokoro/trampoline.sh"
+build_file: "python-monitoring/.kokoro/trampoline_v2.sh"
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
}
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
@@ -28,6 +28,23 @@ env_vars: {
value: "docs-staging"
}
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "docs-staging-v2-staging"
+}
+
+# Upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# Always rebuild the docker image from this Dockerfile.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
# Fetch the token needed for reporting release status to GitHub
before_action {
fetch_keystore {
diff --git a/packages/google-cloud-monitoring/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-monitoring/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 000000000000..1118107829b7
--- /dev/null
+++ b/packages/google-cloud-monitoring/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,17 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
diff --git a/packages/google-cloud-monitoring/.kokoro/populate-secrets.sh b/packages/google-cloud-monitoring/.kokoro/populate-secrets.sh
new file mode 100755
index 000000000000..f52514257ef0
--- /dev/null
+++ b/packages/google-cloud-monitoring/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
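+
+# For example, with SECRET_MANAGER_KEYS="releasetool-publish-reporter-pem",
+# the secret is written to
+# "${KOKORO_GFILE_DIR}/secret_manager/releasetool-publish-reporter-pem".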
diff --git a/packages/google-cloud-monitoring/.kokoro/publish-docs.sh b/packages/google-cloud-monitoring/.kokoro/publish-docs.sh
index 58275ed36b15..8acb14e802b0 100755
--- a/packages/google-cloud-monitoring/.kokoro/publish-docs.sh
+++ b/packages/google-cloud-monitoring/.kokoro/publish-docs.sh
@@ -18,26 +18,16 @@ set -eo pipefail
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
-cd github/python-monitoring
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+export PATH="${HOME}/.local/bin:${PATH}"
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
# build docs
nox -s docs
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
+python3 -m pip install --user gcp-docuploader
# create metadata
python3 -m docuploader create-metadata \
@@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/packages/google-cloud-monitoring/.kokoro/release/common.cfg b/packages/google-cloud-monitoring/.kokoro/release/common.cfg
index bf6bb9aff4b3..5d69ebaf540b 100644
--- a/packages/google-cloud-monitoring/.kokoro/release/common.cfg
+++ b/packages/google-cloud-monitoring/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
value: "github/python-monitoring/.kokoro/release.sh"
}
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
}
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-monitoring/.kokoro/samples/python3.6/common.cfg
index 7c52aefa4d6f..9ac55bd88cbe 100644
--- a/packages/google-cloud-monitoring/.kokoro/samples/python3.6/common.cfg
+++ b/packages/google-cloud-monitoring/.kokoro/samples/python3.6/common.cfg
@@ -24,6 +24,12 @@ env_vars: {
value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
# Download secrets for samples
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
diff --git a/packages/google-cloud-monitoring/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-monitoring/.kokoro/samples/python3.7/common.cfg
index 15dfa0370b83..1f5ce5da414e 100644
--- a/packages/google-cloud-monitoring/.kokoro/samples/python3.7/common.cfg
+++ b/packages/google-cloud-monitoring/.kokoro/samples/python3.7/common.cfg
@@ -24,6 +24,12 @@ env_vars: {
value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
# Download secrets for samples
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
diff --git a/packages/google-cloud-monitoring/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-monitoring/.kokoro/samples/python3.8/common.cfg
index eddefb11e0bb..816c3b793399 100644
--- a/packages/google-cloud-monitoring/.kokoro/samples/python3.8/common.cfg
+++ b/packages/google-cloud-monitoring/.kokoro/samples/python3.8/common.cfg
@@ -24,6 +24,12 @@ env_vars: {
value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
# Download secrets for samples
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
diff --git a/packages/google-cloud-monitoring/.kokoro/trampoline.sh b/packages/google-cloud-monitoring/.kokoro/trampoline.sh
index e8c4251f3ed4..f39236e943a8 100755
--- a/packages/google-cloud-monitoring/.kokoro/trampoline.sh
+++ b/packages/google-cloud-monitoring/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/.kokoro/trampoline_v2.sh b/packages/google-cloud-monitoring/.kokoro/trampoline_v2.sh
new file mode 100755
index 000000000000..719bcd5ba84d
--- /dev/null
+++ b/packages/google-cloud-monitoring/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after a
+# successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# Repo-specific environment variables may also be defined in .trampolinerc
+# at the project root.
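+#
+# A hypothetical local run (any image with the needed tooling works):
+#   TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/python-lib-docs \
+#     TRAMPOLINE_BUILD_FILE=.kokoro/build.sh \
+#     .kokoro/trampoline_v2.sh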
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to indicate which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # descriptive env var for indicating it's on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For Build Cop Bot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Find the repository root (the nearest ancestor directory containing .git).
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
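+# e.g. repo_root /repo/.kokoro/docker/docs -> /repo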
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there, otherwise, traverse from `pwd`
+# to find `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
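+# e.g. "github/python-monitoring/.kokoro/build.sh" -> ".kokoro/build.sh"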
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+ # ON CI systems, we want to suppress docker build logs, only
+ # output the logs when it fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+ # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+ # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the docker script with the user id. Because the docker image gets to
+ # write in ${PWD} you typically want this to be your user id.
+ # To allow docker in docker, we need to use docker gid on the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount the /tmp so that docker in docker can mount the files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/packages/google-cloud-monitoring/.trampolinerc b/packages/google-cloud-monitoring/.trampolinerc
new file mode 100644
index 000000000000..995ee29111e1
--- /dev/null
+++ b/packages/google-cloud-monitoring/.trampolinerc
@@ -0,0 +1,51 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/packages/google-cloud-monitoring/CONTRIBUTING.rst b/packages/google-cloud-monitoring/CONTRIBUTING.rst
index 0a4ada23a370..bf2f78be9c28 100644
--- a/packages/google-cloud-monitoring/CONTRIBUTING.rst
+++ b/packages/google-cloud-monitoring/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
diff --git a/packages/google-cloud-monitoring/docs/_templates/layout.html b/packages/google-cloud-monitoring/docs/_templates/layout.html
index 228529efe2d2..6316a537f72b 100644
--- a/packages/google-cloud-monitoring/docs/_templates/layout.html
+++ b/packages/google-cloud-monitoring/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/packages/google-cloud-monitoring/docs/conf.py b/packages/google-cloud-monitoring/docs/conf.py
index ab6d693cc504..c283c715d5e0 100644
--- a/packages/google-cloud-monitoring/docs/conf.py
+++ b/packages/google-cloud-monitoring/docs/conf.py
@@ -20,12 +20,16 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that cannot read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -35,6 +39,7 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
@@ -90,7 +95,12 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -337,7 +347,7 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
diff --git a/packages/google-cloud-monitoring/noxfile.py b/packages/google-cloud-monitoring/noxfile.py
index fef9e7ba8ecf..481973ccd16b 100644
--- a/packages/google-cloud-monitoring/noxfile.py
+++ b/packages/google-cloud-monitoring/noxfile.py
@@ -23,11 +23,11 @@
import nox
-BLACK_VERSION = "black==19.3b0"
+BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.7"
-SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.7"]
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"]
@@ -39,7 +39,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -54,7 +56,9 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -96,6 +100,10 @@ def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
@@ -111,7 +119,9 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest", "google-cloud-testutils")
+ session.install(
+ "mock", "pytest", "google-cloud-testutils",
+ )
session.install("-e", ".")
# Run py.test against the system tests.
@@ -154,3 +164,38 @@ def docs(session):
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
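+
+
+# Locally, `nox -s docfx` writes the YAML under docs/_build/html/docfx_yaml,
+# which .kokoro/publish-docs.sh uploads with --destination-prefix docfx.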
diff --git a/packages/google-cloud-monitoring/samples/AUTHORING_GUIDE.md b/packages/google-cloud-monitoring/samples/AUTHORING_GUIDE.md
new file mode 100644
index 000000000000..55c97b32f4c1
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/CONTRIBUTING.md b/packages/google-cloud-monitoring/samples/CONTRIBUTING.md
new file mode 100644
index 000000000000..34c882b6f1a3
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/.gitignore b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/.gitignore
new file mode 100644
index 000000000000..de0a466d79c3
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/.gitignore
@@ -0,0 +1 @@
+backup.json
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst
new file mode 100644
index 000000000000..bb59aad5feeb
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst
@@ -0,0 +1,138 @@
+.. This file is automatically generated. Do not edit this file directly.
+
+Google Stackdriver Alerting API Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/alerts-client/README.rst
+
+
+This directory contains samples for Google Stackdriver Alerting API. Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver's Alerting API allows you to create, delete, and make backup copies of your alert policies.
+
+
+
+
+.. _Google Stackdriver Alerting API: https://cloud.google.com/monitoring/alerts/
+
+To run the sample, you need to enable the API at: https://console.cloud.google.com/apis/library/monitoring.googleapis.com
+
+To run the sample, you need to have `Monitoring Admin` role.
+
+Please visit `the Cloud Console UI of this API <https://console.cloud.google.com/monitoring>`_ and `create a new Workspace with the same name as your Cloud project <https://cloud.google.com/monitoring/workspaces/create>`_.
+
+
+Setup
+-------------------------------------------------------------------------------
+
+
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
+
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
+
+Samples
+-------------------------------------------------------------------------------
+
+Snippets
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/alerts-client/snippets.py,monitoring/api/v3/alerts-client/README.rst
+
+
+
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python snippets.py
+
+ usage: snippets.py [-h]
+ {list-alert-policies,list-notification-channels,enable-alert-policies,disable-alert-policies,replace-notification-channels,backup,restore}
+ ...
+
+ Demonstrates AlertPolicy API operations.
+
+ positional arguments:
+ {list-alert-policies,list-notification-channels,enable-alert-policies,disable-alert-policies,replace-notification-channels,backup,restore}
+ list-alert-policies
+ list-notification-channels
+ enable-alert-policies
+ Enable or disable alert policies in a project.
+ Arguments: project_name (str) enable (bool): Enable or
+ disable the policies. filter_ (str, optional): Only
+ enable/disable alert policies that match this filter_.
+ See
+ https://cloud.google.com/monitoring/api/v3/sorting-
+ and-filtering
+ disable-alert-policies
+ Enable or disable alert policies in a project.
+ Arguments: project_name (str) enable (bool): Enable or
+ disable the policies. filter_ (str, optional): Only
+ enable/disable alert policies that match this filter_.
+ See
+ https://cloud.google.com/monitoring/api/v3/sorting-
+ and-filtering
+ replace-notification-channels
+ backup
+ restore
+
+ optional arguments:
+ -h, --help show this help message and exit
+
+
+
+
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst.in b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst.in
new file mode 100644
index 000000000000..00b280124ea4
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/README.rst.in
@@ -0,0 +1,33 @@
+# This file is used to generate README.rst
+
+product:
+ name: Google Stackdriver Alerting API
+ short_name: Stackdriver Alerting API
+ url: https://cloud.google.com/monitoring/alerts/
+ description: >
+ Stackdriver Monitoring collects metrics, events, and metadata from Google
+ Cloud Platform, Amazon Web Services (AWS), hosted uptime probes,
+ application instrumentation, and a variety of common application
+ components including Cassandra, Nginx, Apache Web Server, Elasticsearch
+ and many others. Stackdriver's Alerting API allows you to create,
+ delete, and make backup copies of your alert policies.
+
+required_api_url: https://console.cloud.google.com/apis/library/monitoring.googleapis.com
+required_role: Monitoring Admin
+other_required_steps: >
+ Please visit [the Cloud Console UI of this
+ API](https://console.cloud.google.com/monitoring) and create a new
+ Workspace with the same name as your Cloud project.
+
+setup:
+- auth
+- install_deps
+
+samples:
+- name: Snippets
+ file: snippets.py
+ show_help: true
+
+cloud_client_library: true
+
+folder: monitoring/api/v3/alerts-client
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile.py
new file mode 100644
index 000000000000..ba55d7ce53ca
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile.py
@@ -0,0 +1,224 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ 'ignored_versions': ["2.7"],
+
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+ # build-specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ 'envs': {},
+}
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append('.')
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
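+# For example, a sibling noxfile_config.py defining
+#   TEST_CONFIG_OVERRIDE = {'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT'}
+# points the tests at a build-specific project (as noxfile_config.py below does).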
+
+
+def get_pytest_env_vars():
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+ # Override the GCLOUD_PROJECT and the alias.
+ env_key = TEST_CONFIG['gcloud_project_env']
+ # This should error out if not set.
+ ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG['envs'])
+ return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+#
+# Style Checks
+#
+
+
+def _determine_local_import_names(start_dir):
+ """Determines all import names that should be considered "local".
+
+ This is used when running the linter to ensure that import order is
+ properly checked.
+ """
+ file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+ return [
+ basename
+ for basename, extension in file_ext_pairs
+ if extension == ".py"
+ or os.path.isdir(os.path.join(start_dir, basename))
+ and basename not in ("__pycache__",)
+ ]
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ':'
+# E266: too many leading '#' for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--import-order-style=google",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session):
+ session.install("flake8", "flake8-import-order")
+
+ local_names = _determine_local_import_names(".")
+ args = FLAKE8_COMMON_ARGS + [
+ "--application-import-names",
+ ",".join(local_names),
+ "."
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(session, post_install=None):
+ """Runs py.test for a particular project."""
+ if os.path.exists("requirements.txt"):
+ session.install("-r", "requirements.txt")
+
+ if os.path.exists("requirements-test.txt"):
+ session.install("-r", "requirements-test.txt")
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs),
+ # Pytest will return 5 when no tests are collected. This can happen
+ # on Travis where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars()
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session):
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip("SKIPPED: {} tests are disabled for this sample.".format(
+ session.python
+ ))
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root():
+ """ Returns the root folder of the project. """
+ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session, path):
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile_config.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile_config.py
new file mode 100644
index 000000000000..664c58309d70
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/noxfile_config.py
@@ -0,0 +1,42 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, and it will be imported by
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+ # You can opt out from the test for specific Python versions.
+ 'ignored_versions': ["2.7"],
+
+    # Declare optional test sessions you want to opt in to. Currently we
+ # have the following optional test sessions:
+ # 'cloud_run' # Test session for Cloud Run application.
+ 'opt_in_sessions': [],
+
+ # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+    # build-specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ 'envs': {},
+}
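+# A hypothetical customization: to pin the tests to the project named by
+# GOOGLE_CLOUD_PROJECT and inject an extra (non-secret) variable, the
+# override could instead read:
+#
+# TEST_CONFIG_OVERRIDE = {
+#     'ignored_versions': ["2.7"],
+#     'opt_in_sessions': [],
+#     'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+#     'envs': {'SAMPLE_LOG_LEVEL': 'debug'},  # hypothetical variable
+# }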
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements-test.txt b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements-test.txt
new file mode 100644
index 000000000000..ec623710c6ef
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements-test.txt
@@ -0,0 +1,3 @@
+pytest==6.0.1
+retrying==1.3.3
+flaky==3.7.0
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements.txt b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements.txt
new file mode 100644
index 000000000000..bc7a2fe57c8b
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/requirements.txt
@@ -0,0 +1,2 @@
+google-cloud-monitoring==1.1.0
+tabulate==0.8.7
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets.py
new file mode 100644
index 000000000000..80254232e6af
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets.py
@@ -0,0 +1,343 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+
+from google.cloud import monitoring_v3
+import google.protobuf.json_format
+import tabulate
+
+
+# [START monitoring_alert_list_policies]
+def list_alert_policies(project_name):
+ client = monitoring_v3.AlertPolicyServiceClient()
+ policies = client.list_alert_policies(project_name)
+ print(tabulate.tabulate(
+ [(policy.name, policy.display_name) for policy in policies],
+ ('name', 'display_name')))
+# [END monitoring_alert_list_policies]
+
+
+# [START monitoring_alert_list_channels]
+def list_notification_channels(project_name):
+ client = monitoring_v3.NotificationChannelServiceClient()
+ channels = client.list_notification_channels(project_name)
+ print(tabulate.tabulate(
+ [(channel.name, channel.display_name) for channel in channels],
+ ('name', 'display_name')))
+# [END monitoring_alert_list_channels]
+
+
+# [START monitoring_alert_enable_policies]
+def enable_alert_policies(project_name, enable, filter_=None):
+ """Enable or disable alert policies in a project.
+
+ Arguments:
+ project_name (str)
+ enable (bool): Enable or disable the policies.
+ filter_ (str, optional): Only enable/disable alert policies that match
+ this filter_. See
+ https://cloud.google.com/monitoring/api/v3/sorting-and-filtering
+ """
+
+ client = monitoring_v3.AlertPolicyServiceClient()
+ policies = client.list_alert_policies(project_name, filter_=filter_)
+
+ for policy in policies:
+ if bool(enable) == policy.enabled.value:
+ print('Policy', policy.name, 'is already',
+ 'enabled' if policy.enabled.value else 'disabled')
+ else:
+ policy.enabled.value = bool(enable)
+ mask = monitoring_v3.types.field_mask_pb2.FieldMask()
+ mask.paths.append('enabled')
+ client.update_alert_policy(policy, mask)
+ print('Enabled' if enable else 'Disabled', policy.name)
+# [END monitoring_alert_enable_policies]
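+# A usage sketch with hypothetical values: disable only the policies
+# whose display name matches, using the filter syntax documented at
+# https://cloud.google.com/monitoring/api/v3/sorting-and-filtering:
+#
+#   enable_alert_policies(
+#       'projects/my-project', enable=False,
+#       filter_='display_name = "CPU usage"')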
+
+
+# [START monitoring_alert_replace_channels]
+def replace_notification_channels(project_name, alert_policy_id, channel_ids):
+ _, project_id = project_name.split('/')
+ alert_client = monitoring_v3.AlertPolicyServiceClient()
+ channel_client = monitoring_v3.NotificationChannelServiceClient()
+ policy = monitoring_v3.types.alert_pb2.AlertPolicy()
+ policy.name = alert_client.alert_policy_path(project_id, alert_policy_id)
+
+ for channel_id in channel_ids:
+ policy.notification_channels.append(
+ channel_client.notification_channel_path(project_id, channel_id))
+
+ mask = monitoring_v3.types.field_mask_pb2.FieldMask()
+ mask.paths.append('notification_channels')
+ updated_policy = alert_client.update_alert_policy(policy, mask)
+ print('Updated', updated_policy.name)
+# [END monitoring_alert_replace_channels]
+
+
+# [START monitoring_alert_delete_channel]
+def delete_notification_channels(project_name, channel_ids, force=None):
+ channel_client = monitoring_v3.NotificationChannelServiceClient()
+ for channel_id in channel_ids:
+ channel_name = '{}/notificationChannels/{}'.format(
+ project_name, channel_id)
+ try:
+ channel_client.delete_notification_channel(
+ channel_name, force=force)
+ print('Channel {} deleted'.format(channel_name))
+ except ValueError:
+ print('The parameters are invalid')
+ except Exception as e:
+ print('API call failed: {}'.format(e))
+# [END monitoring_alert_delete_channel]
+
+
+# [START monitoring_alert_backup_policies]
+def backup(project_name, backup_filename):
+ alert_client = monitoring_v3.AlertPolicyServiceClient()
+ channel_client = monitoring_v3.NotificationChannelServiceClient()
+ record = {'project_name': project_name,
+ 'policies': list(alert_client.list_alert_policies(project_name)),
+ 'channels': list(channel_client.list_notification_channels(
+ project_name))}
+    with open(backup_filename, 'wt') as backup_file:
+        json.dump(record, backup_file, cls=ProtoEncoder, indent=2)
+ print('Backed up alert policies and notification channels to {}.'.format(
+ backup_filename)
+ )
+
+
+class ProtoEncoder(json.JSONEncoder):
+ """Uses google.protobuf.json_format to encode protobufs as json."""
+ def default(self, obj):
+ if type(obj) in (monitoring_v3.types.alert_pb2.AlertPolicy,
+ monitoring_v3.types.notification_pb2.
+ NotificationChannel):
+ text = google.protobuf.json_format.MessageToJson(obj)
+ return json.loads(text)
+ return super(ProtoEncoder, self).default(obj)
+# [END monitoring_alert_backup_policies]
+
+
+# [START monitoring_alert_restore_policies]
+# [START monitoring_alert_create_policy]
+# [START monitoring_alert_create_channel]
+# [START monitoring_alert_update_channel]
+# [START monitoring_alert_enable_channel]
+def restore(project_name, backup_filename):
+ print('Loading alert policies and notification channels from {}.'.format(
+ backup_filename)
+ )
+    with open(backup_filename, 'rt') as backup_file:
+        record = json.load(backup_file)
+ is_same_project = project_name == record['project_name']
+ # Convert dicts to AlertPolicies.
+ policies_json = [json.dumps(policy) for policy in record['policies']]
+ policies = [google.protobuf.json_format.Parse(
+ policy_json, monitoring_v3.types.alert_pb2.AlertPolicy())
+ for policy_json in policies_json]
+ # Convert dicts to NotificationChannels
+ channels_json = [json.dumps(channel) for channel in record['channels']]
+ channels = [google.protobuf.json_format.Parse(
+ channel_json, monitoring_v3.types.notification_pb2.
+ NotificationChannel()) for channel_json in channels_json]
+
+ # Restore the channels.
+ channel_client = monitoring_v3.NotificationChannelServiceClient()
+ channel_name_map = {}
+
+ for channel in channels:
+ updated = False
+ print('Updating channel', channel.display_name)
+ # This field is immutable and it is illegal to specify a
+ # non-default value (UNVERIFIED or VERIFIED) in the
+ # Create() or Update() operations.
+ channel.verification_status = monitoring_v3.enums.NotificationChannel.\
+ VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
+
+ if is_same_project:
+ try:
+ channel_client.update_notification_channel(channel)
+ updated = True
+ except google.api_core.exceptions.NotFound:
+ pass # The channel was deleted. Create it below.
+
+ if not updated:
+ # The channel no longer exists. Recreate it.
+ old_name = channel.name
+ channel.ClearField("name")
+ new_channel = channel_client.create_notification_channel(
+ project_name, channel)
+ channel_name_map[old_name] = new_channel.name
+
+ # Restore the alerts
+ alert_client = monitoring_v3.AlertPolicyServiceClient()
+
+ for policy in policies:
+ print('Updating policy', policy.display_name)
+ # These two fields cannot be set directly, so clear them.
+ policy.ClearField('creation_record')
+ policy.ClearField('mutation_record')
+
+ # Update old channel names with new channel names.
+ for i, channel in enumerate(policy.notification_channels):
+ new_channel = channel_name_map.get(channel)
+ if new_channel:
+ policy.notification_channels[i] = new_channel
+
+ updated = False
+
+ if is_same_project:
+ try:
+ alert_client.update_alert_policy(policy)
+ updated = True
+ except google.api_core.exceptions.NotFound:
+ pass # The policy was deleted. Create it below.
+ except google.api_core.exceptions.InvalidArgument:
+ # Annoying that API throws InvalidArgument when the policy
+ # does not exist. Seems like it should throw NotFound.
+ pass # The policy was deleted. Create it below.
+
+ if not updated:
+ # The policy no longer exists. Recreate it.
+ old_name = policy.name
+ policy.ClearField("name")
+ for condition in policy.conditions:
+ condition.ClearField("name")
+ policy = alert_client.create_alert_policy(project_name, policy)
+ print('Updated', policy.name)
+# [END monitoring_alert_enable_channel]
+# [END monitoring_alert_restore_policies]
+# [END monitoring_alert_create_policy]
+# [END monitoring_alert_create_channel]
+# [END monitoring_alert_update_channel]
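+# A minimal round-trip sketch using the helpers above (hypothetical
+# project and filename): ProtoEncoder lets json.dump() serialize the
+# AlertPolicy and NotificationChannel protobufs in the backup record,
+# and restore() parses them back with google.protobuf.json_format:
+#
+#   backup('projects/my-project', 'backup.json')
+#   restore('projects/my-project', 'backup.json')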
+
+
+class MissingProjectIdError(Exception):
+ pass
+
+
+def project_id():
+ """Retreieves the project id from the environment variable.
+
+ Raises:
+ MissingProjectIdError -- When not set.
+
+ Returns:
+ str -- the project name
+ """
+    project_id = os.environ.get('GOOGLE_CLOUD_PROJECT')
+
+    if not project_id:
+        raise MissingProjectIdError(
+            'Set the environment variable ' +
+            'GOOGLE_CLOUD_PROJECT to your Google Cloud Project Id.')
+ return project_id
+
+
+def project_name():
+ return 'projects/' + project_id()
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser(
+ description='Demonstrates AlertPolicy API operations.')
+
+ subparsers = parser.add_subparsers(dest='command')
+
+ list_alert_policies_parser = subparsers.add_parser(
+ 'list-alert-policies',
+ help=list_alert_policies.__doc__
+ )
+
+ list_notification_channels_parser = subparsers.add_parser(
+ 'list-notification-channels',
+        help=list_notification_channels.__doc__
+ )
+
+ enable_alert_policies_parser = subparsers.add_parser(
+ 'enable-alert-policies',
+ help=enable_alert_policies.__doc__
+ )
+ enable_alert_policies_parser.add_argument(
+ '--filter',
+ )
+
+ disable_alert_policies_parser = subparsers.add_parser(
+ 'disable-alert-policies',
+ help=enable_alert_policies.__doc__
+ )
+ disable_alert_policies_parser.add_argument(
+ '--filter',
+ )
+
+ replace_notification_channels_parser = subparsers.add_parser(
+ 'replace-notification-channels',
+ help=replace_notification_channels.__doc__
+ )
+ replace_notification_channels_parser.add_argument(
+ '-p', '--alert_policy_id',
+ required=True
+ )
+ replace_notification_channels_parser.add_argument(
+ '-c', '--notification_channel_id',
+ required=True,
+ action='append'
+ )
+
+ backup_parser = subparsers.add_parser(
+ 'backup',
+ help=backup.__doc__
+ )
+ backup_parser.add_argument(
+ '--backup_to_filename',
+ required=True
+ )
+
+ restore_parser = subparsers.add_parser(
+ 'restore',
+ help=restore.__doc__
+ )
+ restore_parser.add_argument(
+ '--restore_from_filename',
+ required=True
+ )
+
+ args = parser.parse_args()
+
+ if args.command == 'list-alert-policies':
+ list_alert_policies(project_name())
+
+ elif args.command == 'list-notification-channels':
+ list_notification_channels(project_name())
+
+ elif args.command == 'enable-alert-policies':
+ enable_alert_policies(project_name(), enable=True, filter_=args.filter)
+
+ elif args.command == 'disable-alert-policies':
+ enable_alert_policies(project_name(), enable=False,
+ filter_=args.filter)
+
+ elif args.command == 'replace-notification-channels':
+ replace_notification_channels(project_name(), args.alert_policy_id,
+ args.notification_channel_id)
+
+ elif args.command == 'backup':
+ backup(project_name(), args.backup_to_filename)
+
+ elif args.command == 'restore':
+ restore(project_name(), args.restore_from_filename)
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets_test.py
new file mode 100644
index 000000000000..550a8dc97596
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/snippets_test.py
@@ -0,0 +1,195 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import random
+import string
+import time
+
+from google.api_core.exceptions import Aborted
+from google.api_core.exceptions import DeadlineExceeded
+from google.api_core.exceptions import NotFound
+from google.api_core.exceptions import ServiceUnavailable
+from google.cloud import monitoring_v3
+import google.protobuf.json_format
+import pytest
+from retrying import retry
+
+import snippets
+
+
+# We assume we have access to a good randomness source.
+random.seed()
+
+
+def random_name(length):
+ return ''.join(
+        [random.choice(string.ascii_lowercase) for _ in range(length)])
+
+
+def retry_on_exceptions(exception):
+ return isinstance(
+ exception, (Aborted, ServiceUnavailable, DeadlineExceeded))
+
+
+def delay_on_aborted(err, *args):
+ if retry_on_exceptions(err[1]):
+        # Add randomness to avoid repeated conflicts.
+ time.sleep(5 + (random.randint(0, 9) * 0.1))
+ return True
+ return False
+
+
+class PochanFixture:
+ """A test fixture that creates an alert POlicy and a notification CHANnel,
+ hence the name, pochan.
+ """
+
+ def __init__(self):
+ self.project_id = snippets.project_id()
+ self.project_name = snippets.project_name()
+ self.alert_policy_client = monitoring_v3.AlertPolicyServiceClient()
+ self.notification_channel_client = (
+ monitoring_v3.NotificationChannelServiceClient())
+
+ def __enter__(self):
+ @retry(wait_exponential_multiplier=1000, wait_exponential_max=10000,
+ stop_max_attempt_number=10,
+ retry_on_exception=retry_on_exceptions)
+ def setup():
+ # Create a policy.
+ policy = monitoring_v3.types.alert_pb2.AlertPolicy()
+ json = open('test_alert_policy.json').read()
+ google.protobuf.json_format.Parse(json, policy)
+ policy.display_name = 'snippets-test-' + random_name(10)
+ self.alert_policy = self.alert_policy_client.create_alert_policy(
+ self.project_name, policy)
+ # Create a notification channel.
+ notification_channel = (
+ monitoring_v3.types.notification_pb2.NotificationChannel())
+ json = open('test_notification_channel.json').read()
+ google.protobuf.json_format.Parse(json, notification_channel)
+ notification_channel.display_name = (
+ 'snippets-test-' + random_name(10))
+ self.notification_channel = (
+ self.notification_channel_client.create_notification_channel(
+ self.project_name, notification_channel))
+ setup()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ # Delete the policy and channel we created.
+ @retry(wait_exponential_multiplier=1000, wait_exponential_max=10000,
+ stop_max_attempt_number=10,
+ retry_on_exception=retry_on_exceptions)
+ def teardown():
+ try:
+ self.alert_policy_client.delete_alert_policy(
+ self.alert_policy.name)
+ except NotFound:
+ print("Ignored NotFound when deleting a policy.")
+ try:
+ if self.notification_channel.name:
+ self.notification_channel_client\
+ .delete_notification_channel(
+ self.notification_channel.name)
+ except NotFound:
+ print("Ignored NotFound when deleting a channel.")
+ teardown()
+
+
+@pytest.fixture(scope='session')
+def pochan():
+ with PochanFixture() as pochan:
+ yield pochan
+
+
+def test_list_alert_policies(capsys, pochan):
+ snippets.list_alert_policies(pochan.project_name)
+ out, _ = capsys.readouterr()
+ assert pochan.alert_policy.display_name in out
+
+
+@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5)
+def test_enable_alert_policies(capsys, pochan):
+ # These sleep calls are for mitigating the following error:
+ # "409 Too many concurrent edits to the project configuration.
+ # Please try again."
+    # Using multiple projects would remove the need for these `sleep()` calls.
+ # See also #3310
+ time.sleep(2)
+ snippets.enable_alert_policies(pochan.project_name, True)
+ out, _ = capsys.readouterr()
+ assert "Enabled {0}".format(pochan.project_name) in out \
+ or "{} is already enabled".format(pochan.alert_policy.name) in out
+
+ time.sleep(2)
+ snippets.enable_alert_policies(pochan.project_name, False)
+ out, _ = capsys.readouterr()
+ assert "Disabled {}".format(pochan.project_name) in out \
+ or "{} is already disabled".format(pochan.alert_policy.name) in out
+
+
+@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5)
+def test_replace_channels(capsys, pochan):
+ alert_policy_id = pochan.alert_policy.name.split('/')[-1]
+ notification_channel_id = pochan.notification_channel.name.split('/')[-1]
+
+ # This sleep call is for mitigating the following error:
+ # "409 Too many concurrent edits to the project configuration.
+ # Please try again."
+    # Using multiple projects would remove the need for this `sleep()` call.
+ # See also #3310
+ time.sleep(2)
+ snippets.replace_notification_channels(
+ pochan.project_name, alert_policy_id, [notification_channel_id])
+ out, _ = capsys.readouterr()
+ assert "Updated {0}".format(pochan.alert_policy.name) in out
+
+
+@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5)
+def test_backup_and_restore(capsys, pochan):
+ # These sleep calls are for mitigating the following error:
+ # "409 Too many concurrent edits to the project configuration.
+ # Please try again."
+    # Using multiple projects would remove the need for this `sleep()` call.
+ # See also #3310
+ time.sleep(2)
+ snippets.backup(pochan.project_name, 'backup.json')
+ out, _ = capsys.readouterr()
+
+ time.sleep(2)
+ snippets.restore(pochan.project_name, 'backup.json')
+ out, _ = capsys.readouterr()
+ assert "Updated {0}".format(pochan.alert_policy.name) in out
+ assert "Updating channel {0}".format(
+ pochan.notification_channel.display_name) in out
+
+
+@pytest.mark.flaky(rerun_filter=delay_on_aborted, max_runs=5)
+def test_delete_channels(capsys, pochan):
+ notification_channel_id = pochan.notification_channel.name.split('/')[-1]
+
+ # This sleep call is for mitigating the following error:
+ # "409 Too many concurrent edits to the project configuration.
+ # Please try again."
+    # Using multiple projects would remove the need for this `sleep()` call.
+ # See also #3310
+ time.sleep(2)
+ snippets.delete_notification_channels(
+ pochan.project_name, [notification_channel_id], force=True)
+ out, _ = capsys.readouterr()
+ assert "{0} deleted".format(notification_channel_id) in out
+ pochan.notification_channel.name = '' # So teardown is not tried
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_alert_policy.json b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_alert_policy.json
new file mode 100644
index 000000000000..d728949f9bb3
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_alert_policy.json
@@ -0,0 +1,31 @@
+{
+ "displayName": "test_alert_policy.json",
+ "combiner": "OR",
+ "conditions": [
+ {
+ "conditionThreshold": {
+ "filter": "metric.label.state=\"blocked\" AND metric.type=\"agent.googleapis.com/processes/count_by_state\" AND resource.type=\"gce_instance\"",
+ "comparison": "COMPARISON_GT",
+ "thresholdValue": 100,
+ "duration": "900s",
+ "trigger": {
+ "percent": 0
+ },
+ "aggregations": [
+ {
+ "alignmentPeriod": "60s",
+ "perSeriesAligner": "ALIGN_MEAN",
+ "crossSeriesReducer": "REDUCE_MEAN",
+ "groupByFields": [
+ "project",
+ "resource.label.instance_id",
+ "resource.label.zone"
+ ]
+ }
+ ]
+ },
+ "displayName": "test_alert_policy.json"
+ }
+ ],
+ "enabled": false
+}
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_notification_channel.json b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_notification_channel.json
new file mode 100644
index 000000000000..6a0d53c00cdd
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/alerts-client/test_notification_channel.json
@@ -0,0 +1,15 @@
+{
+ "type": "email",
+ "displayName": "Email joe.",
+ "description": "test_notification_channel.json",
+ "labels": {
+ "email_address": "joe@example.com"
+ },
+ "userLabels": {
+ "office": "california_westcoast_usa",
+ "division": "fulfillment",
+ "role": "operations",
+ "level": "5"
+ },
+ "enabled": true
+}
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst
new file mode 100644
index 000000000000..280f9c4e0a79
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst
@@ -0,0 +1,147 @@
+.. This file is automatically generated. Do not edit this file directly.
+
+Google Stackdriver Monitoring API Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/README.rst
+
+
+This directory contains samples for Google Stackdriver Monitoring API. Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver ingests that data and generates insights via dashboards, charts, and alerts.
+
+
+
+
+.. _Google Stackdriver Monitoring API: https://cloud.google.com/monitoring/docs/
+
+To run the sample, you need to enable the API at: https://console.cloud.google.com/apis/library/monitoring.googleapis.com
+
+To run the sample, you need to have the `Monitoring Admin` role.
+
+
+Please visit `the Cloud Console UI of this API <https://console.cloud.google.com/monitoring>`_ and create a new Workspace with the same name as your Cloud project.
+
+
+Setup
+-------------------------------------------------------------------------------
+
+
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
+
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
+
+Samples
+-------------------------------------------------------------------------------
+
+Quickstart
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/quickstart.py,monitoring/api/v3/cloud-client/README.rst
+
+
+
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python quickstart.py
+
+
+Snippets
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/cloud-client/snippets.py,monitoring/api/v3/cloud-client/README.rst
+
+
+
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python snippets.py
+
+ usage: snippets.py [-h]
+ {create-metric-descriptor,list-metric-descriptors,get-metric-descriptor,delete-metric-descriptor,list-resources,get-resource,write-time-series,list-time-series,list-time-series-header,list-time-series-reduce,list-time-series-aggregate}
+ ...
+
+ Demonstrates Monitoring API operations.
+
+ positional arguments:
+ {create-metric-descriptor,list-metric-descriptors,get-metric-descriptor,delete-metric-descriptor,list-resources,get-resource,write-time-series,list-time-series,list-time-series-header,list-time-series-reduce,list-time-series-aggregate}
+ create-metric-descriptor
+ list-metric-descriptors
+ get-metric-descriptor
+ delete-metric-descriptor
+ list-resources
+ get-resource
+ write-time-series
+ list-time-series
+ list-time-series-header
+ list-time-series-reduce
+ list-time-series-aggregate
+
+ optional arguments:
+ -h, --help show this help message and exit
+
+
+
+
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst.in b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst.in
new file mode 100644
index 000000000000..0ab6b2258b78
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/README.rst.in
@@ -0,0 +1,35 @@
+# This file is used to generate README.rst
+
+product:
+ name: Google Stackdriver Monitoring API
+ short_name: Stackdriver Monitoring API
+ url: https://cloud.google.com/monitoring/docs/
+ description: >
+ Stackdriver Monitoring collects metrics, events, and metadata from Google
+ Cloud Platform, Amazon Web Services (AWS), hosted uptime probes,
+ application instrumentation, and a variety of common application
+ components including Cassandra, Nginx, Apache Web Server, Elasticsearch
+ and many others. Stackdriver ingests that data and generates insights
+ via dashboards, charts, and alerts.
+
+required_api_url: https://console.cloud.google.com/apis/library/monitoring.googleapis.com
+required_role: Monitoring Admin
+other_required_steps: >
+  Please visit `the Cloud Console UI of this
+  API <https://console.cloud.google.com/monitoring>`_ and create a new
+  Workspace with the same name as your Cloud project.
+
+setup:
+- auth
+- install_deps
+
+samples:
+- name: Quickstart
+ file: quickstart.py
+- name: Snippets
+ file: snippets.py
+ show_help: true
+
+cloud_client_library: true
+
+folder: monitoring/api/v3/cloud-client
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile.py
new file mode 100644
index 000000000000..ba55d7ce53ca
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile.py
@@ -0,0 +1,224 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ 'ignored_versions': ["2.7"],
+
+ # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+    # build-specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ 'envs': {},
+}
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append('.')
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars():
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+ # Override the GCLOUD_PROJECT and the alias.
+ env_key = TEST_CONFIG['gcloud_project_env']
+ # This should error out if not set.
+ ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG['envs'])
+ return ret
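+# For example, with the default TEST_CONFIG above, get_pytest_env_vars()
+# returns {'GOOGLE_CLOUD_PROJECT': <value of $GOOGLE_CLOUD_PROJECT>}, so
+# the tests always see a project id under the canonical variable name.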
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+#
+# Style Checks
+#
+
+
+def _determine_local_import_names(start_dir):
+ """Determines all import names that should be considered "local".
+
+    This is used when running the linter to ensure that import order is
+ properly checked.
+ """
+ file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+ return [
+ basename
+ for basename, extension in file_ext_pairs
+ if extension == ".py"
+ or os.path.isdir(os.path.join(start_dir, basename))
+        and basename not in ("__pycache__",)
+ ]
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--import-order-style=google",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session):
+ session.install("flake8", "flake8-import-order")
+
+ local_names = _determine_local_import_names(".")
+ args = FLAKE8_COMMON_ARGS + [
+ "--application-import-names",
+ ",".join(local_names),
+ "."
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(session, post_install=None):
+ """Runs py.test for a particular project."""
+ if os.path.exists("requirements.txt"):
+ session.install("-r", "requirements.txt")
+
+ if os.path.exists("requirements-test.txt"):
+ session.install("-r", "requirements-test.txt")
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs),
+ # Pytest will return 5 when no tests are collected. This can happen
+ # on travis where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars()
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session):
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip("SKIPPED: {} tests are disabled for this sample.".format(
+ session.python
+ ))
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root():
+ """ Returns the root folder of the project. """
+ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session, path):
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile_config.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile_config.py
new file mode 100644
index 000000000000..664c58309d70
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/noxfile_config.py
@@ -0,0 +1,42 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Default TEST_CONFIG_OVERRIDE for python repos.
+
+# You can copy this file into your directory, and it will be imported by
+# the noxfile.py.
+
+# The source of truth:
+# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
+
+TEST_CONFIG_OVERRIDE = {
+ # You can opt out from the test for specific Python versions.
+ 'ignored_versions': ["2.7"],
+
+    # Declare optional test sessions you want to opt in to. Currently we
+ # have the following optional test sessions:
+ # 'cloud_run' # Test session for Cloud Run application.
+ 'opt_in_sessions': [],
+
+ # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
+    # build-specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ # 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ 'envs': {},
+}
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart.py
new file mode 100644
index 000000000000..0527acae545e
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart.py
@@ -0,0 +1,43 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def run_quickstart():
+ # [START monitoring_quickstart]
+ from google.cloud import monitoring_v3
+
+ import time
+
+ client = monitoring_v3.MetricServiceClient()
+ project = 'my-project' # TODO: Update to your project ID.
+ project_name = client.project_path(project)
+
+ series = monitoring_v3.types.TimeSeries()
+ series.metric.type = 'custom.googleapis.com/my_metric'
+ series.resource.type = 'gce_instance'
+ series.resource.labels['instance_id'] = '1234567890123456789'
+ series.resource.labels['zone'] = 'us-central1-f'
+ point = series.points.add()
+ point.value.double_value = 3.14
+ now = time.time()
+ point.interval.end_time.seconds = int(now)
+ point.interval.end_time.nanos = int(
+ (now - point.interval.end_time.seconds) * 10**9)
+ client.create_time_series(project_name, [series])
+ print('Successfully wrote time series.')
+ # [END monitoring_quickstart]
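+    # Note: the end_time computation above splits time.time() into whole
+    # seconds plus a nanosecond remainder, which is the form the protobuf
+    # Timestamp fields (seconds, nanos) expect.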
+
+
+if __name__ == '__main__':
+ run_quickstart()
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart_test.py
new file mode 100644
index 000000000000..fd0191aafc12
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/quickstart_test.py
@@ -0,0 +1,46 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import backoff
+import mock
+import pytest
+
+import quickstart
+
+
+PROJECT = os.environ['GOOGLE_CLOUD_PROJECT']
+
+
+@pytest.fixture
+def mock_project_path():
+ """Mock out project and replace with project from environment."""
+ project_patch = mock.patch(
+ 'google.cloud.monitoring_v3.MetricServiceClient.'
+ 'project_path')
+
+ with project_patch as project_mock:
+ project_mock.return_value = 'projects/{}'.format(PROJECT)
+ yield project_mock
+
+
+def test_quickstart(capsys, mock_project_path):
+ @backoff.on_exception(backoff.expo, AssertionError, max_time=60)
+ def eventually_consistent_test():
+ quickstart.run_quickstart()
+ out, _ = capsys.readouterr()
+ assert 'wrote' in out
+
+ eventually_consistent_test()
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements-test.txt b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements-test.txt
new file mode 100644
index 000000000000..b04e65e37c9c
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements-test.txt
@@ -0,0 +1,3 @@
+backoff==1.10.0
+pytest==6.0.1
+mock==4.0.2
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements.txt b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements.txt
new file mode 100644
index 000000000000..10c88fc2f9ae
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/requirements.txt
@@ -0,0 +1 @@
+google-cloud-monitoring==1.1.0
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets.py
new file mode 100644
index 000000000000..64b3853fd7cc
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets.py
@@ -0,0 +1,310 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import pprint
+import time
+import uuid
+
+from google.cloud import monitoring_v3
+
+
+PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT']
+
+
+def create_metric_descriptor(project_id):
+ # [START monitoring_create_metric]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ descriptor = monitoring_v3.types.MetricDescriptor()
+ descriptor.type = 'custom.googleapis.com/my_metric' + str(uuid.uuid4())
+ descriptor.metric_kind = (
+ monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE)
+ descriptor.value_type = (
+ monitoring_v3.enums.MetricDescriptor.ValueType.DOUBLE)
+ descriptor.description = 'This is a simple example of a custom metric.'
+ descriptor = client.create_metric_descriptor(project_name, descriptor)
+ print('Created {}.'.format(descriptor.name))
+ # [END monitoring_create_metric]
+
+
+def delete_metric_descriptor(descriptor_name):
+ # [START monitoring_delete_metric]
+ client = monitoring_v3.MetricServiceClient()
+ client.delete_metric_descriptor(descriptor_name)
+ print('Deleted metric descriptor {}.'.format(descriptor_name))
+ # [END monitoring_delete_metric]
+
+
+def write_time_series(project_id):
+ # [START monitoring_write_timeseries]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+
+ series = monitoring_v3.types.TimeSeries()
+ series.metric.type = 'custom.googleapis.com/my_metric' + str(uuid.uuid4())
+ series.resource.type = 'gce_instance'
+ series.resource.labels['instance_id'] = '1234567890123456789'
+ series.resource.labels['zone'] = 'us-central1-f'
+ point = series.points.add()
+ point.value.double_value = 3.14
+ now = time.time()
+ point.interval.end_time.seconds = int(now)
+ point.interval.end_time.nanos = int(
+ (now - point.interval.end_time.seconds) * 10**9)
+ client.create_time_series(project_name, [series])
+ # [END monitoring_write_timeseries]
+
+
+def list_time_series(project_id):
+ # [START monitoring_read_timeseries_simple]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ interval = monitoring_v3.types.TimeInterval()
+ now = time.time()
+ interval.end_time.seconds = int(now)
+ interval.end_time.nanos = int(
+ (now - interval.end_time.seconds) * 10**9)
+ interval.start_time.seconds = int(now - 1200)
+ interval.start_time.nanos = interval.end_time.nanos
+ results = client.list_time_series(
+ project_name,
+ 'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
+ interval,
+ monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL)
+ for result in results:
+ print(result)
+ # [END monitoring_read_timeseries_simple]
+
+
+def list_time_series_header(project_id):
+ # [START monitoring_read_timeseries_fields]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ interval = monitoring_v3.types.TimeInterval()
+ now = time.time()
+ interval.end_time.seconds = int(now)
+ interval.end_time.nanos = int(
+ (now - interval.end_time.seconds) * 10**9)
+ interval.start_time.seconds = int(now - 1200)
+ interval.start_time.nanos = interval.end_time.nanos
+ results = client.list_time_series(
+ project_name,
+ 'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
+ interval,
+ monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.HEADERS)
+ for result in results:
+ print(result)
+ # [END monitoring_read_timeseries_fields]
+
+
+def list_time_series_aggregate(project_id):
+ # [START monitoring_read_timeseries_align]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ interval = monitoring_v3.types.TimeInterval()
+ now = time.time()
+ interval.end_time.seconds = int(now)
+ interval.end_time.nanos = int(
+ (now - interval.end_time.seconds) * 10**9)
+ interval.start_time.seconds = int(now - 3600)
+ interval.start_time.nanos = interval.end_time.nanos
+ aggregation = monitoring_v3.types.Aggregation()
+ aggregation.alignment_period.seconds = 1200 # 20 minutes
+ aggregation.per_series_aligner = (
+ monitoring_v3.enums.Aggregation.Aligner.ALIGN_MEAN)
+
+ results = client.list_time_series(
+ project_name,
+ 'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
+ interval,
+ monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL,
+ aggregation)
+ for result in results:
+ print(result)
+ # [END monitoring_read_timeseries_align]
+
+
+def list_time_series_reduce(project_id):
+ # [START monitoring_read_timeseries_reduce]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ interval = monitoring_v3.types.TimeInterval()
+ now = time.time()
+ interval.end_time.seconds = int(now)
+ interval.end_time.nanos = int(
+ (now - interval.end_time.seconds) * 10**9)
+ interval.start_time.seconds = int(now - 3600)
+ interval.start_time.nanos = interval.end_time.nanos
+ aggregation = monitoring_v3.types.Aggregation()
+ aggregation.alignment_period.seconds = 1200 # 20 minutes
+ aggregation.per_series_aligner = (
+ monitoring_v3.enums.Aggregation.Aligner.ALIGN_MEAN)
+ aggregation.cross_series_reducer = (
+ monitoring_v3.enums.Aggregation.Reducer.REDUCE_MEAN)
+ aggregation.group_by_fields.append('resource.zone')
+
+ results = client.list_time_series(
+ project_name,
+ 'metric.type = "compute.googleapis.com/instance/cpu/utilization"',
+ interval,
+ monitoring_v3.enums.ListTimeSeriesRequest.TimeSeriesView.FULL,
+ aggregation)
+ for result in results:
+ print(result)
+ # [END monitoring_read_timeseries_reduce]
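+# The aggregation above first aligns each series into 20-minute means
+# (per_series_aligner=ALIGN_MEAN), then averages the aligned series
+# within each zone (cross_series_reducer=REDUCE_MEAN with
+# group_by_fields=['resource.zone']).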
+
+
+def list_metric_descriptors(project_id):
+ # [START monitoring_list_descriptors]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ for descriptor in client.list_metric_descriptors(project_name):
+ print(descriptor.type)
+ # [END monitoring_list_descriptors]
+
+
+def list_monitored_resources(project_id):
+ # [START monitoring_list_resources]
+ client = monitoring_v3.MetricServiceClient()
+ project_name = client.project_path(project_id)
+ resource_descriptors = (
+ client.list_monitored_resource_descriptors(project_name))
+ for descriptor in resource_descriptors:
+ print(descriptor.type)
+ # [END monitoring_list_resources]
+
+
+def get_monitored_resource_descriptor(project_id, resource_type_name):
+ # [START monitoring_get_resource]
+ client = monitoring_v3.MetricServiceClient()
+ resource_path = client.monitored_resource_descriptor_path(
+ project_id, resource_type_name)
+ pprint.pprint(client.get_monitored_resource_descriptor(resource_path))
+ # [END monitoring_get_resource]
+
+
+def get_metric_descriptor(metric_name):
+ # [START monitoring_get_descriptor]
+ client = monitoring_v3.MetricServiceClient()
+ descriptor = client.get_metric_descriptor(metric_name)
+ pprint.pprint(descriptor)
+ # [END monitoring_get_descriptor]
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='Demonstrates Monitoring API operations.')
+
+ subparsers = parser.add_subparsers(dest='command')
+
+ create_metric_descriptor_parser = subparsers.add_parser(
+ 'create-metric-descriptor',
+ help=create_metric_descriptor.__doc__
+ )
+
+ list_metric_descriptor_parser = subparsers.add_parser(
+ 'list-metric-descriptors',
+ help=list_metric_descriptors.__doc__
+ )
+
+ get_metric_descriptor_parser = subparsers.add_parser(
+ 'get-metric-descriptor',
+ help=get_metric_descriptor.__doc__
+ )
+
+ get_metric_descriptor_parser.add_argument(
+ '--metric-type-name',
+ help='The metric type of the metric descriptor to see details about.',
+ required=True
+ )
+
+ delete_metric_descriptor_parser = subparsers.add_parser(
+ 'delete-metric-descriptor',
+        help=delete_metric_descriptor.__doc__
+ )
+
+ delete_metric_descriptor_parser.add_argument(
+ '--metric-descriptor-name',
+ help='Metric descriptor to delete',
+ required=True
+ )
+
+ list_resources_parser = subparsers.add_parser(
+ 'list-resources',
+ help=list_monitored_resources.__doc__
+ )
+
+ get_resource_parser = subparsers.add_parser(
+ 'get-resource',
+ help=get_monitored_resource_descriptor.__doc__
+ )
+
+ get_resource_parser.add_argument(
+ '--resource-type-name',
+ help='Monitored resource to view more information about.',
+ required=True
+ )
+
+ write_time_series_parser = subparsers.add_parser(
+ 'write-time-series',
+ help=write_time_series.__doc__
+ )
+
+ list_time_series_parser = subparsers.add_parser(
+ 'list-time-series',
+ help=list_time_series.__doc__
+ )
+
+ list_time_series_header_parser = subparsers.add_parser(
+ 'list-time-series-header',
+ help=list_time_series_header.__doc__
+ )
+
+ read_time_series_reduce = subparsers.add_parser(
+ 'list-time-series-reduce',
+ help=list_time_series_reduce.__doc__
+ )
+
+ read_time_series_aggregate = subparsers.add_parser(
+ 'list-time-series-aggregate',
+ help=list_time_series_aggregate.__doc__
+ )
+
+ args = parser.parse_args()
+
+ if args.command == 'create-metric-descriptor':
+ create_metric_descriptor(PROJECT_ID)
+ if args.command == 'list-metric-descriptors':
+ list_metric_descriptors(PROJECT_ID)
+ if args.command == 'get-metric-descriptor':
+ get_metric_descriptor(args.metric_type_name)
+ if args.command == 'delete-metric-descriptor':
+ delete_metric_descriptor(args.metric_descriptor_name)
+ if args.command == 'list-resources':
+ list_monitored_resources(PROJECT_ID)
+ if args.command == 'get-resource':
+ get_monitored_resource_descriptor(
+ PROJECT_ID, args.resource_type_name)
+ if args.command == 'write-time-series':
+ write_time_series(PROJECT_ID)
+ if args.command == 'list-time-series':
+ list_time_series(PROJECT_ID)
+ if args.command == 'list-time-series-header':
+ list_time_series_header(PROJECT_ID)
+ if args.command == 'list-time-series-reduce':
+ list_time_series_reduce(PROJECT_ID)
+ if args.command == 'list-time-series-aggregate':
+ list_time_series_aggregate(PROJECT_ID)
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets_test.py
new file mode 100644
index 000000000000..5aabbda83922
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/cloud-client/snippets_test.py
@@ -0,0 +1,117 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+
+import backoff
+from google.api_core.exceptions import InternalServerError
+from google.api_core.exceptions import NotFound
+import pytest
+
+import snippets
+
+
+PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT']
+
+
+@pytest.fixture(scope="function")
+def custom_metric_descriptor(capsys):
+ snippets.create_metric_descriptor(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ match = re.search(r'Created (.*)\.', out)
+ metric_name = match.group(1)
+ yield metric_name
+
+ # teardown
+ try:
+ snippets.delete_metric_descriptor(metric_name)
+ except NotFound:
+ print("Metric descriptor already deleted")
+
+
+@pytest.fixture(scope="module")
+def write_time_series():
+
+ @backoff.on_exception(backoff.expo, InternalServerError, max_time=120)
+ def write():
+ snippets.write_time_series(PROJECT_ID)
+
+ write()
+ yield
+
+
+def test_get_delete_metric_descriptor(capsys, custom_metric_descriptor):
+ try:
+ @backoff.on_exception(
+ backoff.expo, (AssertionError, NotFound), max_time=60)
+ def eventually_consistent_test():
+ snippets.get_metric_descriptor(custom_metric_descriptor)
+ out, _ = capsys.readouterr()
+ assert 'DOUBLE' in out
+
+ eventually_consistent_test()
+ finally:
+ snippets.delete_metric_descriptor(custom_metric_descriptor)
+ out, _ = capsys.readouterr()
+ assert 'Deleted metric' in out
+
+
+def test_list_metric_descriptors(capsys):
+ snippets.list_metric_descriptors(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ assert 'logging.googleapis.com/byte_count' in out
+
+
+def test_list_resources(capsys):
+ snippets.list_monitored_resources(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ assert 'pubsub_topic' in out
+
+
+def test_get_resources(capsys):
+ snippets.get_monitored_resource_descriptor(
+ PROJECT_ID, 'pubsub_topic')
+ out, _ = capsys.readouterr()
+ assert 'A topic in Google Cloud Pub/Sub' in out
+
+
+def test_list_time_series(capsys, write_time_series):
+ snippets.list_time_series(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ assert 'gce_instance' in out
+
+
+def test_list_time_series_header(capsys, write_time_series):
+ snippets.list_time_series_header(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ assert 'gce_instance' in out
+
+
+def test_list_time_series_aggregate(capsys, write_time_series):
+ snippets.list_time_series_aggregate(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ assert 'points' in out
+ assert 'interval' in out
+ assert 'start_time' in out
+ assert 'end_time' in out
+
+
+def test_list_time_series_reduce(capsys, write_time_series):
+ snippets.list_time_series_reduce(PROJECT_ID)
+ out, _ = capsys.readouterr()
+ assert 'points' in out
+ assert 'interval' in out
+ assert 'start_time' in out
+ assert 'end_time' in out
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst
new file mode 100644
index 000000000000..30046bdef9d2
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst
@@ -0,0 +1,115 @@
+.. This file is automatically generated. Do not edit this file directly.
+
+Google Stackdriver Uptime Checks API Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/uptime-check-client/README.rst
+
+
+This directory contains samples for Google Stackdriver Uptime Checks API. Stackdriver Monitoring collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver's Uptime Checks API allows you to create, delete, and list your project's Uptime Checks.
+
+
+
+
+.. _Google Stackdriver Uptime Checks API: https://cloud.google.com/monitoring/uptime-checks/management
+
+Setup
+-------------------------------------------------------------------------------
+
+
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
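+
+For example, if you authenticate with a service account key (one common
+setup; the path below is a placeholder), you can point the
+``GOOGLE_APPLICATION_CREDENTIALS`` environment variable at the key file
+before running the samples:
+
+.. code-block:: bash
+
+    $ export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service-account.json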
+
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
+
+Samples
+-------------------------------------------------------------------------------
+
+Snippets
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=monitoring/api/v3/uptime-check-client/snippets.py,monitoring/api/v3/uptime-check-client/README.rst
+
+
+
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python snippets.py
+
+ usage: snippets.py [-h]
+                       {list-uptime-check-configs,list-uptime-check-ips,create-uptime-check-get,create-uptime-check-post,get-uptime-check-config,delete-uptime-check-config,update-uptime-check-config}
+ ...
+
+ Demonstrates Uptime Check API operations.
+
+ positional arguments:
+      {list-uptime-check-configs,list-uptime-check-ips,create-uptime-check-get,create-uptime-check-post,get-uptime-check-config,delete-uptime-check-config,update-uptime-check-config}
+        list-uptime-check-configs
+        list-uptime-check-ips
+        create-uptime-check-get
+        create-uptime-check-post
+        get-uptime-check-config
+        delete-uptime-check-config
+        update-uptime-check-config
+
+ optional arguments:
+ -h, --help show this help message and exit
+
+
+
+
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
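+
+As a minimal sketch (the project id below is a placeholder; the calls match
+the ones used in ``snippets.py``), listing a project's uptime check
+configurations with the client library looks like this:
+
+.. code-block:: python
+
+    from google.cloud import monitoring_v3
+
+    client = monitoring_v3.UptimeCheckServiceClient()
+    for config in client.list_uptime_check_configs("projects/your-project-id"):
+        print(config.display_name)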
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst.in b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst.in
new file mode 100644
index 000000000000..1174962e48d1
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/README.rst.in
@@ -0,0 +1,26 @@
+# This file is used to generate README.rst
+
+product:
+ name: Google Stackdriver Uptime Checks API
+ short_name: Stackdriver Uptime Checks API
+ url: https://cloud.google.com/monitoring/uptime-checks/management
+ description: >
+ Stackdriver Monitoring collects metrics, events, and metadata from Google
+ Cloud Platform, Amazon Web Services (AWS), hosted uptime probes,
+ application instrumentation, and a variety of common application
+ components including Cassandra, Nginx, Apache Web Server, Elasticsearch
+ and many others. Stackdriver's Uptime Checks API allows you to create,
+ delete, and list your project's Uptime Checks.
+
+setup:
+- auth
+- install_deps
+
+samples:
+- name: Snippets
+ file: snippets.py
+ show_help: true
+
+cloud_client_library: true
+
+folder: monitoring/api/v3/uptime-check-client
\ No newline at end of file
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/noxfile.py b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/noxfile.py
new file mode 100644
index 000000000000..ba55d7ce53ca
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/noxfile.py
@@ -0,0 +1,224 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+ # You can opt out from the test for specific Python versions.
+ 'ignored_versions': ["2.7"],
+
+ # An envvar key for determining the project id to use. Change it
+ # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+ # build specific Cloud project. You can also use your own string
+ # to use your own Cloud project.
+ 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+
+ # A dictionary you want to inject into your test. Don't put any
+ # secrets here. These values will override predefined values.
+ 'envs': {},
+}
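+#
+# For reference, a hypothetical noxfile_config.py override might look like
+# this (the keys mirror TEST_CONFIG above):
+#
+#     TEST_CONFIG_OVERRIDE = {
+#         'ignored_versions': ["2.7"],
+#         'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+#         'envs': {},
+#     }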
+
+
+try:
+ # Ensure we can import noxfile_config in the project's directory.
+ sys.path.append('.')
+ from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+ print("No user noxfile_config found: detail: {}".format(e))
+ TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)
+
+
+def get_pytest_env_vars():
+ """Returns a dict for pytest invocation."""
+ ret = {}
+
+    # Set GOOGLE_CLOUD_PROJECT from the configured environment variable.
+ env_key = TEST_CONFIG['gcloud_project_env']
+ # This should error out if not set.
+ ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+
+ # Apply user supplied envs.
+ ret.update(TEST_CONFIG['envs'])
+ return ret
+
+
+# DO NOT EDIT - automatically generated.
+# All versions used to test samples.
+ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"]
+
+# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+
+TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
+
+INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+
+
+#
+# Style Checks
+#
+
+
+def _determine_local_import_names(start_dir):
+ """Determines all import names that should be considered "local".
+
+    This is used when running the linter to ensure that import order is
+ properly checked.
+ """
+ file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)]
+ return [
+ basename
+ for basename, extension in file_ext_pairs
+ if extension == ".py"
+ or os.path.isdir(os.path.join(start_dir, basename))
+        and basename not in ("__pycache__",)
+ ]
+
+
+# Linting with flake8.
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+ "--show-source",
+ "--builtin=gettext",
+ "--max-complexity=20",
+ "--import-order-style=google",
+ "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+ "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+ "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session):
+ session.install("flake8", "flake8-import-order")
+
+ local_names = _determine_local_import_names(".")
+ args = FLAKE8_COMMON_ARGS + [
+ "--application-import-names",
+ ",".join(local_names),
+ "."
+ ]
+ session.run("flake8", *args)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(session, post_install=None):
+ """Runs py.test for a particular project."""
+ if os.path.exists("requirements.txt"):
+ session.install("-r", "requirements.txt")
+
+ if os.path.exists("requirements-test.txt"):
+ session.install("-r", "requirements-test.txt")
+
+ if INSTALL_LIBRARY_FROM_SOURCE:
+ session.install("-e", _get_repo_root())
+
+ if post_install:
+ post_install(session)
+
+ session.run(
+ "pytest",
+ *(PYTEST_COMMON_ARGS + session.posargs),
+ # Pytest will return 5 when no tests are collected. This can happen
+ # on travis where slow and flaky tests are excluded.
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+ success_codes=[0, 5],
+ env=get_pytest_env_vars()
+ )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session):
+ """Runs py.test for a sample using the specified version of Python."""
+ if session.python in TESTED_VERSIONS:
+ _session_tests(session)
+ else:
+ session.skip("SKIPPED: {} tests are disabled for this sample.".format(
+ session.python
+ ))
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root():
+ """ Returns the root folder of the project. """
+ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+ p = Path(os.getcwd())
+ for i in range(10):
+ if p is None:
+ break
+ if Path(p / ".git").exists():
+ return str(p)
+ p = p.parent
+ raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session, path):
+ """(Re-)generates the readme for a sample."""
+ session.install("jinja2", "pyyaml")
+ dir_ = os.path.dirname(path)
+
+ if os.path.exists(os.path.join(dir_, "requirements.txt")):
+ session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+ in_file = os.path.join(dir_, "README.rst.in")
+ session.run(
+ "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+ )
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements-test.txt b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements-test.txt
new file mode 100644
index 000000000000..d0029c6de49e
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements-test.txt
@@ -0,0 +1,2 @@
+backoff==1.10.0
+pytest==6.0.1
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements.txt b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements.txt
new file mode 100644
index 000000000000..bc7a2fe57c8b
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/requirements.txt
@@ -0,0 +1,2 @@
+google-cloud-monitoring==1.1.0
+tabulate==0.8.7
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets.py b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets.py
new file mode 100644
index 000000000000..dcde3b58650d
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets.py
@@ -0,0 +1,257 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import argparse
+import os
+import pprint
+
+from google.cloud import monitoring_v3
+import tabulate
+
+
+# [START monitoring_uptime_check_create]
+def create_uptime_check_config_get(project_name, host_name=None, display_name=None):
+ config = monitoring_v3.types.uptime_pb2.UptimeCheckConfig()
+ config.display_name = display_name or "New GET uptime check"
+ config.monitored_resource.type = "uptime_url"
+ config.monitored_resource.labels.update({"host": host_name or "example.com"})
+ config.http_check.request_method = (
+ monitoring_v3.enums.UptimeCheckConfig.HttpCheck.RequestMethod.GET
+ )
+ config.http_check.path = "/"
+ config.http_check.port = 80
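+    # Run the check every 300 seconds, allowing each request 10 seconds
+    # to complete before it times out.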
+ config.timeout.seconds = 10
+ config.period.seconds = 300
+
+ client = monitoring_v3.UptimeCheckServiceClient()
+ new_config = client.create_uptime_check_config(project_name, config)
+ pprint.pprint(new_config)
+ return new_config
+
+
+def create_uptime_check_config_post(project_name, host_name=None, display_name=None):
+ config = monitoring_v3.types.uptime_pb2.UptimeCheckConfig()
+ config.display_name = display_name or "New POST uptime check"
+ config.monitored_resource.type = "uptime_url"
+ config.monitored_resource.labels.update({"host": host_name or "example.com"})
+ config.http_check.request_method = (
+ monitoring_v3.enums.UptimeCheckConfig.HttpCheck.RequestMethod.POST
+ )
+ config.http_check.content_type = (
+ monitoring_v3.enums.UptimeCheckConfig.HttpCheck.ContentType.URL_ENCODED
+ )
+ config.http_check.body = "foo=bar".encode("utf-8")
+ config.http_check.path = "/"
+ config.http_check.port = 80
+ config.timeout.seconds = 10
+ config.period.seconds = 300
+
+ client = monitoring_v3.UptimeCheckServiceClient()
+ new_config = client.create_uptime_check_config(project_name, config)
+ pprint.pprint(new_config)
+ return new_config
+
+
+# [END monitoring_uptime_check_create]
+
+# [START monitoring_uptime_check_update]
+def update_uptime_check_config(
+ config_name, new_display_name=None, new_http_check_path=None
+):
+ client = monitoring_v3.UptimeCheckServiceClient()
+ config = client.get_uptime_check_config(config_name)
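+    # Build a field mask so only the fields being changed are updated;
+    # fields not named in the mask keep their current values.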
+ field_mask = monitoring_v3.types.FieldMask()
+ if new_display_name:
+ field_mask.paths.append("display_name")
+ config.display_name = new_display_name
+ if new_http_check_path:
+ field_mask.paths.append("http_check.path")
+ config.http_check.path = new_http_check_path
+ client.update_uptime_check_config(config, field_mask)
+
+
+# [END monitoring_uptime_check_update]
+
+
+# [START monitoring_uptime_check_list_configs]
+def list_uptime_check_configs(project_name):
+ client = monitoring_v3.UptimeCheckServiceClient()
+ configs = client.list_uptime_check_configs(project_name)
+
+ for config in configs:
+ pprint.pprint(config)
+
+
+# [END monitoring_uptime_check_list_configs]
+
+
+# [START monitoring_uptime_check_list_ips]
+def list_uptime_check_ips():
+ client = monitoring_v3.UptimeCheckServiceClient()
+ ips = client.list_uptime_check_ips()
+ print(
+ tabulate.tabulate(
+ [(ip.region, ip.location, ip.ip_address) for ip in ips],
+ ("region", "location", "ip_address"),
+ )
+ )
+
+
+# [END monitoring_uptime_check_list_ips]
+
+
+# [START monitoring_uptime_check_get]
+def get_uptime_check_config(config_name):
+ client = monitoring_v3.UptimeCheckServiceClient()
+ config = client.get_uptime_check_config(config_name)
+ pprint.pprint(config)
+
+
+# [END monitoring_uptime_check_get]
+
+
+# [START monitoring_uptime_check_delete]
+# `config_name` is the `name` field of an UptimeCheckConfig.
+# See https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.uptimeCheckConfigs#UptimeCheckConfig.
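+# It typically looks like
+# "projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]".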
+def delete_uptime_check_config(config_name):
+ client = monitoring_v3.UptimeCheckServiceClient()
+ client.delete_uptime_check_config(config_name)
+ print("Deleted ", config_name)
+
+
+# [END monitoring_uptime_check_delete]
+
+
+class MissingProjectIdError(Exception):
+ pass
+
+
+def project_id():
+ """Retreieves the project id from the environment variable.
+
+ Raises:
+ MissingProjectIdError -- When not set.
+
+ Returns:
+ str -- the project name
+ """
+ project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
+
+ if not project_id:
+ raise MissingProjectIdError(
+ "Set the environment variable "
+ + "GCLOUD_PROJECT to your Google Cloud Project Id."
+ )
+ return project_id
+
+
+def project_name():
+ return "projects/" + project_id()
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(
+ description="Demonstrates Uptime Check API operations."
+ )
+
+ subparsers = parser.add_subparsers(dest="command")
+
+ list_uptime_check_configs_parser = subparsers.add_parser(
+ "list-uptime-check-configs", help=list_uptime_check_configs.__doc__
+ )
+
+ list_uptime_check_ips_parser = subparsers.add_parser(
+ "list-uptime-check-ips", help=list_uptime_check_ips.__doc__
+ )
+
+ create_uptime_check_config_get_parser = subparsers.add_parser(
+ "create-uptime-check-get", help=create_uptime_check_config_get.__doc__
+ )
+ create_uptime_check_config_get_parser.add_argument(
+ "-d", "--display_name", required=False,
+ )
+ create_uptime_check_config_get_parser.add_argument(
+ "-o", "--host_name", required=False,
+ )
+
+ create_uptime_check_config_post_parser = subparsers.add_parser(
+ "create-uptime-check-post", help=create_uptime_check_config_post.__doc__
+ )
+ create_uptime_check_config_post_parser.add_argument(
+ "-d", "--display_name", required=False,
+ )
+ create_uptime_check_config_post_parser.add_argument(
+ "-o", "--host_name", required=False,
+ )
+
+ get_uptime_check_config_parser = subparsers.add_parser(
+ "get-uptime-check-config", help=get_uptime_check_config.__doc__
+ )
+ get_uptime_check_config_parser.add_argument(
+ "-m", "--name", required=True,
+ )
+
+ delete_uptime_check_config_parser = subparsers.add_parser(
+ "delete-uptime-check-config", help=delete_uptime_check_config.__doc__
+ )
+ delete_uptime_check_config_parser.add_argument(
+ "-m", "--name", required=True,
+ )
+
+ update_uptime_check_config_parser = subparsers.add_parser(
+ "update-uptime-check-config", help=update_uptime_check_config.__doc__
+ )
+ update_uptime_check_config_parser.add_argument(
+ "-m", "--name", required=True,
+ )
+ update_uptime_check_config_parser.add_argument(
+ "-d", "--display_name", required=False,
+ )
+ update_uptime_check_config_parser.add_argument(
+ "-p", "--uptime_check_path", required=False,
+ )
+
+ args = parser.parse_args()
+
+ if args.command == "list-uptime-check-configs":
+ list_uptime_check_configs(project_name())
+
+ elif args.command == "list-uptime-check-ips":
+ list_uptime_check_ips()
+
+ elif args.command == "create-uptime-check-get":
+ create_uptime_check_config_get(
+ project_name(), args.host_name, args.display_name
+ )
+ elif args.command == "create-uptime-check-post":
+ create_uptime_check_config_post(
+ project_name(), args.host_name, args.display_name
+ )
+
+ elif args.command == "get-uptime-check-config":
+ get_uptime_check_config(args.name)
+
+ elif args.command == "delete-uptime-check-config":
+ delete_uptime_check_config(args.name)
+
+ elif args.command == "update-uptime-check-config":
+ if not args.display_name and not args.uptime_check_path:
+ print("Nothing to update. Pass --display_name or " "--uptime_check_path.")
+ else:
+ update_uptime_check_config(
+ args.name, args.display_name, args.uptime_check_path
+ )
diff --git a/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets_test.py b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets_test.py
new file mode 100644
index 000000000000..81d2b247372c
--- /dev/null
+++ b/packages/google-cloud-monitoring/samples/snippets/v3/uptime-check-client/snippets_test.py
@@ -0,0 +1,105 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import random
+import string
+
+import backoff
+from google.api_core.exceptions import DeadlineExceeded
+import pytest
+
+import snippets
+
+
+def random_name(length):
+ return "".join([random.choice(string.ascii_lowercase) for i in range(length)])
+
+
+class UptimeFixture:
+ """A test fixture that creates uptime check config.
+ """
+
+ def __init__(self):
+ self.project_id = snippets.project_id()
+ self.project_name = snippets.project_name()
+
+ def __enter__(self):
+ # Create an uptime check config (GET request).
+ self.config_get = snippets.create_uptime_check_config_get(
+ self.project_name, display_name=random_name(10)
+ )
+ # Create an uptime check config (POST request).
+ self.config_post = snippets.create_uptime_check_config_post(
+ self.project_name, display_name=random_name(10)
+ )
+ return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+ # Delete the config.
+ snippets.delete_uptime_check_config(self.config_get.name)
+ snippets.delete_uptime_check_config(self.config_post.name)
+
+
+@pytest.fixture(scope="session")
+def uptime():
+ with UptimeFixture() as uptime:
+ yield uptime
+
+
+def test_create_and_delete(capsys):
+ # create and delete happen in uptime fixture.
+ with UptimeFixture():
+ pass
+
+
+def test_update_uptime_config(capsys):
+ # create and delete happen in uptime fixture.
+ new_display_name = random_name(10)
+ new_uptime_check_path = "/" + random_name(10)
+ with UptimeFixture() as fixture:
+        # We sometimes see a permission error claiming the resource
+        # may not exist. Oddly, a DeadlineExceeded exception is raised
+        # in that case, so retry on it with backoff.
+ @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=120)
+ def call_sample():
+ snippets.update_uptime_check_config(
+ fixture.config_get.name, new_display_name, new_uptime_check_path)
+
+ call_sample()
+
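+        # Drain output from the update call before capturing the get output.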
+ out, _ = capsys.readouterr()
+ snippets.get_uptime_check_config(fixture.config_get.name)
+ out, _ = capsys.readouterr()
+ assert new_display_name in out
+ assert new_uptime_check_path in out
+
+
+def test_get_uptime_check_config(capsys, uptime):
+ snippets.get_uptime_check_config(uptime.config_get.name)
+ out, _ = capsys.readouterr()
+ assert uptime.config_get.display_name in out
+
+
+def test_list_uptime_check_configs(capsys, uptime):
+ snippets.list_uptime_check_configs(uptime.project_name)
+ out, _ = capsys.readouterr()
+ assert uptime.config_get.display_name in out
+
+
+def test_list_uptime_check_ips(capsys):
+ snippets.list_uptime_check_ips()
+ out, _ = capsys.readouterr()
+ assert "Singapore" in out
diff --git a/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh b/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh
index ff599eb2af25..21f6d2a26d90 100755
--- a/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh
+++ b/packages/google-cloud-monitoring/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/packages/google-cloud-monitoring/synth.metadata b/packages/google-cloud-monitoring/synth.metadata
index 06138a5bc09a..da924e9719ac 100644
--- a/packages/google-cloud-monitoring/synth.metadata
+++ b/packages/google-cloud-monitoring/synth.metadata
@@ -4,22 +4,21 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/python-monitoring.git",
- "sha": "e45865745551f720d46e4f15e3da4e293c99f130"
+ "sha": "85a1420dbb23011681072e425a80e1130dac69e8"
}
},
{
"git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "c8e291e6a4d60771219205b653715d5aeec3e96b",
- "internalRef": "311222505"
+ "name": "synthtool",
+ "remote": "https://github.com/googleapis/synthtool.git",
+ "sha": "da29da32b3a988457b49ae290112b74f14b713cc"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170"
+ "sha": "da29da32b3a988457b49ae290112b74f14b713cc"
}
}
],
diff --git a/packages/google-cloud-monitoring/synth.py b/packages/google-cloud-monitoring/synth.py
index 756269b8dcfa..3367fa8b9664 100644
--- a/packages/google-cloud-monitoring/synth.py
+++ b/packages/google-cloud-monitoring/synth.py
@@ -16,6 +16,7 @@
import synthtool as s
import synthtool.gcp as gcp
+from synthtool.languages import python
import logging
AUTOSYNTH_MULTIPLE_COMMITS = True
@@ -112,9 +113,15 @@
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=92)
+templated_files = common.py_library(cov_level=92, samples=True)
s.move(templated_files)
+# ----------------------------------------------------------------------------
+# Samples templates
+# ----------------------------------------------------------------------------
+python.py_samples(skip_readmes=True)
+
+
# TODO(busunkim): Use latest sphinx after microgenerator transition
s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"')