diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000..fc281c0
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52..b4243ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index b820385..c577f86 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
set -eo pipefail
-cd github/python-analytics-data
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-analytics-data"
+fi
+
+cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3.6 -m nox -s "${NOX_SESSION:-}"
+ python3 -m nox -s "${NOX_SESSION:-}"
else
- python3.6 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 1118107..f93b720 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
value: "false"
}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-analytics-data/.kokoro/build.sh"
+}
+
+# Only run these nox sessions.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
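Pinning NOX_SESSION to "docs docfx" means build.sh above ends up running `python3 -m nox -s docs docfx` and skips the test sessions on doc presubmits. A minimal sketch of what the corresponding noxfile.py sessions could look like — only the session names come from the config above; the bodies and install lists are illustrative, not this repo's actual noxfile:

```python
# Hypothetical noxfile.py excerpt: session names match NOX_SESSION above,
# the build steps are illustrative.
import nox


@nox.session(python="3.8")
def docs(session):
    """Build the HTML docs with Sphinx."""
    session.install("sphinx", "recommonmark")
    session.install("-e", ".")
    session.run("sphinx-build", "-W", "-b", "html", "docs/", "docs/_build/html/")


@nox.session(python="3.8")
def docfx(session):
    """Build YAML output for the cloud-hosted reference docs."""
    session.install("sphinx", "sphinx-docfx-yaml")
    session.install("-e", ".")
    session.run("sphinx-build", "-T", "-b", "html", "docs/", "docs/_build/html/")
```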
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 8b24193..019ad78 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -4,7 +4,7 @@
"product_documentation": "https://developers.google.com/analytics/",
"client_documentation": "https://googleapis.dev/python/analyticsdata/latest",
"issue_tracker": "https://issuetracker.google.com/issues?q=componentid:187400%2B%20",
- "release_level": "alpha",
+ "release_level": "beta",
"language": "python",
"repo": "googleapis/python-analytics-data",
"distribution_name": "google-analytics-data",
diff --git a/.trampolinerc b/.trampolinerc
index 995ee29..383b6ec 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
pass_down_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
+ "NOX_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index b88c721..6f3e68d 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -70,9 +70,14 @@ We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k <name of test>
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -133,13 +142,18 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+ $ nox -s system-3.8 -- -k <name of test>
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
diff --git a/LICENSE b/LICENSE
index a8ee855..d645695 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
- Apache License
+
+ Apache License
Version 2.0, January 2004
- https://www.apache.org/licenses/
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d1..e783f4c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/README.rst b/README.rst
index 1ab0770..07d229f 100644
--- a/README.rst
+++ b/README.rst
@@ -1,15 +1,15 @@
Python Client for Analytics Data
=================================================
-|alpha| |pypi| |versions|
+|beta| |pypi| |versions|
`Analytics Data API`_: Access report data in Google Analytics.
- `Client Library Documentation`_
- `Product Documentation`_
-.. |alpha| image:: https://img.shields.io/badge/support-alpha-orange.svg
- :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#alpha-support
+.. |beta| image:: https://img.shields.io/badge/support-beta-orange.svg
+ :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support
.. |pypi| image:: https://img.shields.io/pypi/v/google-analytics-data.svg
:target: https://pypi.org/project/google-analytics-data/
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-analytics-data.svg
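With the README now advertising beta support, a quick orientation for readers: a minimal run_report call against the v1beta client exported by this PR. The property ID is a placeholder, and credentials are assumed to come from the environment (Application Default Credentials):

```python
# Minimal sketch of the v1beta surface; "properties/YOUR_GA4_PROPERTY_ID" is a
# placeholder, credentials are picked up from the environment (ADC).
from google.analytics.data_v1beta import (
    BetaAnalyticsDataClient,
    DateRange,
    Dimension,
    Metric,
    RunReportRequest,
)

client = BetaAnalyticsDataClient()
request = RunReportRequest(
    property="properties/YOUR_GA4_PROPERTY_ID",
    dimensions=[Dimension(name="country")],
    metrics=[Metric(name="activeUsers")],
    date_ranges=[DateRange(start_date="2020-09-01", end_date="today")],
)
response = client.run_report(request)
for row in response.rows:  # rows of the returned report table
    print(row.dimension_values[0].value, row.metric_values[0].value)
```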
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf22..bcd37bb 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,9 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
diff --git a/docs/data_v1beta/beta_analytics_data.rst b/docs/data_v1beta/beta_analytics_data.rst
new file mode 100644
index 0000000..bcdd65e
--- /dev/null
+++ b/docs/data_v1beta/beta_analytics_data.rst
@@ -0,0 +1,11 @@
+BetaAnalyticsData
+-----------------------------------
+
+.. automodule:: google.analytics.data_v1beta.services.beta_analytics_data
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.analytics.data_v1beta.services.beta_analytics_data.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/data_v1beta/services.rst b/docs/data_v1beta/services.rst
new file mode 100644
index 0000000..769b3f8
--- /dev/null
+++ b/docs/data_v1beta/services.rst
@@ -0,0 +1,6 @@
+Services for Google Analytics Data v1beta API
+=============================================
+.. toctree::
+ :maxdepth: 2
+
+ beta_analytics_data
diff --git a/docs/data_v1beta/types.rst b/docs/data_v1beta/types.rst
new file mode 100644
index 0000000..e7db7e0
--- /dev/null
+++ b/docs/data_v1beta/types.rst
@@ -0,0 +1,7 @@
+Types for Google Analytics Data v1beta API
+==========================================
+
+.. automodule:: google.analytics.data_v1beta.types
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/index.rst b/docs/index.rst
index 41be6bb..973b5b9 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -7,6 +7,8 @@ API Reference
.. toctree::
:maxdepth: 2
+ data_v1beta/services
+ data_v1beta/types
data_v1alpha/services
data_v1alpha/types
diff --git a/google-analytics-data-v1alpha-py.tar.gz b/google-analytics-data-v1alpha-py.tar.gz
new file mode 100644
index 0000000..e69de29
diff --git a/google/analytics/data/__init__.py b/google/analytics/data/__init__.py
index 53752ab..f8a2f05 100644
--- a/google/analytics/data/__init__.py
+++ b/google/analytics/data/__init__.py
@@ -15,75 +15,70 @@
# limitations under the License.
#
-from google.analytics.data_v1alpha.services.alpha_analytics_data.async_client import (
- AlphaAnalyticsDataAsyncClient,
+from google.analytics.data_v1beta.services.beta_analytics_data.async_client import (
+ BetaAnalyticsDataAsyncClient,
)
-from google.analytics.data_v1alpha.services.alpha_analytics_data.client import (
- AlphaAnalyticsDataClient,
+from google.analytics.data_v1beta.services.beta_analytics_data.client import (
+ BetaAnalyticsDataClient,
)
-from google.analytics.data_v1alpha.types.analytics_data_api import (
+from google.analytics.data_v1beta.types.analytics_data_api import (
BatchRunPivotReportsRequest,
)
-from google.analytics.data_v1alpha.types.analytics_data_api import (
+from google.analytics.data_v1beta.types.analytics_data_api import (
BatchRunPivotReportsResponse,
)
-from google.analytics.data_v1alpha.types.analytics_data_api import (
- BatchRunReportsRequest,
-)
-from google.analytics.data_v1alpha.types.analytics_data_api import (
+from google.analytics.data_v1beta.types.analytics_data_api import BatchRunReportsRequest
+from google.analytics.data_v1beta.types.analytics_data_api import (
BatchRunReportsResponse,
)
-from google.analytics.data_v1alpha.types.analytics_data_api import GetMetadataRequest
-from google.analytics.data_v1alpha.types.analytics_data_api import Metadata
-from google.analytics.data_v1alpha.types.analytics_data_api import RunPivotReportRequest
-from google.analytics.data_v1alpha.types.analytics_data_api import (
- RunPivotReportResponse,
-)
-from google.analytics.data_v1alpha.types.analytics_data_api import (
+from google.analytics.data_v1beta.types.analytics_data_api import GetMetadataRequest
+from google.analytics.data_v1beta.types.analytics_data_api import Metadata
+from google.analytics.data_v1beta.types.analytics_data_api import RunPivotReportRequest
+from google.analytics.data_v1beta.types.analytics_data_api import RunPivotReportResponse
+from google.analytics.data_v1beta.types.analytics_data_api import (
RunRealtimeReportRequest,
)
-from google.analytics.data_v1alpha.types.analytics_data_api import (
+from google.analytics.data_v1beta.types.analytics_data_api import (
RunRealtimeReportResponse,
)
-from google.analytics.data_v1alpha.types.analytics_data_api import RunReportRequest
-from google.analytics.data_v1alpha.types.analytics_data_api import RunReportResponse
-from google.analytics.data_v1alpha.types.data import Cohort
-from google.analytics.data_v1alpha.types.data import CohortReportSettings
-from google.analytics.data_v1alpha.types.data import CohortSpec
-from google.analytics.data_v1alpha.types.data import CohortsRange
-from google.analytics.data_v1alpha.types.data import DateRange
-from google.analytics.data_v1alpha.types.data import Dimension
-from google.analytics.data_v1alpha.types.data import DimensionExpression
-from google.analytics.data_v1alpha.types.data import DimensionHeader
-from google.analytics.data_v1alpha.types.data import DimensionMetadata
-from google.analytics.data_v1alpha.types.data import DimensionValue
-from google.analytics.data_v1alpha.types.data import Entity
-from google.analytics.data_v1alpha.types.data import Filter
-from google.analytics.data_v1alpha.types.data import FilterExpression
-from google.analytics.data_v1alpha.types.data import FilterExpressionList
-from google.analytics.data_v1alpha.types.data import Metric
-from google.analytics.data_v1alpha.types.data import MetricAggregation
-from google.analytics.data_v1alpha.types.data import MetricHeader
-from google.analytics.data_v1alpha.types.data import MetricMetadata
-from google.analytics.data_v1alpha.types.data import MetricType
-from google.analytics.data_v1alpha.types.data import MetricValue
-from google.analytics.data_v1alpha.types.data import NumericValue
-from google.analytics.data_v1alpha.types.data import OrderBy
-from google.analytics.data_v1alpha.types.data import Pivot
-from google.analytics.data_v1alpha.types.data import PivotDimensionHeader
-from google.analytics.data_v1alpha.types.data import PivotHeader
-from google.analytics.data_v1alpha.types.data import PropertyQuota
-from google.analytics.data_v1alpha.types.data import QuotaStatus
-from google.analytics.data_v1alpha.types.data import ResponseMetaData
-from google.analytics.data_v1alpha.types.data import Row
+from google.analytics.data_v1beta.types.analytics_data_api import RunReportRequest
+from google.analytics.data_v1beta.types.analytics_data_api import RunReportResponse
+from google.analytics.data_v1beta.types.data import Cohort
+from google.analytics.data_v1beta.types.data import CohortReportSettings
+from google.analytics.data_v1beta.types.data import CohortSpec
+from google.analytics.data_v1beta.types.data import CohortsRange
+from google.analytics.data_v1beta.types.data import DateRange
+from google.analytics.data_v1beta.types.data import Dimension
+from google.analytics.data_v1beta.types.data import DimensionExpression
+from google.analytics.data_v1beta.types.data import DimensionHeader
+from google.analytics.data_v1beta.types.data import DimensionMetadata
+from google.analytics.data_v1beta.types.data import DimensionValue
+from google.analytics.data_v1beta.types.data import Filter
+from google.analytics.data_v1beta.types.data import FilterExpression
+from google.analytics.data_v1beta.types.data import FilterExpressionList
+from google.analytics.data_v1beta.types.data import Metric
+from google.analytics.data_v1beta.types.data import MetricAggregation
+from google.analytics.data_v1beta.types.data import MetricHeader
+from google.analytics.data_v1beta.types.data import MetricMetadata
+from google.analytics.data_v1beta.types.data import MetricType
+from google.analytics.data_v1beta.types.data import MetricValue
+from google.analytics.data_v1beta.types.data import NumericValue
+from google.analytics.data_v1beta.types.data import OrderBy
+from google.analytics.data_v1beta.types.data import Pivot
+from google.analytics.data_v1beta.types.data import PivotDimensionHeader
+from google.analytics.data_v1beta.types.data import PivotHeader
+from google.analytics.data_v1beta.types.data import PropertyQuota
+from google.analytics.data_v1beta.types.data import QuotaStatus
+from google.analytics.data_v1beta.types.data import ResponseMetaData
+from google.analytics.data_v1beta.types.data import Row
__all__ = (
- "AlphaAnalyticsDataAsyncClient",
- "AlphaAnalyticsDataClient",
"BatchRunPivotReportsRequest",
"BatchRunPivotReportsResponse",
"BatchRunReportsRequest",
"BatchRunReportsResponse",
+ "BetaAnalyticsDataAsyncClient",
+ "BetaAnalyticsDataClient",
"Cohort",
"CohortReportSettings",
"CohortSpec",
@@ -94,7 +89,6 @@
"DimensionHeader",
"DimensionMetadata",
"DimensionValue",
- "Entity",
"Filter",
"FilterExpression",
"FilterExpressionList",
diff --git a/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py
index 682c278..ddd0301 100644
--- a/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py
+++ b/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py
@@ -282,21 +282,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -339,7 +335,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
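The refactor above replaces eager `grpc.ssl_channel_credentials` construction in the client with a callback (`client_cert_source_func`) handed to the transport, deferring certificate loading until the channel is actually built. A sketch of how a caller opts in, assuming GOOGLE_API_USE_CLIENT_CERTIFICATE=true; the certificate bytes are placeholders:

```python
# Sketch only: placeholder PEM bytes. The callback is not invoked here; the
# transport calls it later, when it builds the mTLS channel.
import os

from google.api_core.client_options import ClientOptions
from google.analytics.data_v1alpha import AlphaAnalyticsDataClient

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"


def client_cert_source():
    # Must return (certificate_chain_bytes, private_key_bytes) in PEM format.
    return b"-----BEGIN CERTIFICATE-----...", b"-----BEGIN PRIVATE KEY-----..."


client = AlphaAnalyticsDataClient(
    client_options=ClientOptions(client_cert_source=client_cert_source)
)
```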
diff --git a/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py
index da3658b..911aea5 100644
--- a/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py
+++ b/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py
@@ -57,6 +57,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -87,6 +88,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -103,6 +108,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -112,11 +122,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -160,12 +165,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
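Both transports now apply the same precedence: an explicit channel wins outright, then `ssl_channel_credentials`, and only then is `client_cert_source_for_mtls` consulted. A distilled sketch of that guard — a hypothetical helper, not part of the transport itself:

```python
import grpc


def resolve_ssl_credentials(ssl_channel_credentials, client_cert_source_for_mtls):
    # Mirrors the transport's guard: the callback is consulted only when no
    # explicit SSL credentials object was supplied.
    if client_cert_source_for_mtls and not ssl_channel_credentials:
        cert, key = client_cert_source_for_mtls()
        return grpc.ssl_channel_credentials(
            certificate_chain=cert, private_key=key
        )
    return ssl_channel_credentials
```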
diff --git a/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py
index 30d97c1..15f4013 100644
--- a/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py
+++ b/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py
@@ -101,6 +101,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -132,6 +133,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -148,6 +153,11 @@ def __init__(
"""
self._ssl_channel_credentials = ssl_channel_credentials
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -157,11 +167,6 @@ def __init__(
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
-
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -205,12 +210,18 @@ def __init__(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
+ ssl_credentials=self._ssl_channel_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
diff --git a/google/analytics/data_v1beta/__init__.py b/google/analytics/data_v1beta/__init__.py
new file mode 100644
index 0000000..40584d8
--- /dev/null
+++ b/google/analytics/data_v1beta/__init__.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .services.beta_analytics_data import BetaAnalyticsDataClient
+from .types.analytics_data_api import BatchRunPivotReportsRequest
+from .types.analytics_data_api import BatchRunPivotReportsResponse
+from .types.analytics_data_api import BatchRunReportsRequest
+from .types.analytics_data_api import BatchRunReportsResponse
+from .types.analytics_data_api import GetMetadataRequest
+from .types.analytics_data_api import Metadata
+from .types.analytics_data_api import RunPivotReportRequest
+from .types.analytics_data_api import RunPivotReportResponse
+from .types.analytics_data_api import RunRealtimeReportRequest
+from .types.analytics_data_api import RunRealtimeReportResponse
+from .types.analytics_data_api import RunReportRequest
+from .types.analytics_data_api import RunReportResponse
+from .types.data import Cohort
+from .types.data import CohortReportSettings
+from .types.data import CohortSpec
+from .types.data import CohortsRange
+from .types.data import DateRange
+from .types.data import Dimension
+from .types.data import DimensionExpression
+from .types.data import DimensionHeader
+from .types.data import DimensionMetadata
+from .types.data import DimensionValue
+from .types.data import Filter
+from .types.data import FilterExpression
+from .types.data import FilterExpressionList
+from .types.data import Metric
+from .types.data import MetricAggregation
+from .types.data import MetricHeader
+from .types.data import MetricMetadata
+from .types.data import MetricType
+from .types.data import MetricValue
+from .types.data import NumericValue
+from .types.data import OrderBy
+from .types.data import Pivot
+from .types.data import PivotDimensionHeader
+from .types.data import PivotHeader
+from .types.data import PropertyQuota
+from .types.data import QuotaStatus
+from .types.data import ResponseMetaData
+from .types.data import Row
+
+
+__all__ = (
+ "BatchRunPivotReportsRequest",
+ "BatchRunPivotReportsResponse",
+ "BatchRunReportsRequest",
+ "BatchRunReportsResponse",
+ "Cohort",
+ "CohortReportSettings",
+ "CohortSpec",
+ "CohortsRange",
+ "DateRange",
+ "Dimension",
+ "DimensionExpression",
+ "DimensionHeader",
+ "DimensionMetadata",
+ "DimensionValue",
+ "Filter",
+ "FilterExpression",
+ "FilterExpressionList",
+ "GetMetadataRequest",
+ "Metadata",
+ "Metric",
+ "MetricAggregation",
+ "MetricHeader",
+ "MetricMetadata",
+ "MetricType",
+ "MetricValue",
+ "NumericValue",
+ "OrderBy",
+ "Pivot",
+ "PivotDimensionHeader",
+ "PivotHeader",
+ "PropertyQuota",
+ "QuotaStatus",
+ "ResponseMetaData",
+ "Row",
+ "RunPivotReportRequest",
+ "RunPivotReportResponse",
+ "RunRealtimeReportRequest",
+ "RunRealtimeReportResponse",
+ "RunReportRequest",
+ "RunReportResponse",
+ "BetaAnalyticsDataClient",
+)
diff --git a/google/analytics/data_v1beta/py.typed b/google/analytics/data_v1beta/py.typed
new file mode 100644
index 0000000..1d549e5
--- /dev/null
+++ b/google/analytics/data_v1beta/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-analytics-data package uses inline types.
diff --git a/google/analytics/data_v1beta/services/__init__.py b/google/analytics/data_v1beta/services/__init__.py
new file mode 100644
index 0000000..42ffdf2
--- /dev/null
+++ b/google/analytics/data_v1beta/services/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py b/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py
new file mode 100644
index 0000000..2bae762
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/__init__.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .client import BetaAnalyticsDataClient
+from .async_client import BetaAnalyticsDataAsyncClient
+
+__all__ = (
+ "BetaAnalyticsDataClient",
+ "BetaAnalyticsDataAsyncClient",
+)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py b/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py
new file mode 100644
index 0000000..e4dd62c
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/async_client.py
@@ -0,0 +1,528 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.analytics.data_v1beta.services.beta_analytics_data import pagers
+from google.analytics.data_v1beta.types import analytics_data_api
+from google.analytics.data_v1beta.types import data
+
+from .transports.base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import BetaAnalyticsDataGrpcAsyncIOTransport
+from .client import BetaAnalyticsDataClient
+
+
+class BetaAnalyticsDataAsyncClient:
+ """Google Analytics reporting data service."""
+
+ _client: BetaAnalyticsDataClient
+
+ DEFAULT_ENDPOINT = BetaAnalyticsDataClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = BetaAnalyticsDataClient.DEFAULT_MTLS_ENDPOINT
+
+ metadata_path = staticmethod(BetaAnalyticsDataClient.metadata_path)
+ parse_metadata_path = staticmethod(BetaAnalyticsDataClient.parse_metadata_path)
+
+ common_billing_account_path = staticmethod(
+ BetaAnalyticsDataClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ BetaAnalyticsDataClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(BetaAnalyticsDataClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ BetaAnalyticsDataClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ BetaAnalyticsDataClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ BetaAnalyticsDataClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(BetaAnalyticsDataClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ BetaAnalyticsDataClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(BetaAnalyticsDataClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ BetaAnalyticsDataClient.parse_common_location_path
+ )
+
+ from_service_account_info = BetaAnalyticsDataClient.from_service_account_info
+ from_service_account_file = BetaAnalyticsDataClient.from_service_account_file
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> BetaAnalyticsDataTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ BetaAnalyticsDataTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(BetaAnalyticsDataClient).get_transport_class, type(BetaAnalyticsDataClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, BetaAnalyticsDataTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the beta analytics data client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.BetaAnalyticsDataTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = BetaAnalyticsDataClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def run_report(
+ self,
+ request: analytics_data_api.RunReportRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.RunReportAsyncPager:
+ r"""Returns a customized report of your Google Analytics
+ event data. Reports contain statistics derived from data
+ collected by the Google Analytics tracking code. The
+ data returned from the API is a table with columns
+ for the requested dimensions and metrics. Metrics are
+ individual measurements of user activity on your
+ property, such as active users or event count.
+ Dimensions break down metrics across some common
+ criteria, such as country or event name.
+
+ Args:
+ request (:class:`google.analytics.data_v1beta.types.RunReportRequest`):
+ The request object. The request to generate a report.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.services.beta_analytics_data.pagers.RunReportAsyncPager:
+ The response report table
+ corresponding to a request.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = analytics_data_api.RunReportRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.run_report,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.RunReportAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def run_pivot_report(
+ self,
+ request: analytics_data_api.RunPivotReportRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.RunPivotReportResponse:
+ r"""Returns a customized pivot report of your Google
+ Analytics event data. Pivot reports are more advanced
+ and expressive formats than regular reports. In a pivot
+ report, dimensions are only visible if they are included
+ in a pivot. Multiple pivots can be specified to further
+ dissect your data.
+
+ Args:
+ request (:class:`google.analytics.data_v1beta.types.RunPivotReportRequest`):
+ The request object. The request to generate a pivot
+ report.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.RunPivotReportResponse:
+ The response pivot report table
+ corresponding to a pivot request.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = analytics_data_api.RunPivotReportRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.run_pivot_report,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def batch_run_reports(
+ self,
+ request: analytics_data_api.BatchRunReportsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.BatchRunReportsResponse:
+ r"""Returns multiple reports in a batch. All reports must
+ be for the same GA4 Property.
+
+ Args:
+ request (:class:`google.analytics.data_v1beta.types.BatchRunReportsRequest`):
+ The request object. The batch request containing
+ multiple report requests.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.BatchRunReportsResponse:
+ The batch response containing
+ multiple reports.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = analytics_data_api.BatchRunReportsRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.batch_run_reports,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def batch_run_pivot_reports(
+ self,
+ request: analytics_data_api.BatchRunPivotReportsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.BatchRunPivotReportsResponse:
+ r"""Returns multiple pivot reports in a batch. All
+ reports must be for the same GA4 Property.
+
+ Args:
+ request (:class:`google.analytics.data_v1beta.types.BatchRunPivotReportsRequest`):
+ The request object. The batch request containing
+ multiple pivot report requests.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.BatchRunPivotReportsResponse:
+ The batch response containing
+ multiple pivot reports.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = analytics_data_api.BatchRunPivotReportsRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.batch_run_pivot_reports,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def get_metadata(
+ self,
+ request: analytics_data_api.GetMetadataRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.Metadata:
+ r"""Returns metadata for dimensions and metrics available in
+ reporting methods. Used to explore the dimensions and metrics.
+ In this method, a Google Analytics GA4 Property Identifier is
+ specified in the request, and the metadata response includes
+ Custom dimensions and metrics as well as Universal metadata.
+
+ For example, if a custom metric with parameter name
+ ``levels_unlocked`` is registered to a property, the Metadata
+ response will contain ``customEvent:levels_unlocked``. Universal
+ metadata are dimensions and metrics applicable to any property
+ such as ``country`` and ``totalUsers``.
+
+ Args:
+ request (:class:`google.analytics.data_v1beta.types.GetMetadataRequest`):
+ The request object. Request for a property's dimension
+ and metric metadata.
+ name (:class:`str`):
+ Required. The resource name of the metadata to retrieve.
+ This name field is specified in the URL path and not URL
+ parameters. Property is a numeric Google Analytics GA4
+ Property identifier. To learn more, see `where to find
+ your Property
+ ID <https://developers.google.com/analytics/devguides/reporting/data/v1/property-id>`__.
+
+ Example: properties/1234/metadata
+
+ Set the Property ID to 0 for dimensions and metrics
+ common to all properties. In this special mode, this
+ method will not return custom dimensions and metrics.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.Metadata:
+ The dimensions and metrics currently
+ accepted in reporting methods.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = analytics_data_api.GetMetadataRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_metadata,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def run_realtime_report(
+ self,
+ request: analytics_data_api.RunRealtimeReportRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.RunRealtimeReportResponse:
+ r"""The Google Analytics Realtime API returns a
+ customized report of realtime event data for your
+ property. These reports show events and usage from the
+ last 30 minutes.
+
+ Args:
+ request (:class:`google.analytics.data_v1beta.types.RunRealtimeReportRequest`):
+ The request object. The request to generate a realtime
+ report.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.RunRealtimeReportResponse:
+ The response realtime report table
+ corresponding to a request.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ request = analytics_data_api.RunRealtimeReportRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.run_realtime_report,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-analytics-data",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("BetaAnalyticsDataAsyncClient",)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/google/analytics/data_v1beta/services/beta_analytics_data/client.py
new file mode 100644
index 0000000..c26aa32
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/client.py
@@ -0,0 +1,733 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.analytics.data_v1beta.services.beta_analytics_data import pagers
+from google.analytics.data_v1beta.types import analytics_data_api
+from google.analytics.data_v1beta.types import data
+
+from .transports.base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import BetaAnalyticsDataGrpcTransport
+from .transports.grpc_asyncio import BetaAnalyticsDataGrpcAsyncIOTransport
+
+
+class BetaAnalyticsDataClientMeta(type):
+ """Metaclass for the BetaAnalyticsData client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[BetaAnalyticsDataTransport]]
+ _transport_registry["grpc"] = BetaAnalyticsDataGrpcTransport
+ _transport_registry["grpc_asyncio"] = BetaAnalyticsDataGrpcAsyncIOTransport
+
+ def get_transport_class(
+ cls, label: str = None,
+ ) -> Type[BetaAnalyticsDataTransport]:
+ """Return an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class BetaAnalyticsDataClient(metaclass=BetaAnalyticsDataClientMeta):
+ """Google Analytics reporting data service."""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "analyticsdata.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ BetaAnalyticsDataClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ BetaAnalyticsDataClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> BetaAnalyticsDataTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ BetaAnalyticsDataTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def metadata_path(property: str,) -> str:
+ """Return a fully-qualified metadata string."""
+ return "properties/{property}/metadata".format(property=property,)
+
+ @staticmethod
+ def parse_metadata_path(path: str) -> Dict[str, str]:
+ """Parse a metadata path into its component segments."""
+ m = re.match(r"^properties/(?P.+?)/metadata$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, BetaAnalyticsDataTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the beta analytics data client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, BetaAnalyticsDataTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. The GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+                (2) If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, BetaAnalyticsDataTransport):
+ # transport is a BetaAnalyticsDataTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ )
+
+ def run_report(
+ self,
+ request: analytics_data_api.RunReportRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.RunReportPager:
+ r"""Returns a customized report of your Google Analytics
+ event data. Reports contain statistics derived from data
+ collected by the Google Analytics tracking code. The
+        data returned from the API is a table with columns
+ for the requested dimensions and metrics. Metrics are
+ individual measurements of user activity on your
+ property, such as active users or event count.
+ Dimensions break down metrics across some common
+ criteria, such as country or event name.
+
+ Args:
+ request (google.analytics.data_v1beta.types.RunReportRequest):
+ The request object. The request to generate a report.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.services.beta_analytics_data.pagers.RunReportPager:
+ The response report table
+ corresponding to a request.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
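+        Example (an illustrative sketch; the property ID and field values
+        shown are assumptions)::
+
+            request = analytics_data_api.RunReportRequest(
+                property="properties/1234",
+                metrics=[{"name": "activeUsers"}],
+                date_ranges=[{"start_date": "7daysAgo", "end_date": "today"}],
+            )
+            pager = client.run_report(request)
+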
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a analytics_data_api.RunReportRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, analytics_data_api.RunReportRequest):
+ request = analytics_data_api.RunReportRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.run_report]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.RunReportPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def run_pivot_report(
+ self,
+ request: analytics_data_api.RunPivotReportRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.RunPivotReportResponse:
+ r"""Returns a customized pivot report of your Google
+ Analytics event data. Pivot reports are more advanced
+ and expressive formats than regular reports. In a pivot
+ report, dimensions are only visible if they are included
+ in a pivot. Multiple pivots can be specified to further
+ dissect your data.
+
+ Args:
+ request (google.analytics.data_v1beta.types.RunPivotReportRequest):
+ The request object. The request to generate a pivot
+ report.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.RunPivotReportResponse:
+ The response pivot report table
+ corresponding to a pivot request.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a analytics_data_api.RunPivotReportRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, analytics_data_api.RunPivotReportRequest):
+ request = analytics_data_api.RunPivotReportRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.run_pivot_report]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def batch_run_reports(
+ self,
+ request: analytics_data_api.BatchRunReportsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.BatchRunReportsResponse:
+ r"""Returns multiple reports in a batch. All reports must
+ be for the same GA4 Property.
+
+ Args:
+ request (google.analytics.data_v1beta.types.BatchRunReportsRequest):
+ The request object. The batch request containing
+ multiple report requests.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.BatchRunReportsResponse:
+ The batch response containing
+ multiple reports.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a analytics_data_api.BatchRunReportsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, analytics_data_api.BatchRunReportsRequest):
+ request = analytics_data_api.BatchRunReportsRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.batch_run_reports]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def batch_run_pivot_reports(
+ self,
+ request: analytics_data_api.BatchRunPivotReportsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.BatchRunPivotReportsResponse:
+ r"""Returns multiple pivot reports in a batch. All
+ reports must be for the same GA4 Property.
+
+ Args:
+ request (google.analytics.data_v1beta.types.BatchRunPivotReportsRequest):
+ The request object. The batch request containing
+ multiple pivot report requests.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.BatchRunPivotReportsResponse:
+ The batch response containing
+ multiple pivot reports.
+
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a analytics_data_api.BatchRunPivotReportsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, analytics_data_api.BatchRunPivotReportsRequest):
+ request = analytics_data_api.BatchRunPivotReportsRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.batch_run_pivot_reports]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def get_metadata(
+ self,
+ request: analytics_data_api.GetMetadataRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.Metadata:
+ r"""Returns metadata for dimensions and metrics available in
+ reporting methods. Used to explore the dimensions and metrics.
+ In this method, a Google Analytics GA4 Property Identifier is
+ specified in the request, and the metadata response includes
+ Custom dimensions and metrics as well as Universal metadata.
+
+        For example, if a custom metric with parameter name
+ ``levels_unlocked`` is registered to a property, the Metadata
+ response will contain ``customEvent:levels_unlocked``. Universal
+ metadata are dimensions and metrics applicable to any property
+ such as ``country`` and ``totalUsers``.
+
+ Args:
+ request (google.analytics.data_v1beta.types.GetMetadataRequest):
+ The request object. Request for a property's dimension
+ and metric metadata.
+ name (str):
+ Required. The resource name of the metadata to retrieve.
+ This name field is specified in the URL path and not URL
+ parameters. Property is a numeric Google Analytics GA4
+ Property identifier. To learn more, see `where to find
+ your Property
+                ID <https://developers.google.com/analytics/devguides/reporting/data/v1/property-id>`__.
+
+ Example: properties/1234/metadata
+
+ Set the Property ID to 0 for dimensions and metrics
+ common to all properties. In this special mode, this
+ method will not return custom dimensions and metrics.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.Metadata:
+ The dimensions and metrics currently
+ accepted in reporting methods.
+
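+        Example (illustrative; the property ID is an assumption)::
+
+            response = client.get_metadata(name="properties/1234/metadata")
+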
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a analytics_data_api.GetMetadataRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, analytics_data_api.GetMetadataRequest):
+ request = analytics_data_api.GetMetadataRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_metadata]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def run_realtime_report(
+ self,
+ request: analytics_data_api.RunRealtimeReportRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> analytics_data_api.RunRealtimeReportResponse:
+ r"""The Google Analytics Realtime API returns a
+ customized report of realtime event data for your
+ property. These reports show events and usage from the
+ last 30 minutes.
+
+ Args:
+ request (google.analytics.data_v1beta.types.RunRealtimeReportRequest):
+ The request object. The request to generate a realtime
+ report.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.analytics.data_v1beta.types.RunRealtimeReportResponse:
+ The response realtime report table
+ corresponding to a request.
+
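+        Example (an illustrative sketch; the property ID is an assumption)::
+
+            request = analytics_data_api.RunRealtimeReportRequest(
+                property="properties/1234",
+            )
+            response = client.run_realtime_report(request)
+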
+ """
+ # Create or coerce a protobuf request object.
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a analytics_data_api.RunRealtimeReportRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, analytics_data_api.RunRealtimeReportRequest):
+ request = analytics_data_api.RunRealtimeReportRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.run_realtime_report]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-analytics-data",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("BetaAnalyticsDataClient",)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py b/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py
new file mode 100644
index 0000000..6361325
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/pagers.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
+
+from google.analytics.data_v1beta.types import analytics_data_api
+from google.analytics.data_v1beta.types import data
+
+
+class RunReportPager:
+ """A pager for iterating through ``run_report`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.analytics.data_v1beta.types.RunReportResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``dimension_headers`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``RunReport`` requests and continue to iterate
+ through the ``dimension_headers`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.analytics.data_v1beta.types.RunReportResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
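+
+    For example (an illustrative sketch; ``client`` and ``request`` are
+    assumed to exist)::
+
+        pager = client.run_report(request)
+        for header in pager:  # yields data.DimensionHeader items across pages
+            print(header.name)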
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., analytics_data_api.RunReportResponse],
+ request: analytics_data_api.RunReportRequest,
+ response: analytics_data_api.RunReportResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.analytics.data_v1beta.types.RunReportRequest):
+ The initial request object.
+ response (google.analytics.data_v1beta.types.RunReportResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = analytics_data_api.RunReportRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[analytics_data_api.RunReportResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[data.DimensionHeader]:
+ for page in self.pages:
+ yield from page.dimension_headers
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class RunReportAsyncPager:
+ """A pager for iterating through ``run_report`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.analytics.data_v1beta.types.RunReportResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``dimension_headers`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``RunReport`` requests and continue to iterate
+ through the ``dimension_headers`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.analytics.data_v1beta.types.RunReportResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
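+
+    For example (an illustrative sketch against the async client; ``client``
+    and ``request`` are assumed to exist)::
+
+        pager = await client.run_report(request)
+        async for header in pager:  # resolves additional pages as needed
+            print(header.name)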
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[analytics_data_api.RunReportResponse]],
+ request: analytics_data_api.RunReportRequest,
+ response: analytics_data_api.RunReportResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.analytics.data_v1beta.types.RunReportRequest):
+ The initial request object.
+ response (google.analytics.data_v1beta.types.RunReportResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = analytics_data_api.RunReportRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[analytics_data_api.RunReportResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[data.DimensionHeader]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.dimension_headers:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/transports/__init__.py b/google/analytics/data_v1beta/services/beta_analytics_data/transports/__init__.py
new file mode 100644
index 0000000..1c9d44a
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/transports/__init__.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import BetaAnalyticsDataTransport
+from .grpc import BetaAnalyticsDataGrpcTransport
+from .grpc_asyncio import BetaAnalyticsDataGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[BetaAnalyticsDataTransport]]
+_transport_registry["grpc"] = BetaAnalyticsDataGrpcTransport
+_transport_registry["grpc_asyncio"] = BetaAnalyticsDataGrpcAsyncIOTransport
+
+__all__ = (
+ "BetaAnalyticsDataTransport",
+ "BetaAnalyticsDataGrpcTransport",
+ "BetaAnalyticsDataGrpcAsyncIOTransport",
+)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py b/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py
new file mode 100644
index 0000000..430c9dc
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/transports/base.py
@@ -0,0 +1,204 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+
+from google.analytics.data_v1beta.types import analytics_data_api
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-analytics-data",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class BetaAnalyticsDataTransport(abc.ABC):
+ """Abstract transport class for BetaAnalyticsData."""
+
+ AUTH_SCOPES = (
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "analyticsdata.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=scopes, quota_project_id=quota_project_id
+ )
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.run_report: gapic_v1.method.wrap_method(
+ self.run_report, default_timeout=60.0, client_info=client_info,
+ ),
+ self.run_pivot_report: gapic_v1.method.wrap_method(
+ self.run_pivot_report, default_timeout=60.0, client_info=client_info,
+ ),
+ self.batch_run_reports: gapic_v1.method.wrap_method(
+ self.batch_run_reports, default_timeout=60.0, client_info=client_info,
+ ),
+ self.batch_run_pivot_reports: gapic_v1.method.wrap_method(
+ self.batch_run_pivot_reports,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_metadata: gapic_v1.method.wrap_method(
+ self.get_metadata, default_timeout=None, client_info=client_info,
+ ),
+ self.run_realtime_report: gapic_v1.method.wrap_method(
+ self.run_realtime_report, default_timeout=60.0, client_info=client_info,
+ ),
+ }
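+
+    # Illustrative note (not generated code): the client resolves these
+    # wrapped methods by the bound method object, e.g.
+    #   rpc = transport._wrapped_methods[transport.run_report]
+    #   response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)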
+
+ @property
+ def run_report(
+ self,
+ ) -> typing.Callable[
+ [analytics_data_api.RunReportRequest],
+ typing.Union[
+ analytics_data_api.RunReportResponse,
+ typing.Awaitable[analytics_data_api.RunReportResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def run_pivot_report(
+ self,
+ ) -> typing.Callable[
+ [analytics_data_api.RunPivotReportRequest],
+ typing.Union[
+ analytics_data_api.RunPivotReportResponse,
+ typing.Awaitable[analytics_data_api.RunPivotReportResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def batch_run_reports(
+ self,
+ ) -> typing.Callable[
+ [analytics_data_api.BatchRunReportsRequest],
+ typing.Union[
+ analytics_data_api.BatchRunReportsResponse,
+ typing.Awaitable[analytics_data_api.BatchRunReportsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def batch_run_pivot_reports(
+ self,
+ ) -> typing.Callable[
+ [analytics_data_api.BatchRunPivotReportsRequest],
+ typing.Union[
+ analytics_data_api.BatchRunPivotReportsResponse,
+ typing.Awaitable[analytics_data_api.BatchRunPivotReportsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_metadata(
+ self,
+ ) -> typing.Callable[
+ [analytics_data_api.GetMetadataRequest],
+ typing.Union[
+ analytics_data_api.Metadata, typing.Awaitable[analytics_data_api.Metadata]
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def run_realtime_report(
+ self,
+ ) -> typing.Callable[
+ [analytics_data_api.RunRealtimeReportRequest],
+ typing.Union[
+ analytics_data_api.RunRealtimeReportResponse,
+ typing.Awaitable[analytics_data_api.RunRealtimeReportResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("BetaAnalyticsDataTransport",)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py b/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py
new file mode 100644
index 0000000..c2ebac2
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc.py
@@ -0,0 +1,450 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.analytics.data_v1beta.types import analytics_data_api
+
+from .base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO
+
+
+class BetaAnalyticsDataGrpcTransport(BetaAnalyticsDataTransport):
+ """gRPC backend transport for BetaAnalyticsData.
+
+ Google Analytics reporting data service.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "analyticsdata.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._ssl_channel_credentials = ssl_channel_credentials
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ elif api_mtls_endpoint:
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=self._ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "analyticsdata.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+            host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
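+
+    # Illustrative use (a sketch, not part of the generated surface): build
+    # the channel explicitly and hand it to the transport, which then skips
+    # channel creation:
+    #   channel = BetaAnalyticsDataGrpcTransport.create_channel()
+    #   transport = BetaAnalyticsDataGrpcTransport(channel=channel)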
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def run_report(
+ self,
+ ) -> Callable[
+ [analytics_data_api.RunReportRequest], analytics_data_api.RunReportResponse
+ ]:
+ r"""Return a callable for the run report method over gRPC.
+
+ Returns a customized report of your Google Analytics
+ event data. Reports contain statistics derived from data
+ collected by the Google Analytics tracking code. The
+        data returned from the API is a table with columns
+ for the requested dimensions and metrics. Metrics are
+ individual measurements of user activity on your
+ property, such as active users or event count.
+ Dimensions break down metrics across some common
+ criteria, such as country or event name.
+
+ Returns:
+ Callable[[~.RunReportRequest],
+ ~.RunReportResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_report" not in self._stubs:
+ self._stubs["run_report"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/RunReport",
+ request_serializer=analytics_data_api.RunReportRequest.serialize,
+ response_deserializer=analytics_data_api.RunReportResponse.deserialize,
+ )
+ return self._stubs["run_report"]
+
+ @property
+ def run_pivot_report(
+ self,
+ ) -> Callable[
+ [analytics_data_api.RunPivotReportRequest],
+ analytics_data_api.RunPivotReportResponse,
+ ]:
+ r"""Return a callable for the run pivot report method over gRPC.
+
+ Returns a customized pivot report of your Google
+ Analytics event data. Pivot reports are more advanced
+ and expressive formats than regular reports. In a pivot
+ report, dimensions are only visible if they are included
+ in a pivot. Multiple pivots can be specified to further
+ dissect your data.
+
+ Returns:
+ Callable[[~.RunPivotReportRequest],
+ ~.RunPivotReportResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_pivot_report" not in self._stubs:
+ self._stubs["run_pivot_report"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/RunPivotReport",
+ request_serializer=analytics_data_api.RunPivotReportRequest.serialize,
+ response_deserializer=analytics_data_api.RunPivotReportResponse.deserialize,
+ )
+ return self._stubs["run_pivot_report"]
+
+ @property
+ def batch_run_reports(
+ self,
+ ) -> Callable[
+ [analytics_data_api.BatchRunReportsRequest],
+ analytics_data_api.BatchRunReportsResponse,
+ ]:
+ r"""Return a callable for the batch run reports method over gRPC.
+
+ Returns multiple reports in a batch. All reports must
+ be for the same GA4 Property.
+
+ Returns:
+ Callable[[~.BatchRunReportsRequest],
+ ~.BatchRunReportsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_run_reports" not in self._stubs:
+ self._stubs["batch_run_reports"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunReports",
+ request_serializer=analytics_data_api.BatchRunReportsRequest.serialize,
+ response_deserializer=analytics_data_api.BatchRunReportsResponse.deserialize,
+ )
+ return self._stubs["batch_run_reports"]
+
+ @property
+ def batch_run_pivot_reports(
+ self,
+ ) -> Callable[
+ [analytics_data_api.BatchRunPivotReportsRequest],
+ analytics_data_api.BatchRunPivotReportsResponse,
+ ]:
+ r"""Return a callable for the batch run pivot reports method over gRPC.
+
+ Returns multiple pivot reports in a batch. All
+ reports must be for the same GA4 Property.
+
+ Returns:
+ Callable[[~.BatchRunPivotReportsRequest],
+ ~.BatchRunPivotReportsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_run_pivot_reports" not in self._stubs:
+ self._stubs["batch_run_pivot_reports"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunPivotReports",
+ request_serializer=analytics_data_api.BatchRunPivotReportsRequest.serialize,
+ response_deserializer=analytics_data_api.BatchRunPivotReportsResponse.deserialize,
+ )
+ return self._stubs["batch_run_pivot_reports"]
+
+ @property
+ def get_metadata(
+ self,
+ ) -> Callable[[analytics_data_api.GetMetadataRequest], analytics_data_api.Metadata]:
+ r"""Return a callable for the get metadata method over gRPC.
+
+ Returns metadata for dimensions and metrics available in
+ reporting methods. Used to explore the dimensions and metrics.
+ In this method, a Google Analytics GA4 Property Identifier is
+ specified in the request, and the metadata response includes
+ Custom dimensions and metrics as well as Universal metadata.
+
+        For example, if a custom metric with parameter name
+ ``levels_unlocked`` is registered to a property, the Metadata
+ response will contain ``customEvent:levels_unlocked``. Universal
+ metadata are dimensions and metrics applicable to any property
+ such as ``country`` and ``totalUsers``.
+
+ Returns:
+ Callable[[~.GetMetadataRequest],
+ ~.Metadata]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_metadata" not in self._stubs:
+ self._stubs["get_metadata"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/GetMetadata",
+ request_serializer=analytics_data_api.GetMetadataRequest.serialize,
+ response_deserializer=analytics_data_api.Metadata.deserialize,
+ )
+ return self._stubs["get_metadata"]
+
+ @property
+ def run_realtime_report(
+ self,
+ ) -> Callable[
+ [analytics_data_api.RunRealtimeReportRequest],
+ analytics_data_api.RunRealtimeReportResponse,
+ ]:
+ r"""Return a callable for the run realtime report method over gRPC.
+
+ The Google Analytics Realtime API returns a
+ customized report of realtime event data for your
+ property. These reports show events and usage from the
+ last 30 minutes.
+
+ Returns:
+ Callable[[~.RunRealtimeReportRequest],
+ ~.RunRealtimeReportResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_realtime_report" not in self._stubs:
+ self._stubs["run_realtime_report"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/RunRealtimeReport",
+ request_serializer=analytics_data_api.RunRealtimeReportRequest.serialize,
+ response_deserializer=analytics_data_api.RunRealtimeReportResponse.deserialize,
+ )
+ return self._stubs["run_realtime_report"]
+
+
+__all__ = ("BetaAnalyticsDataGrpcTransport",)
diff --git a/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py b/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py
new file mode 100644
index 0000000..dcf5965
--- /dev/null
+++ b/google/analytics/data_v1beta/services/beta_analytics_data/transports/grpc_asyncio.py
@@ -0,0 +1,457 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.analytics.data_v1beta.types import analytics_data_api
+
+from .base import BetaAnalyticsDataTransport, DEFAULT_CLIENT_INFO
+from .grpc import BetaAnalyticsDataGrpcTransport
+
+
+class BetaAnalyticsDataGrpcAsyncIOTransport(BetaAnalyticsDataTransport):
+ """gRPC AsyncIO backend transport for BetaAnalyticsData.
+
+ Google Analytics reporting data service.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "analyticsdata.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+            host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
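+
+    # Illustrative use (a sketch): create the AsyncIO channel up front and
+    # pass it in; the transport then uses it instead of creating its own:
+    #   channel = BetaAnalyticsDataGrpcAsyncIOTransport.create_channel()
+    #   transport = BetaAnalyticsDataGrpcAsyncIOTransport(channel=channel)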
+
+ def __init__(
+ self,
+ *,
+ host: str = "analyticsdata.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._ssl_channel_credentials = ssl_channel_credentials
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ elif api_mtls_endpoint:
+ host = (
+ api_mtls_endpoint
+ if ":" in api_mtls_endpoint
+ else api_mtls_endpoint + ":443"
+ )
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ ssl_credentials = SslCredentials().ssl_credentials
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=self._ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Run the base constructor.
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ self._stubs = {}
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def run_report(
+ self,
+ ) -> Callable[
+ [analytics_data_api.RunReportRequest],
+ Awaitable[analytics_data_api.RunReportResponse],
+ ]:
+ r"""Return a callable for the run report method over gRPC.
+
+ Returns a customized report of your Google Analytics
+ event data. Reports contain statistics derived from data
+ collected by the Google Analytics tracking code. The
+ data returned from the API is a table with columns
+ for the requested dimensions and metrics. Metrics are
+ individual measurements of user activity on your
+ property, such as active users or event count.
+ Dimensions break down metrics across some common
+ criteria, such as country or event name.
+
+ Returns:
+ Callable[[~.RunReportRequest],
+ Awaitable[~.RunReportResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_report" not in self._stubs:
+ self._stubs["run_report"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/RunReport",
+ request_serializer=analytics_data_api.RunReportRequest.serialize,
+ response_deserializer=analytics_data_api.RunReportResponse.deserialize,
+ )
+ return self._stubs["run_report"]
+
+ @property
+ def run_pivot_report(
+ self,
+ ) -> Callable[
+ [analytics_data_api.RunPivotReportRequest],
+ Awaitable[analytics_data_api.RunPivotReportResponse],
+ ]:
+ r"""Return a callable for the run pivot report method over gRPC.
+
+ Returns a customized pivot report of your Google
+ Analytics event data. Pivot reports are more advanced
+ and expressive formats than regular reports. In a pivot
+ report, dimensions are only visible if they are included
+ in a pivot. Multiple pivots can be specified to further
+ dissect your data.
+
+ Returns:
+ Callable[[~.RunPivotReportRequest],
+ Awaitable[~.RunPivotReportResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_pivot_report" not in self._stubs:
+ self._stubs["run_pivot_report"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/RunPivotReport",
+ request_serializer=analytics_data_api.RunPivotReportRequest.serialize,
+ response_deserializer=analytics_data_api.RunPivotReportResponse.deserialize,
+ )
+ return self._stubs["run_pivot_report"]
+
+ @property
+ def batch_run_reports(
+ self,
+ ) -> Callable[
+ [analytics_data_api.BatchRunReportsRequest],
+ Awaitable[analytics_data_api.BatchRunReportsResponse],
+ ]:
+ r"""Return a callable for the batch run reports method over gRPC.
+
+ Returns multiple reports in a batch. All reports must
+ be for the same GA4 Property.
+
+ Returns:
+ Callable[[~.BatchRunReportsRequest],
+ Awaitable[~.BatchRunReportsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_run_reports" not in self._stubs:
+ self._stubs["batch_run_reports"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunReports",
+ request_serializer=analytics_data_api.BatchRunReportsRequest.serialize,
+ response_deserializer=analytics_data_api.BatchRunReportsResponse.deserialize,
+ )
+ return self._stubs["batch_run_reports"]
+
+ @property
+ def batch_run_pivot_reports(
+ self,
+ ) -> Callable[
+ [analytics_data_api.BatchRunPivotReportsRequest],
+ Awaitable[analytics_data_api.BatchRunPivotReportsResponse],
+ ]:
+ r"""Return a callable for the batch run pivot reports method over gRPC.
+
+ Returns multiple pivot reports in a batch. All
+ reports must be for the same GA4 Property.
+
+ Returns:
+ Callable[[~.BatchRunPivotReportsRequest],
+ Awaitable[~.BatchRunPivotReportsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "batch_run_pivot_reports" not in self._stubs:
+ self._stubs["batch_run_pivot_reports"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/BatchRunPivotReports",
+ request_serializer=analytics_data_api.BatchRunPivotReportsRequest.serialize,
+ response_deserializer=analytics_data_api.BatchRunPivotReportsResponse.deserialize,
+ )
+ return self._stubs["batch_run_pivot_reports"]
+
+ @property
+ def get_metadata(
+ self,
+ ) -> Callable[
+ [analytics_data_api.GetMetadataRequest], Awaitable[analytics_data_api.Metadata]
+ ]:
+ r"""Return a callable for the get metadata method over gRPC.
+
+ Returns metadata for dimensions and metrics available in
+ reporting methods. Used to explore the dimensions and metrics.
+ In this method, a Google Analytics GA4 Property Identifier is
+ specified in the request, and the metadata response includes
+ Custom dimensions and metrics as well as Universal metadata.
+
+ For example if a custom metric with parameter name
+ ``levels_unlocked`` is registered to a property, the Metadata
+ response will contain ``customEvent:levels_unlocked``. Universal
+ metadata are dimensions and metrics applicable to any property
+ such as ``country`` and ``totalUsers``.
+
+ Returns:
+ Callable[[~.GetMetadataRequest],
+ Awaitable[~.Metadata]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_metadata" not in self._stubs:
+ self._stubs["get_metadata"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/GetMetadata",
+ request_serializer=analytics_data_api.GetMetadataRequest.serialize,
+ response_deserializer=analytics_data_api.Metadata.deserialize,
+ )
+ return self._stubs["get_metadata"]
+
+ @property
+ def run_realtime_report(
+ self,
+ ) -> Callable[
+ [analytics_data_api.RunRealtimeReportRequest],
+ Awaitable[analytics_data_api.RunRealtimeReportResponse],
+ ]:
+ r"""Return a callable for the run realtime report method over gRPC.
+
+ The Google Analytics Realtime API returns a
+ customized report of realtime event data for your
+ property. These reports show events and usage from the
+ last 30 minutes.
+
+ Returns:
+ Callable[[~.RunRealtimeReportRequest],
+ Awaitable[~.RunRealtimeReportResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_realtime_report" not in self._stubs:
+ self._stubs["run_realtime_report"] = self.grpc_channel.unary_unary(
+ "/google.analytics.data.v1beta.BetaAnalyticsData/RunRealtimeReport",
+ request_serializer=analytics_data_api.RunRealtimeReportRequest.serialize,
+ response_deserializer=analytics_data_api.RunRealtimeReportResponse.deserialize,
+ )
+ return self._stubs["run_realtime_report"]
+
+
+__all__ = ("BetaAnalyticsDataGrpcAsyncIOTransport",)
diff --git a/google/analytics/data_v1beta/types/__init__.py b/google/analytics/data_v1beta/types/__init__.py
new file mode 100644
index 0000000..6b5beb8
--- /dev/null
+++ b/google/analytics/data_v1beta/types/__init__.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .data import (
+ DateRange,
+ Dimension,
+ DimensionExpression,
+ Metric,
+ FilterExpression,
+ FilterExpressionList,
+ Filter,
+ OrderBy,
+ Pivot,
+ CohortSpec,
+ Cohort,
+ CohortsRange,
+ CohortReportSettings,
+ ResponseMetaData,
+ DimensionHeader,
+ MetricHeader,
+ PivotHeader,
+ PivotDimensionHeader,
+ Row,
+ DimensionValue,
+ MetricValue,
+ NumericValue,
+ PropertyQuota,
+ QuotaStatus,
+ DimensionMetadata,
+ MetricMetadata,
+ MetricAggregation,
+ MetricType,
+)
+from .analytics_data_api import (
+ Metadata,
+ RunReportRequest,
+ RunReportResponse,
+ RunPivotReportRequest,
+ RunPivotReportResponse,
+ BatchRunReportsRequest,
+ BatchRunReportsResponse,
+ BatchRunPivotReportsRequest,
+ BatchRunPivotReportsResponse,
+ GetMetadataRequest,
+ RunRealtimeReportRequest,
+ RunRealtimeReportResponse,
+)
+
+__all__ = (
+ "DateRange",
+ "Dimension",
+ "DimensionExpression",
+ "Metric",
+ "FilterExpression",
+ "FilterExpressionList",
+ "Filter",
+ "OrderBy",
+ "Pivot",
+ "CohortSpec",
+ "Cohort",
+ "CohortsRange",
+ "CohortReportSettings",
+ "ResponseMetaData",
+ "DimensionHeader",
+ "MetricHeader",
+ "PivotHeader",
+ "PivotDimensionHeader",
+ "Row",
+ "DimensionValue",
+ "MetricValue",
+ "NumericValue",
+ "PropertyQuota",
+ "QuotaStatus",
+ "DimensionMetadata",
+ "MetricMetadata",
+ "MetricAggregation",
+ "MetricType",
+ "Metadata",
+ "RunReportRequest",
+ "RunReportResponse",
+ "RunPivotReportRequest",
+ "RunPivotReportResponse",
+ "BatchRunReportsRequest",
+ "BatchRunReportsResponse",
+ "BatchRunPivotReportsRequest",
+ "BatchRunPivotReportsResponse",
+ "GetMetadataRequest",
+ "RunRealtimeReportRequest",
+ "RunRealtimeReportResponse",
+)
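
Because this ``__init__.py`` re-exports every message and enum, the flat and namespaced import styles below are interchangeable (a small sketch, assuming only the modules added in this diff)::

    from google.analytics.data_v1beta import types
    from google.analytics.data_v1beta.types import DateRange, Dimension

    # Both spellings resolve to the same proto-plus classes.
    assert types.DateRange is DateRange
    assert types.Dimension is Dimension

    print(Dimension(name="city"))
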
diff --git a/google/analytics/data_v1beta/types/analytics_data_api.py b/google/analytics/data_v1beta/types/analytics_data_api.py
new file mode 100644
index 0000000..b12602b
--- /dev/null
+++ b/google/analytics/data_v1beta/types/analytics_data_api.py
@@ -0,0 +1,678 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+from google.analytics.data_v1beta.types import data
+
+
+__protobuf__ = proto.module(
+ package="google.analytics.data.v1beta",
+ manifest={
+ "Metadata",
+ "RunReportRequest",
+ "RunReportResponse",
+ "RunPivotReportRequest",
+ "RunPivotReportResponse",
+ "BatchRunReportsRequest",
+ "BatchRunReportsResponse",
+ "BatchRunPivotReportsRequest",
+ "BatchRunPivotReportsResponse",
+ "GetMetadataRequest",
+ "RunRealtimeReportRequest",
+ "RunRealtimeReportResponse",
+ },
+)
+
+
+class Metadata(proto.Message):
+ r"""The dimensions and metrics currently accepted in reporting
+ methods.
+
+ Attributes:
+ name (str):
+ Resource name of this metadata.
+ dimensions (Sequence[google.analytics.data_v1beta.types.DimensionMetadata]):
+ The dimension descriptions.
+ metrics (Sequence[google.analytics.data_v1beta.types.MetricMetadata]):
+ The metric descriptions.
+ """
+
+ name = proto.Field(proto.STRING, number=3)
+
+ dimensions = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=data.DimensionMetadata,
+ )
+
+ metrics = proto.RepeatedField(proto.MESSAGE, number=2, message=data.MetricMetadata,)
+
+
+class RunReportRequest(proto.Message):
+ r"""The request to generate a report.
+
+ Attributes:
+ property (str):
+ A Google Analytics GA4 property identifier whose events are
+ tracked. Specified in the URL path and not the body. To
+ learn more, see `where to find your Property
+ ID `__.
+ Within a batch request, this property should either be
+ unspecified or consistent with the batch-level property.
+
+ Example: properties/1234
+ dimensions (Sequence[google.analytics.data_v1beta.types.Dimension]):
+ The dimensions requested and displayed.
+ metrics (Sequence[google.analytics.data_v1beta.types.Metric]):
+ The metrics requested and displayed.
+ date_ranges (Sequence[google.analytics.data_v1beta.types.DateRange]):
+ Date ranges of data to read. If multiple date ranges are
+ requested, each response row will contain a zero based date
+ range index. If two date ranges overlap, the event data for
+ the overlapping days is included in the response rows for
+ both date ranges. In a cohort request, this ``dateRanges``
+ must be unspecified.
+ dimension_filter (google.analytics.data_v1beta.types.FilterExpression):
+ The filter clause of dimensions. Dimensions
+ must be requested to be used in this filter.
+ Metrics cannot be used in this filter.
+ metric_filter (google.analytics.data_v1beta.types.FilterExpression):
+ The filter clause of metrics. Applied at post
+ aggregation phase, similar to SQL having-clause.
+ Metrics must be requested to be used in this
+ filter. Dimensions cannot be used in this
+ filter.
+ page_size (int):
+ Page size is for paging and specifies the maximum number of rows
+ to return. The API returns a maximum of 200,000 rows per
+ request, no matter how many you ask for. Page size must be
+ positive.
+
+ The API can also return fewer rows than the requested
+ ``pageSize``, if there aren't as many dimension values as
+ the ``pageSize``. For instance, there are fewer than 300
+ possible values for the dimension ``country``, so when
+ reporting on only ``country``, you can't get more than 300
+ rows, even if you set ``pageSize`` to a higher value.
+
+ To learn more about this pagination parameter, see
+ `Pagination `__.
+ page_token (str):
+ A continuation token to get the next page of the results.
+ Adding this to the request will return the next page of rows
+ after the ``pageToken``. The ``pageToken`` should be the
+ value returned in the ``nextPageToken`` parameter in the
+ response.
+
+ When paginating, all other parameters specified in
+ ``RunReportRequest`` must match the call that provided the
+ page token.
+
+ To learn more about this pagination parameter, see
+ `Pagination `__.
+ metric_aggregations (Sequence[google.analytics.data_v1beta.types.MetricAggregation]):
+ Aggregation of metrics. Aggregated metric values will be
+ shown in rows where the dimension_values are set to
+ "RESERVED_(MetricAggregation)".
+ order_bys (Sequence[google.analytics.data_v1beta.types.OrderBy]):
+ Specifies how rows are ordered in the
+ response.
+ currency_code (str):
+ A currency code in ISO4217 format, such as
+ "AED", "USD", "JPY". If the field is empty, the
+ report uses the property's default currency.
+ cohort_spec (google.analytics.data_v1beta.types.CohortSpec):
+ Cohort group associated with this request. If
+ there is a cohort group in the request the
+ 'cohort' dimension must be present.
+ keep_empty_rows (bool):
+ If false or unspecified, each row with all
+ metrics equal to 0 will not be returned. If
+ true, these rows will be returned if they are
+ not separately removed by a filter.
+ return_property_quota (bool):
+ Toggles whether to return the current state of this
+ Analytics Property's quota. Quota is returned in
+ `PropertyQuota <#PropertyQuota>`__.
+ """
+
+ property = proto.Field(proto.STRING, number=1)
+
+ dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,)
+
+ metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,)
+
+ date_ranges = proto.RepeatedField(proto.MESSAGE, number=4, message=data.DateRange,)
+
+ dimension_filter = proto.Field(
+ proto.MESSAGE, number=5, message=data.FilterExpression,
+ )
+
+ metric_filter = proto.Field(proto.MESSAGE, number=6, message=data.FilterExpression,)
+
+ page_size = proto.Field(proto.INT32, number=7)
+
+ page_token = proto.Field(proto.STRING, number=8)
+
+ metric_aggregations = proto.RepeatedField(
+ proto.ENUM, number=9, enum=data.MetricAggregation,
+ )
+
+ order_bys = proto.RepeatedField(proto.MESSAGE, number=10, message=data.OrderBy,)
+
+ currency_code = proto.Field(proto.STRING, number=11)
+
+ cohort_spec = proto.Field(proto.MESSAGE, number=12, message=data.CohortSpec,)
+
+ keep_empty_rows = proto.Field(proto.BOOL, number=13)
+
+ return_property_quota = proto.Field(proto.BOOL, number=14)
+
+
+class RunReportResponse(proto.Message):
+ r"""The response report table corresponding to a request.
+
+ Attributes:
+ dimension_headers (Sequence[google.analytics.data_v1beta.types.DimensionHeader]):
+ Describes dimension columns. The number of
+ DimensionHeaders and ordering of
+ DimensionHeaders matches the dimensions present
+ in rows.
+ metric_headers (Sequence[google.analytics.data_v1beta.types.MetricHeader]):
+ Describes metric columns. The number of
+ MetricHeaders and ordering of MetricHeaders
+ matches the metrics present in rows.
+ rows (Sequence[google.analytics.data_v1beta.types.Row]):
+ Rows of dimension value combinations and
+ metric values in the report.
+ totals (Sequence[google.analytics.data_v1beta.types.Row]):
+ If requested, the totaled values of metrics.
+ maximums (Sequence[google.analytics.data_v1beta.types.Row]):
+ If requested, the maximum values of metrics.
+ minimums (Sequence[google.analytics.data_v1beta.types.Row]):
+ If requested, the minimum values of metrics.
+ next_page_token (str):
+ A token that can be sent as ``pageToken`` in a subsequent
+ ``RunReportRequest`` call to retrieve the next page of
+ report rows. If this field is omitted, there are no
+ subsequent pages of report rows.
+
+ To learn more about this pagination parameter, see
+ `Pagination `__.
+ total_size (int):
+ The total number of rows in the query result. ``totalSize``
+ is independent of the number of rows returned in the
+ response, the ``pageSize`` request parameter, and the
+ ``pageToken`` request parameter. For example if a query
+ returns 175 rows and includes ``pageSize`` of 50 in the API
+ request, the response will contain ``totalSize`` of 175 but
+ only 50 rows.
+
+ To learn more about this pagination parameter, see
+ `Pagination `__.
+ metadata (google.analytics.data_v1beta.types.ResponseMetaData):
+ Metadata for the report.
+ property_quota (google.analytics.data_v1beta.types.PropertyQuota):
+ This Analytics Property's quota state
+ including this request.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ dimension_headers = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=data.DimensionHeader,
+ )
+
+ metric_headers = proto.RepeatedField(
+ proto.MESSAGE, number=2, message=data.MetricHeader,
+ )
+
+ rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,)
+
+ totals = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,)
+
+ maximums = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,)
+
+ minimums = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Row,)
+
+ next_page_token = proto.Field(proto.STRING, number=7)
+
+ total_size = proto.Field(proto.INT32, number=8)
+
+ metadata = proto.Field(proto.MESSAGE, number=9, message=data.ResponseMetaData,)
+
+ property_quota = proto.Field(proto.MESSAGE, number=10, message=data.PropertyQuota,)
+
+
+class RunPivotReportRequest(proto.Message):
+ r"""The request to generate a pivot report.
+
+ Attributes:
+ property (str):
+ A Google Analytics GA4 property identifier whose events are
+ tracked. Specified in the URL path and not the body. To
+ learn more, see `where to find your Property
+ ID `__.
+ Within a batch request, this property should either be
+ unspecified or consistent with the batch-level property.
+
+ Example: properties/1234
+ dimensions (Sequence[google.analytics.data_v1beta.types.Dimension]):
+ The dimensions requested. All defined dimensions must be
+ used by one of the following: dimension_expression,
+ dimension_filter, pivots, order_bys.
+ metrics (Sequence[google.analytics.data_v1beta.types.Metric]):
+ The metrics requested, at least one metric needs to be
+ specified. All defined metrics must be used by one of the
+ following: metric_expression, metric_filter, order_bys.
+ date_ranges (Sequence[google.analytics.data_v1beta.types.DateRange]):
+ The date range to retrieve event data for the report. If
+ multiple date ranges are specified, event data from each
+ date range is used in the report. A special dimension with
+ field name "dateRange" can be included in a Pivot's field
+ names; if included, the report compares between date ranges.
+ In a cohort request, this ``dateRanges`` must be
+ unspecified.
+ pivots (Sequence[google.analytics.data_v1beta.types.Pivot]):
+ Describes the visual format of the report's
+ dimensions in columns or rows. The union of the
+ fieldNames (dimension names) in all pivots must
+ be a subset of dimension names defined in
+ Dimensions. No two pivots can share a dimension.
+ A dimension is only visible if it appears in a
+ pivot.
+ dimension_filter (google.analytics.data_v1beta.types.FilterExpression):
+ The filter clause of dimensions. Dimensions
+ must be requested to be used in this filter.
+ Metrics cannot be used in this filter.
+ metric_filter (google.analytics.data_v1beta.types.FilterExpression):
+ The filter clause of metrics. Applied at post
+ aggregation phase, similar to SQL having-clause.
+ Metrics must be requested to be used in this
+ filter. Dimensions cannot be used in this
+ filter.
+ currency_code (str):
+ A currency code in ISO4217 format, such as
+ "AED", "USD", "JPY". If the field is empty, the
+ report uses the property's default currency.
+ cohort_spec (google.analytics.data_v1beta.types.CohortSpec):
+ Cohort group associated with this request. If
+ there is a cohort group in the request the
+ 'cohort' dimension must be present.
+ keep_empty_rows (bool):
+ If false or unspecified, each row with all
+ metrics equal to 0 will not be returned. If
+ true, these rows will be returned if they are
+ not separately removed by a filter.
+ return_property_quota (bool):
+ Toggles whether to return the current state of this
+ Analytics Property's quota. Quota is returned in
+ `PropertyQuota <#PropertyQuota>`__.
+ """
+
+ property = proto.Field(proto.STRING, number=1)
+
+ dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,)
+
+ metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,)
+
+ date_ranges = proto.RepeatedField(proto.MESSAGE, number=4, message=data.DateRange,)
+
+ pivots = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Pivot,)
+
+ dimension_filter = proto.Field(
+ proto.MESSAGE, number=6, message=data.FilterExpression,
+ )
+
+ metric_filter = proto.Field(proto.MESSAGE, number=7, message=data.FilterExpression,)
+
+ currency_code = proto.Field(proto.STRING, number=8)
+
+ cohort_spec = proto.Field(proto.MESSAGE, number=9, message=data.CohortSpec,)
+
+ keep_empty_rows = proto.Field(proto.BOOL, number=10)
+
+ return_property_quota = proto.Field(proto.BOOL, number=11)
+
+
+class RunPivotReportResponse(proto.Message):
+ r"""The response pivot report table corresponding to a pivot
+ request.
+
+ Attributes:
+ pivot_headers (Sequence[google.analytics.data_v1beta.types.PivotHeader]):
+ Summarizes the columns and rows created by a pivot. Each
+ pivot in the request produces one header in the response. If
+ we have a request like this:
+
+ ::
+
+ "pivots": [{
+ "fieldNames": ["country",
+ "city"]
+ },
+ {
+ "fieldNames": "eventName"
+ }]
+
+ We will have the following ``pivotHeaders`` in the response:
+
+ ::
+
+ "pivotHeaders" : [{
+ "dimensionHeaders": [{
+ "dimensionValues": [
+ { "value": "United Kingdom" },
+ { "value": "London" }
+ ]
+ },
+ {
+ "dimensionValues": [
+ { "value": "Japan" },
+ { "value": "Osaka" }
+ ]
+ }]
+ },
+ {
+ "dimensionHeaders": [{
+ "dimensionValues": [{ "value": "session_start" }]
+ },
+ {
+ "dimensionValues": [{ "value": "scroll" }]
+ }]
+ }]
+ dimension_headers (Sequence[google.analytics.data_v1beta.types.DimensionHeader]):
+ Describes dimension columns. The number of
+ DimensionHeaders and ordering of
+ DimensionHeaders matches the dimensions present
+ in rows.
+ metric_headers (Sequence[google.analytics.data_v1beta.types.MetricHeader]):
+ Describes metric columns. The number of
+ MetricHeaders and ordering of MetricHeaders
+ matches the metrics present in rows.
+ rows (Sequence[google.analytics.data_v1beta.types.Row]):
+ Rows of dimension value combinations and
+ metric values in the report.
+ aggregates (Sequence[google.analytics.data_v1beta.types.Row]):
+ Aggregation of metric values. Can be totals, minimums, or
+ maximums. The returned aggregations are controlled by the
+ metric_aggregations in the pivot. The type of aggregation
+ returned in each row is shown by the dimension_values which
+ are set to "RESERVED\_".
+ metadata (google.analytics.data_v1beta.types.ResponseMetaData):
+ Metadata for the report.
+ property_quota (google.analytics.data_v1beta.types.PropertyQuota):
+ This Analytics Property's quota state
+ including this request.
+ """
+
+ pivot_headers = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=data.PivotHeader,
+ )
+
+ dimension_headers = proto.RepeatedField(
+ proto.MESSAGE, number=2, message=data.DimensionHeader,
+ )
+
+ metric_headers = proto.RepeatedField(
+ proto.MESSAGE, number=3, message=data.MetricHeader,
+ )
+
+ rows = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,)
+
+ aggregates = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,)
+
+ metadata = proto.Field(proto.MESSAGE, number=6, message=data.ResponseMetaData,)
+
+ property_quota = proto.Field(proto.MESSAGE, number=7, message=data.PropertyQuota,)
+
+
+class BatchRunReportsRequest(proto.Message):
+ r"""The batch request containing multiple report requests.
+
+ Attributes:
+ property (str):
+ A Google Analytics GA4 property identifier whose events are
+ tracked. Specified in the URL path and not the body. To
+ learn more, see `where to find your Property
+ ID `__.
+ This property must be specified for the batch. The property
+ within RunReportRequest may either be unspecified or
+ consistent with this property.
+
+ Example: properties/1234
+ requests (Sequence[google.analytics.data_v1beta.types.RunReportRequest]):
+ Individual requests. Each request has a
+ separate report response. Each batch request is
+ allowed up to 5 requests.
+ """
+
+ property = proto.Field(proto.STRING, number=1)
+
+ requests = proto.RepeatedField(proto.MESSAGE, number=2, message="RunReportRequest",)
+
+
+class BatchRunReportsResponse(proto.Message):
+ r"""The batch response containing multiple reports.
+
+ Attributes:
+ reports (Sequence[google.analytics.data_v1beta.types.RunReportResponse]):
+ Individual responses. Each response has a
+ separate report request.
+ """
+
+ reports = proto.RepeatedField(proto.MESSAGE, number=1, message="RunReportResponse",)
+
+
+class BatchRunPivotReportsRequest(proto.Message):
+ r"""The batch request containing multiple pivot report requests.
+
+ Attributes:
+ property (str):
+ A Google Analytics GA4 property identifier whose events are
+ tracked. Specified in the URL path and not the body. To
+ learn more, see `where to find your Property
+ ID `__.
+ This property must be specified for the batch. The property
+ within RunPivotReportRequest may either be unspecified or
+ consistent with this property.
+
+ Example: properties/1234
+ requests (Sequence[google.analytics.data_v1beta.types.RunPivotReportRequest]):
+ Individual requests. Each request has a
+ separate pivot report response. Each batch
+ request is allowed up to 5 requests.
+ """
+
+ property = proto.Field(proto.STRING, number=1)
+
+ requests = proto.RepeatedField(
+ proto.MESSAGE, number=2, message="RunPivotReportRequest",
+ )
+
+
+class BatchRunPivotReportsResponse(proto.Message):
+ r"""The batch response containing multiple pivot reports.
+
+ Attributes:
+ pivot_reports (Sequence[google.analytics.data_v1beta.types.RunPivotReportResponse]):
+ Individual responses. Each response has a
+ separate pivot report request.
+ """
+
+ pivot_reports = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="RunPivotReportResponse",
+ )
+
+
+class GetMetadataRequest(proto.Message):
+ r"""Request for a property's dimension and metric metadata.
+
+ Attributes:
+ name (str):
+ Required. The resource name of the metadata to retrieve.
+ This name field is specified in the URL path and not URL
+ parameters. Property is a numeric Google Analytics GA4
+ Property identifier. To learn more, see `where to find your
+ Property
+ ID `__.
+
+ Example: properties/1234/metadata
+
+ Set the Property ID to 0 for dimensions and metrics common
+ to all properties. In this special mode, this method will
+ not return custom dimensions and metrics.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class RunRealtimeReportRequest(proto.Message):
+ r"""The request to generate a realtime report.
+
+ Attributes:
+ property (str):
+ A Google Analytics GA4 property identifier whose events are
+ tracked. Specified in the URL path and not the body. To
+ learn more, see `where to find your Property
+ ID `__.
+
+ Example: properties/1234
+ dimensions (Sequence[google.analytics.data_v1beta.types.Dimension]):
+ The dimensions requested and displayed.
+ metrics (Sequence[google.analytics.data_v1beta.types.Metric]):
+ The metrics requested and displayed.
+ dimension_filter (google.analytics.data_v1beta.types.FilterExpression):
+ The filter clause of dimensions. Dimensions
+ must be requested to be used in this filter.
+ Metrics cannot be used in this filter.
+ metric_filter (google.analytics.data_v1beta.types.FilterExpression):
+ The filter clause of metrics. Applied at post
+ aggregation phase, similar to SQL having-clause.
+ Metrics must be requested to be used in this
+ filter. Dimensions cannot be used in this
+ filter.
+ page_size (int):
+ Page size specifies the maximum number of rows to return. If
+ unspecified, up to 10,000 rows are returned. The API returns
+ a maximum of 100,000 rows per request, no matter how many
+ you ask for. Page size must be positive.
+
+ The API can also return fewer rows than the requested
+ ``pageSize``, if there aren't as many dimension values as
+ the ``pageSize``. For instance, there are fewer than 300
+ possible values for the dimension ``country``, so when
+ reporting on only ``country``, you can't get more than 300
+ rows, even if you set ``pageSize`` to a higher value.
+
+ To learn more about this pagination parameter, see
+ `Pagination `__.
+ metric_aggregations (Sequence[google.analytics.data_v1beta.types.MetricAggregation]):
+ Aggregation of metrics. Aggregated metric values will be
+ shown in rows where the dimension_values are set to
+ "RESERVED_(MetricAggregation)".
+ order_bys (Sequence[google.analytics.data_v1beta.types.OrderBy]):
+ Specifies how rows are ordered in the
+ response.
+ return_property_quota (bool):
+ Toggles whether to return the current state of this
+ Analytics Property's Realtime quota. Quota is returned in
+ `PropertyQuota <#PropertyQuota>`__.
+ """
+
+ property = proto.Field(proto.STRING, number=1)
+
+ dimensions = proto.RepeatedField(proto.MESSAGE, number=2, message=data.Dimension,)
+
+ metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Metric,)
+
+ dimension_filter = proto.Field(
+ proto.MESSAGE, number=4, message=data.FilterExpression,
+ )
+
+ metric_filter = proto.Field(proto.MESSAGE, number=5, message=data.FilterExpression,)
+
+ page_size = proto.Field(proto.INT32, number=6)
+
+ metric_aggregations = proto.RepeatedField(
+ proto.ENUM, number=7, enum=data.MetricAggregation,
+ )
+
+ order_bys = proto.RepeatedField(proto.MESSAGE, number=8, message=data.OrderBy,)
+
+ return_property_quota = proto.Field(proto.BOOL, number=9)
+
+
+class RunRealtimeReportResponse(proto.Message):
+ r"""The response realtime report table corresponding to a
+ request.
+
+ Attributes:
+ dimension_headers (Sequence[google.analytics.data_v1beta.types.DimensionHeader]):
+ Describes dimension columns. The number of
+ DimensionHeaders and ordering of
+ DimensionHeaders matches the dimensions present
+ in rows.
+ metric_headers (Sequence[google.analytics.data_v1beta.types.MetricHeader]):
+ Describes metric columns. The number of
+ MetricHeaders and ordering of MetricHeaders
+ matches the metrics present in rows.
+ rows (Sequence[google.analytics.data_v1beta.types.Row]):
+ Rows of dimension value combinations and
+ metric values in the report.
+ totals (Sequence[google.analytics.data_v1beta.types.Row]):
+ If requested, the totaled values of metrics.
+ maximums (Sequence[google.analytics.data_v1beta.types.Row]):
+ If requested, the maximum values of metrics.
+ minimums (Sequence[google.analytics.data_v1beta.types.Row]):
+ If requested, the minimum values of metrics.
+ total_size (int):
+ The total number of rows in the query result. ``totalSize``
+ is independent of the number of rows returned in the
+ response and the ``pageSize`` request parameter. For example
+ if a query returns 175 rows and includes ``pageSize`` of 50
+ in the API request, the response will contain ``totalSize``
+ of 175 but only 50 rows.
+ property_quota (google.analytics.data_v1beta.types.PropertyQuota):
+ This Analytics Property's Realtime quota
+ state including this request.
+ """
+
+ dimension_headers = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=data.DimensionHeader,
+ )
+
+ metric_headers = proto.RepeatedField(
+ proto.MESSAGE, number=2, message=data.MetricHeader,
+ )
+
+ rows = proto.RepeatedField(proto.MESSAGE, number=3, message=data.Row,)
+
+ totals = proto.RepeatedField(proto.MESSAGE, number=4, message=data.Row,)
+
+ maximums = proto.RepeatedField(proto.MESSAGE, number=5, message=data.Row,)
+
+ minimums = proto.RepeatedField(proto.MESSAGE, number=6, message=data.Row,)
+
+ total_size = proto.Field(proto.INT32, number=7)
+
+ property_quota = proto.Field(proto.MESSAGE, number=8, message=data.PropertyQuota,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
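
The ``page_token``/``next_page_token`` contract documented above reduces pagination to a simple loop: re-issue the identical request with the token from the previous response until no token comes back. A minimal sketch, assuming the synchronous ``BetaAnalyticsDataClient`` and a placeholder property ID::

    from google.analytics.data_v1beta import BetaAnalyticsDataClient
    from google.analytics.data_v1beta.types import (
        DateRange,
        Dimension,
        Metric,
        RunReportRequest,
    )

    client = BetaAnalyticsDataClient()
    request = RunReportRequest(
        property="properties/1234",  # placeholder property ID
        dimensions=[Dimension(name="eventName")],
        metrics=[Metric(name="eventCount")],
        date_ranges=[DateRange(start_date="28daysAgo", end_date="yesterday")],
        page_size=10000,
    )

    rows = []
    while True:
        response = client.run_report(request)
        rows.extend(response.rows)
        if not response.next_page_token:
            break
        # All other parameters must match the call that produced the token.
        request.page_token = response.next_page_token

    print(f"fetched {len(rows)} of {response.total_size} rows")
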
diff --git a/google/analytics/data_v1beta/types/data.py b/google/analytics/data_v1beta/types/data.py
new file mode 100644
index 0000000..1f0fa19
--- /dev/null
+++ b/google/analytics/data_v1beta/types/data.py
@@ -0,0 +1,1068 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.analytics.data.v1beta",
+ manifest={
+ "MetricAggregation",
+ "MetricType",
+ "DateRange",
+ "Dimension",
+ "DimensionExpression",
+ "Metric",
+ "FilterExpression",
+ "FilterExpressionList",
+ "Filter",
+ "OrderBy",
+ "Pivot",
+ "CohortSpec",
+ "Cohort",
+ "CohortsRange",
+ "CohortReportSettings",
+ "ResponseMetaData",
+ "DimensionHeader",
+ "MetricHeader",
+ "PivotHeader",
+ "PivotDimensionHeader",
+ "Row",
+ "DimensionValue",
+ "MetricValue",
+ "NumericValue",
+ "PropertyQuota",
+ "QuotaStatus",
+ "DimensionMetadata",
+ "MetricMetadata",
+ },
+)
+
+
+class MetricAggregation(proto.Enum):
+ r"""Represents aggregation of metrics."""
+ METRIC_AGGREGATION_UNSPECIFIED = 0
+ TOTAL = 1
+ MINIMUM = 5
+ MAXIMUM = 6
+ COUNT = 4
+
+
+class MetricType(proto.Enum):
+ r"""A metric's value type."""
+ METRIC_TYPE_UNSPECIFIED = 0
+ TYPE_INTEGER = 1
+ TYPE_FLOAT = 2
+ TYPE_SECONDS = 4
+ TYPE_MILLISECONDS = 5
+ TYPE_MINUTES = 6
+ TYPE_HOURS = 7
+ TYPE_STANDARD = 8
+ TYPE_CURRENCY = 9
+ TYPE_FEET = 10
+ TYPE_MILES = 11
+ TYPE_METERS = 12
+ TYPE_KILOMETERS = 13
+
+
+class DateRange(proto.Message):
+ r"""A contiguous set of days: startDate, startDate + 1, ...,
+ endDate. Requests are allowed up to 4 date ranges.
+
+ Attributes:
+ start_date (str):
+ The inclusive start date for the query in the format
+ ``YYYY-MM-DD``. Cannot be after ``end_date``. The format
+ ``NdaysAgo``, ``yesterday``, or ``today`` is also accepted,
+ and in that case, the date is inferred based on the
+ property's reporting time zone.
+ end_date (str):
+ The inclusive end date for the query in the format
+ ``YYYY-MM-DD``. Cannot be before ``start_date``. The format
+ ``NdaysAgo``, ``yesterday``, or ``today`` is also accepted,
+ and in that case, the date is inferred based on the
+ property's reporting time zone.
+ name (str):
+ Assigns a name to this date range. The dimension
+ ``dateRange`` is valued to this name in a report response.
+ If set, cannot begin with ``date_range_`` or ``RESERVED_``.
+ If not set, date ranges are named by their zero based index
+ in the request: ``date_range_0``, ``date_range_1``, etc.
+ """
+
+ start_date = proto.Field(proto.STRING, number=1)
+
+ end_date = proto.Field(proto.STRING, number=2)
+
+ name = proto.Field(proto.STRING, number=3)
+
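
Because ``start_date`` and ``end_date`` also accept ``NdaysAgo``, ``yesterday``, and ``today``, rolling windows need no client-side date arithmetic; the dates are resolved in the property's reporting time zone. A short sketch::

    from google.analytics.data_v1beta.types import DateRange

    # The trailing seven full days, named so the report's dateRange
    # dimension is easy to recognize in the response.
    last_week = DateRange(
        start_date="7daysAgo", end_date="yesterday", name="last_week"
    )

    # Up to 4 such ranges may be sent in a single report request.
    last_28_days = DateRange(start_date="28daysAgo", end_date="today")
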
+
+class Dimension(proto.Message):
+ r"""Dimensions are attributes of your data. For example, the
+ dimension city indicates the city from which an event
+ originates. Dimension values in report responses are strings;
+ for example, city could be "Paris" or "New York". Requests are
+ allowed up to 8 dimensions.
+
+ Attributes:
+ name (str):
+ The name of the dimension. See the `API
+ Dimensions `__
+ for the list of dimension names.
+
+ If ``dimensionExpression`` is specified, ``name`` can be any
+ string that you would like within the allowed character set.
+ For example if a ``dimensionExpression`` concatenates
+ ``country`` and ``city``, you could call that dimension
+ ``countryAndCity``. Dimension names that you choose must
+ match the regular expression ``^[a-zA-Z0-9_]$``.
+
+ Dimensions are referenced by ``name`` in
+ ``dimensionFilter``, ``orderBys``, ``dimensionExpression``,
+ and ``pivots``.
+ dimension_expression (google.analytics.data_v1beta.types.DimensionExpression):
+ One dimension can be the result of an
+ expression of multiple dimensions. For example,
+ dimension "country, city": concatenate(country,
+ ", ", city).
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ dimension_expression = proto.Field(
+ proto.MESSAGE, number=2, message="DimensionExpression",
+ )
+
+
+class DimensionExpression(proto.Message):
+ r"""Used to express a dimension which is the result of a formula of
+ multiple dimensions. Example usages:
+
+ 1) lower_case(dimension)
+ 2) concatenate(dimension1, symbol, dimension2).
+
+ Attributes:
+ lower_case (google.analytics.data_v1beta.types.DimensionExpression.CaseExpression):
+ Used to convert a dimension value to lower
+ case.
+ upper_case (google.analytics.data_v1beta.types.DimensionExpression.CaseExpression):
+ Used to convert a dimension value to upper
+ case.
+ concatenate (google.analytics.data_v1beta.types.DimensionExpression.ConcatenateExpression):
+ Used to combine dimension values to a single
+ dimension. For example, dimension "country,
+ city": concatenate(country, ", ", city).
+ """
+
+ class CaseExpression(proto.Message):
+ r"""Used to convert a dimension value to a single case.
+
+ Attributes:
+ dimension_name (str):
+ Name of a dimension. The name must refer back
+ to a name in dimensions field of the request.
+ """
+
+ dimension_name = proto.Field(proto.STRING, number=1)
+
+ class ConcatenateExpression(proto.Message):
+ r"""Used to combine dimension values to a single dimension.
+
+ Attributes:
+ dimension_names (Sequence[str]):
+ Names of dimensions. The names must refer
+ back to names in the dimensions field of the
+ request.
+ delimiter (str):
+ The delimiter placed between dimension names.
+
+ Delimiters are often single characters such as "|" or ","
+ but can be longer strings. If a dimension value contains the
+ delimiter, both will be present in response with no
+ distinction. For example if dimension 1 value = "US,FR",
+ dimension 2 value = "JP", and delimiter = ",", then the
+ response will contain "US,FR,JP".
+ """
+
+ dimension_names = proto.RepeatedField(proto.STRING, number=1)
+
+ delimiter = proto.Field(proto.STRING, number=2)
+
+ lower_case = proto.Field(
+ proto.MESSAGE, number=4, oneof="one_expression", message=CaseExpression,
+ )
+
+ upper_case = proto.Field(
+ proto.MESSAGE, number=5, oneof="one_expression", message=CaseExpression,
+ )
+
+ concatenate = proto.Field(
+ proto.MESSAGE, number=6, oneof="one_expression", message=ConcatenateExpression,
+ )
+
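
A derived dimension is then just a ``Dimension`` whose caller-chosen name is backed by a ``DimensionExpression``. A sketch of the ``concatenate(country, ", ", city)`` example from the docstrings above::

    from google.analytics.data_v1beta.types import Dimension, DimensionExpression

    country_and_city = Dimension(
        # A caller-chosen name; it can be referenced in orderBys,
        # dimensionFilter, and pivots like any other dimension name.
        name="countryAndCity",
        dimension_expression=DimensionExpression(
            concatenate=DimensionExpression.ConcatenateExpression(
                dimension_names=["country", "city"],
                delimiter=", ",
            )
        ),
    )
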
+
+class Metric(proto.Message):
+ r"""The quantitative measurements of a report. For example, the metric
+ ``eventCount`` is the total number of events. Requests are allowed
+ up to 10 metrics.
+
+ Attributes:
+ name (str):
+ The name of the metric. See the `API
+ Metrics `__
+ for the list of metric names.
+
+ If ``expression`` is specified, ``name`` can be any string
+ that you would like within the allowed character set. For
+ example if ``expression`` is ``screenPageViews/sessions``,
+ you could call that metric's name = ``viewsPerSession``.
+ Metric names that you choose must match the regular
+ expression ``^[a-zA-Z0-9_]$``.
+
+ Metrics are referenced by ``name`` in ``metricFilter``,
+ ``orderBys``, and metric ``expression``.
+ expression (str):
+ A mathematical expression for derived metrics. For example,
+ the metric Event count per user is
+ ``eventCount/totalUsers``.
+ invisible (bool):
+ Indicates if a metric is invisible in the report response.
+ If a metric is invisible, the metric will not produce a
+ column in the response, but can be used in ``metricFilter``,
+ ``orderBys``, or a metric ``expression``.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ expression = proto.Field(proto.STRING, number=2)
+
+ invisible = proto.Field(proto.BOOL, number=3)
+
+
+class FilterExpression(proto.Message):
+ r"""To express dimension or metric filters.
+ The fields in the same FilterExpression need to be either all
+ dimensions or all metrics.
+
+ Attributes:
+ and_group (google.analytics.data_v1beta.types.FilterExpressionList):
+ The FilterExpressions in and_group have an AND relationship.
+ or_group (google.analytics.data_v1beta.types.FilterExpressionList):
+ The FilterExpressions in or_group have an OR relationship.
+ not_expression (google.analytics.data_v1beta.types.FilterExpression):
+ The FilterExpression is NOT of not_expression.
+ filter (google.analytics.data_v1beta.types.Filter):
+ A primitive filter.
+ All fields in a filter within the same FilterExpression
+ need to be either all dimensions or all metrics.
+ """
+
+ and_group = proto.Field(
+ proto.MESSAGE, number=1, oneof="expr", message="FilterExpressionList",
+ )
+
+ or_group = proto.Field(
+ proto.MESSAGE, number=2, oneof="expr", message="FilterExpressionList",
+ )
+
+ not_expression = proto.Field(
+ proto.MESSAGE, number=3, oneof="expr", message="FilterExpression",
+ )
+
+ filter = proto.Field(proto.MESSAGE, number=4, oneof="expr", message="Filter",)
+
+
+class FilterExpressionList(proto.Message):
+ r"""A list of filter expressions.
+
+ Attributes:
+ expressions (Sequence[google.analytics.data_v1beta.types.FilterExpression]):
+ A list of filter expressions.
+ """
+
+ expressions = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="FilterExpression",
+ )
+
+
+class Filter(proto.Message):
+ r"""An expression to filter dimension or metric values.
+
+ Attributes:
+ field_name (str):
+ The dimension name or metric name. Must be a
+ name defined in dimensions or metrics.
+ string_filter (google.analytics.data_v1beta.types.Filter.StringFilter):
+ Strings related filter.
+ in_list_filter (google.analytics.data_v1beta.types.Filter.InListFilter):
+ A filter for in list values.
+ numeric_filter (google.analytics.data_v1beta.types.Filter.NumericFilter):
+ A filter for numeric or date values.
+ between_filter (google.analytics.data_v1beta.types.Filter.BetweenFilter):
+ A filter for two values.
+ """
+
+ class StringFilter(proto.Message):
+ r"""The filter for string
+
+ Attributes:
+ match_type (google.analytics.data_v1beta.types.Filter.StringFilter.MatchType):
+ The match type for this filter.
+ value (str):
+ The string value used for the matching.
+ case_sensitive (bool):
+ If true, the string value is case sensitive.
+ """
+
+ class MatchType(proto.Enum):
+ r"""The match type of a string filter"""
+ MATCH_TYPE_UNSPECIFIED = 0
+ EXACT = 1
+ BEGINS_WITH = 2
+ ENDS_WITH = 3
+ CONTAINS = 4
+ FULL_REGEXP = 5
+ PARTIAL_REGEXP = 6
+
+ match_type = proto.Field(
+ proto.ENUM, number=1, enum="Filter.StringFilter.MatchType",
+ )
+
+ value = proto.Field(proto.STRING, number=2)
+
+ case_sensitive = proto.Field(proto.BOOL, number=3)
+
+ class InListFilter(proto.Message):
+ r"""The result needs to be in a list of string values.
+
+ Attributes:
+ values (Sequence[str]):
+ The list of string values.
+ Must be non-empty.
+ case_sensitive (bool):
+ If true, the string value is case sensitive.
+ """
+
+ values = proto.RepeatedField(proto.STRING, number=1)
+
+ case_sensitive = proto.Field(proto.BOOL, number=2)
+
+ class NumericFilter(proto.Message):
+ r"""Filters for numeric or date values.
+
+ Attributes:
+ operation (google.analytics.data_v1beta.types.Filter.NumericFilter.Operation):
+ The operation type for this filter.
+ value (google.analytics.data_v1beta.types.NumericValue):
+ A numeric value or a date value.
+ """
+
+ class Operation(proto.Enum):
+ r"""The operation applied to a numeric filter"""
+ OPERATION_UNSPECIFIED = 0
+ EQUAL = 1
+ LESS_THAN = 2
+ LESS_THAN_OR_EQUAL = 3
+ GREATER_THAN = 4
+ GREATER_THAN_OR_EQUAL = 5
+
+ operation = proto.Field(
+ proto.ENUM, number=1, enum="Filter.NumericFilter.Operation",
+ )
+
+ value = proto.Field(proto.MESSAGE, number=2, message="NumericValue",)
+
+ class BetweenFilter(proto.Message):
+ r"""To express that the result needs to be between two numbers
+ (inclusive).
+
+ Attributes:
+ from_value (google.analytics.data_v1beta.types.NumericValue):
+ Begins with this number.
+ to_value (google.analytics.data_v1beta.types.NumericValue):
+ Ends with this number.
+ """
+
+ from_value = proto.Field(proto.MESSAGE, number=1, message="NumericValue",)
+
+ to_value = proto.Field(proto.MESSAGE, number=2, message="NumericValue",)
+
+ field_name = proto.Field(proto.STRING, number=1)
+
+ string_filter = proto.Field(
+ proto.MESSAGE, number=3, oneof="one_filter", message=StringFilter,
+ )
+
+ in_list_filter = proto.Field(
+ proto.MESSAGE, number=4, oneof="one_filter", message=InListFilter,
+ )
+
+ numeric_filter = proto.Field(
+ proto.MESSAGE, number=5, oneof="one_filter", message=NumericFilter,
+ )
+
+ between_filter = proto.Field(
+ proto.MESSAGE, number=6, oneof="one_filter", message=BetweenFilter,
+ )
+
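
Because the ``expr`` oneof is recursive, arbitrary boolean trees compose out of ``and_group``, ``or_group``, ``not_expression``, and leaf ``filter`` nodes. A sketch of a dimension filter for country is "United States" AND event name begins with "purchase"::

    from google.analytics.data_v1beta.types import (
        Filter,
        FilterExpression,
        FilterExpressionList,
    )

    dimension_filter = FilterExpression(
        and_group=FilterExpressionList(
            expressions=[
                FilterExpression(
                    filter=Filter(
                        field_name="country",
                        string_filter=Filter.StringFilter(
                            match_type=Filter.StringFilter.MatchType.EXACT,
                            value="United States",
                        ),
                    )
                ),
                FilterExpression(
                    filter=Filter(
                        field_name="eventName",
                        string_filter=Filter.StringFilter(
                            match_type=Filter.StringFilter.MatchType.BEGINS_WITH,
                            value="purchase",
                        ),
                    )
                ),
            ]
        )
    )
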
+
+class OrderBy(proto.Message):
+ r"""The sort options.
+
+ Attributes:
+ metric (google.analytics.data_v1beta.types.OrderBy.MetricOrderBy):
+ Sorts results by a metric's values.
+ dimension (google.analytics.data_v1beta.types.OrderBy.DimensionOrderBy):
+ Sorts results by a dimension's values.
+ pivot (google.analytics.data_v1beta.types.OrderBy.PivotOrderBy):
+ Sorts results by a metric's values within a
+ pivot column group.
+ desc (bool):
+ If true, sorts by descending order.
+ """
+
+ class MetricOrderBy(proto.Message):
+ r"""Sorts by metric values.
+
+ Attributes:
+ metric_name (str):
+ A metric name in the request to order by.
+ """
+
+ metric_name = proto.Field(proto.STRING, number=1)
+
+ class DimensionOrderBy(proto.Message):
+ r"""Sorts by dimension values.
+
+ Attributes:
+ dimension_name (str):
+ A dimension name in the request to order by.
+ order_type (google.analytics.data_v1beta.types.OrderBy.DimensionOrderBy.OrderType):
+ Controls the rule for dimension value
+ ordering.
+ """
+
+ class OrderType(proto.Enum):
+ r"""Rule to order the string dimension values by."""
+ ORDER_TYPE_UNSPECIFIED = 0
+ ALPHANUMERIC = 1
+ CASE_INSENSITIVE_ALPHANUMERIC = 2
+ NUMERIC = 3
+
+ dimension_name = proto.Field(proto.STRING, number=1)
+
+ order_type = proto.Field(
+ proto.ENUM, number=2, enum="OrderBy.DimensionOrderBy.OrderType",
+ )
+
+ class PivotOrderBy(proto.Message):
+ r"""Sorts by a pivot column group.
+
+ Attributes:
+ metric_name (str):
+ In the response to order by, order rows by
+ this column. Must be a metric name from the
+ request.
+ pivot_selections (Sequence[google.analytics.data_v1beta.types.OrderBy.PivotOrderBy.PivotSelection]):
+ Used to select a dimension name and value
+ pivot. If multiple pivot selections are given,
+ the sort occurs on rows where all pivot
+ selection dimension name and value pairs match
+ the row's dimension name and value pair.
+ """
+
+ class PivotSelection(proto.Message):
+ r"""A pair of dimension names and values. Rows with this dimension pivot
+ pair are ordered by the metric's value.
+
+ For example if pivots = {{"browser", "Chrome"}} and metric_name =
+ "Sessions", then the rows will be sorted based on Sessions in
+ Chrome.
+
+ ::
+
+ ---------|----------|----------------|----------|----------------
+ | Chrome | Chrome | Safari | Safari
+ ---------|----------|----------------|----------|----------------
+ Country | Sessions | Pages/Sessions | Sessions | Pages/Sessions
+ ---------|----------|----------------|----------|----------------
+ US | 2 | 2 | 3 | 1
+ ---------|----------|----------------|----------|----------------
+ Canada | 3 | 1 | 4 | 1
+ ---------|----------|----------------|----------|----------------
+
+ Attributes:
+ dimension_name (str):
+ Must be a dimension name from the request.
+ dimension_value (str):
+ Order by only when the named dimension is
+ this value.
+ """
+
+ dimension_name = proto.Field(proto.STRING, number=1)
+
+ dimension_value = proto.Field(proto.STRING, number=2)
+
+ metric_name = proto.Field(proto.STRING, number=1)
+
+ pivot_selections = proto.RepeatedField(
+ proto.MESSAGE, number=2, message="OrderBy.PivotOrderBy.PivotSelection",
+ )
+
+ metric = proto.Field(
+ proto.MESSAGE, number=1, oneof="one_order_by", message=MetricOrderBy,
+ )
+
+ dimension = proto.Field(
+ proto.MESSAGE, number=2, oneof="one_order_by", message=DimensionOrderBy,
+ )
+
+ pivot = proto.Field(
+ proto.MESSAGE, number=3, oneof="one_order_by", message=PivotOrderBy,
+ )
+
+ desc = proto.Field(proto.BOOL, number=4)
+
+
+class Pivot(proto.Message):
+ r"""Describes the visible dimension columns and rows in the
+ report response.
+
+ Attributes:
+ field_names (Sequence[str]):
+ Dimension names for visible columns in the
+ report response. Including "dateRange" produces
+ a date range column; for each row in the
+ response, dimension values in the date range
+ column will indicate the corresponding date
+ range from the request.
+ order_bys (Sequence[google.analytics.data_v1beta.types.OrderBy]):
+ Specifies how dimensions are ordered in the pivot. In the
+ first Pivot, the OrderBys determine Row and
+ PivotDimensionHeader ordering; in subsequent Pivots, the
+ OrderBys determine only PivotDimensionHeader ordering.
+ Dimensions specified in these OrderBys must be a subset of
+ Pivot.field_names.
+ offset (int):
+ The row count of the start row. The first row
+ is counted as row 0.
+ limit (int):
+ The number of unique combinations of dimension values to
+ return in this pivot. If unspecified, up to 10,000 unique
+ combinations of dimension values are returned. ``limit``
+ must be positive.
+
+ The product of the ``limit`` for each ``pivot`` in a
+ ``RunPivotReportRequest`` must not exceed 100,000. For
+ example, a two pivot request with ``limit: 1000`` in each
+ pivot will fail because the product is ``1,000,000``.
+ metric_aggregations (Sequence[google.analytics.data_v1beta.types.MetricAggregation]):
+ Aggregate the metrics by dimensions in this pivot using the
+ specified metric_aggregations.
+ """
+
+ field_names = proto.RepeatedField(proto.STRING, number=1)
+
+ order_bys = proto.RepeatedField(proto.MESSAGE, number=2, message="OrderBy",)
+
+ offset = proto.Field(proto.INT64, number=3)
+
+ limit = proto.Field(proto.INT64, number=4)
+
+ metric_aggregations = proto.RepeatedField(
+ proto.ENUM, number=5, enum="MetricAggregation",
+ )
+
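
The product rule on ``limit`` is worth spelling out: limits multiply across the pivots of one request, so two pivots of 1,000 each (product 1,000,000) are rejected, while the pair below sits exactly at the 100,000 ceiling. A sketch, reusing dimension names from earlier examples::

    from google.analytics.data_v1beta.types import Pivot

    country_pivot = Pivot(field_names=["country"], limit=100)
    event_pivot = Pivot(field_names=["eventName"], limit=1000)

    # 100 * 1000 = 100,000: the largest allowed product of pivot limits.
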
+
+class CohortSpec(proto.Message):
+ r"""The specification of cohorts for a cohort report.
+
+ Cohort reports create a time series of user retention for the
+ cohort. For example, you could select the cohort of users that were
+ acquired in the first week of September and follow that cohort for
+ the next six weeks. Selecting the users acquired in the first week
+ of September cohort is specified in the ``cohort`` object. Following
+ that cohort for the next six weeks is specified in the
+ ``cohortsRange`` object.
+
+ For examples, see `Cohort Report
+ Examples `__.
+
+ The report response could show a weekly time series where say your
+ app has retained 60% of this cohort after three weeks and 25% of
+ this cohort after six weeks. These two percentages can be calculated
+ by the metric ``cohortActiveUsers/cohortTotalUsers`` and will be
+ separate rows in the report.
+
+ Attributes:
+ cohorts (Sequence[google.analytics.data_v1beta.types.Cohort]):
+ Defines the selection criteria to group users
+ into cohorts.
+ Most cohort reports define only a single cohort.
+ If multiple cohorts are specified, each cohort
+ can be recognized in the report by their name.
+ cohorts_range (google.analytics.data_v1beta.types.CohortsRange):
+ Cohort reports follow cohorts over an
+ extended reporting date range. This range
+ specifies an offset duration to follow the
+ cohorts over.
+ cohort_report_settings (google.analytics.data_v1beta.types.CohortReportSettings):
+ Optional settings for a cohort report.
+ """
+
+ cohorts = proto.RepeatedField(proto.MESSAGE, number=1, message="Cohort",)
+
+ cohorts_range = proto.Field(proto.MESSAGE, number=2, message="CohortsRange",)
+
+ cohort_report_settings = proto.Field(
+ proto.MESSAGE, number=3, message="CohortReportSettings",
+ )
+
+
+class Cohort(proto.Message):
+ r"""Defines a cohort selection criteria. A cohort is a group of users
+ who share a common characteristic. For example, users with the same
+ ``firstSessionDate`` belong to the same cohort.
+
+ Attributes:
+ name (str):
+ Assigns a name to this cohort. The dimension ``cohort`` is
+ valued to this name in a report response. If set, cannot
+ begin with ``cohort_`` or ``RESERVED_``. If not set, cohorts
+ are named by their zero based index ``cohort_0``,
+ ``cohort_1``, etc.
+ dimension (str):
+ Dimension used by the cohort. Required and only supports
+ ``firstSessionDate``.
+ date_range (google.analytics.data_v1beta.types.DateRange):
+ The cohort selects users whose first touch date is between
+ start date and end date defined in the ``dateRange``. This
+ ``dateRange`` does not specify the full date range of event
+ data that is present in a cohort report. In a cohort report,
+ this ``dateRange`` is extended by the granularity and offset
+ present in the ``cohortsRange``; event data for the extended
+ reporting date range is present in a cohort report.
+
+ In a cohort request, this ``dateRange`` is required and the
+ ``dateRanges`` in the ``RunReportRequest`` or
+ ``RunPivotReportRequest`` must be unspecified.
+
+ This ``dateRange`` should generally be aligned with the
+ cohort's granularity. If ``CohortsRange`` uses daily
+ granularity, this ``dateRange`` can be a single day. If
+ ``CohortsRange`` uses weekly granularity, this ``dateRange``
+ can be aligned to a week boundary, starting at Sunday and
+ ending Saturday. If ``CohortsRange`` uses monthly
+ granularity, this ``dateRange`` can be aligned to a month,
+ starting at the first and ending on the last day of the
+ month.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ dimension = proto.Field(proto.STRING, number=2)
+
+ date_range = proto.Field(proto.MESSAGE, number=3, message="DateRange",)
+
+
+class CohortsRange(proto.Message):
+ r"""Configures the extended reporting date range for a cohort
+ report. Specifies an offset duration to follow the cohorts over.
+
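+ For example (dates illustrative): with ``WEEKLY`` granularity,
+ ``startOffset = 0`` and ``endOffset = 6``, a cohort whose
+ ``dateRange`` is 2020-09-01 to 2020-09-07 is reported over
+ 2020-09-01 through 2020-10-19, the cohort's end date plus
+ ``6 * 7 = 42`` days.
+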
+ Attributes:
+ granularity (google.analytics.data_v1beta.types.CohortsRange.Granularity):
+ Required. The granularity used to interpret the
+ ``startOffset`` and ``endOffset`` for the extended reporting
+ date range for a cohort report.
+ start_offset (int):
+ ``startOffset`` specifies the start date of the extended
+ reporting date range for a cohort report. ``startOffset`` is
+ commonly set to 0 so that reports contain data from the
+ acquisition of the cohort forward.
+
+ If ``granularity`` is ``DAILY``, the ``startDate`` of the
+ extended reporting date range is ``startDate`` of the cohort
+ plus ``startOffset`` days.
+
+ If ``granularity`` is ``WEEKLY``, the ``startDate`` of the
+ extended reporting date range is ``startDate`` of the cohort
+ plus ``startOffset * 7`` days.
+
+ If ``granularity`` is ``MONTHLY``, the ``startDate`` of the
+ extended reporting date range is ``startDate`` of the cohort
+ plus ``startOffset * 30`` days.
+ end_offset (int):
+ Required. ``endOffset`` specifies the end date of the
+ extended reporting date range for a cohort report.
+ ``endOffset`` can be any positive integer but is commonly
+ set to 5 to 10 so that reports contain data on the cohort
+ for the next several granularity time periods.
+
+ If ``granularity`` is ``DAILY``, the ``endDate`` of the
+ extended reporting date range is ``endDate`` of the cohort
+ plus ``endOffset`` days.
+
+ If ``granularity`` is ``WEEKLY``, the ``endDate`` of the
+ extended reporting date range is ``endDate`` of the cohort
+ plus ``endOffset * 7`` days.
+
+ If ``granularity`` is ``MONTHLY``, the ``endDate`` of the
+ extended reporting date range is ``endDate`` of the cohort
+ plus ``endOffset * 30`` days.
+ """
+
+ class Granularity(proto.Enum):
+ r"""The granularity used to interpret the ``startOffset`` and
+ ``endOffset`` for the extended reporting date range for a cohort
+ report.
+ """
+ GRANULARITY_UNSPECIFIED = 0
+ DAILY = 1
+ WEEKLY = 2
+ MONTHLY = 3
+
+ granularity = proto.Field(proto.ENUM, number=1, enum=Granularity,)
+
+ start_offset = proto.Field(proto.INT32, number=2)
+
+ end_offset = proto.Field(proto.INT32, number=3)
+
+
+class CohortReportSettings(proto.Message):
+ r"""Optional settings of a cohort report.
+
+ Attributes:
+ accumulate (bool):
+ If true, accumulates the result from first touch day to the
+ end day. Not supported in ``RunReportRequest``.
+ """
+
+ accumulate = proto.Field(proto.BOOL, number=1)
+
+
+class ResponseMetaData(proto.Message):
+ r"""Response's metadata carrying additional information about the
+ report content.
+
+ Attributes:
+ data_loss_from_other_row (bool):
+ If true, indicates that some buckets of dimension
+ combinations were rolled into the "(other)" row. This
+ can happen for high-cardinality reports.
+ """
+
+ data_loss_from_other_row = proto.Field(proto.BOOL, number=3)
+
+
+class DimensionHeader(proto.Message):
+ r"""Describes a dimension column in the report. Dimensions
+ requested in a report produce column entries within rows and
+ DimensionHeaders. However, dimensions used exclusively within
+ filters or expressions do not produce columns in a report;
+ correspondingly, those dimensions do not produce headers.
+
+ Attributes:
+ name (str):
+ The dimension's name.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+
+class MetricHeader(proto.Message):
+ r"""Describes a metric column in the report. Visible metrics
+ requested in a report produce column entries within rows and
+ MetricHeaders. However, metrics used exclusively within filters
+ or expressions do not produce columns in a report;
+ correspondingly, those metrics do not produce headers.
+
+ Attributes:
+ name (str):
+ The metric's name.
+ type_ (google.analytics.data_v1beta.types.MetricType):
+ The metric's data type.
+ """
+
+ name = proto.Field(proto.STRING, number=1)
+
+ type_ = proto.Field(proto.ENUM, number=2, enum="MetricType",)
+
+
+class PivotHeader(proto.Message):
+ r"""Dimensions' values in a single pivot.
+
+ Attributes:
+ pivot_dimension_headers (Sequence[google.analytics.data_v1beta.types.PivotDimensionHeader]):
+ The size is the same as the cardinality of
+ the corresponding dimension combinations.
+ row_count (int):
+ The cardinality of the pivot. The total number of rows for
+ this pivot's fields regardless of how the parameters
+ ``offset`` and ``limit`` are specified in the request.
+ """
+
+ pivot_dimension_headers = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="PivotDimensionHeader",
+ )
+
+ row_count = proto.Field(proto.INT32, number=2)
+
+
+class PivotDimensionHeader(proto.Message):
+ r"""Summarizes dimension values from a row for this pivot.
+
+ Attributes:
+ dimension_values (Sequence[google.analytics.data_v1beta.types.DimensionValue]):
+ Values of multiple dimensions in a pivot.
+ """
+
+ dimension_values = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="DimensionValue",
+ )
+
+
+class Row(proto.Message):
+ r"""Report data for each row. For example if RunReportRequest contains:
+
+ .. code:: none
+
+ "dimensions": [
+ {
+ "name": "eventName"
+ },
+ {
+ "name": "countryId"
+ }
+ ],
+ "metrics": [
+ {
+ "name": "eventCount"
+ }
+ ]
+
+ One row with 'in_app_purchase' as the eventName, 'JP' as the
+ countryId, and 15 as the eventCount, would be:
+
+ .. code:: none
+
+ "dimensionValues": [
+ {
+ "value": "in_app_purchase"
+ },
+ {
+ "value": "JP"
+ }
+ ],
+ "metricValues": [
+ {
+ "value": "15"
+ }
+ ]
+
+ Attributes:
+ dimension_values (Sequence[google.analytics.data_v1beta.types.DimensionValue]):
+ List of requested dimension values. In a PivotReport,
+ dimension_values are only listed for dimensions included in
+ a pivot.
+ metric_values (Sequence[google.analytics.data_v1beta.types.MetricValue]):
+ List of requested visible metric values.
+ """
+
+ dimension_values = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="DimensionValue",
+ )
+
+ metric_values = proto.RepeatedField(proto.MESSAGE, number=2, message="MetricValue",)
+
+
+class DimensionValue(proto.Message):
+ r"""The value of a dimension.
+
+ Attributes:
+ value (str):
+ Value as a string if the dimension type is a
+ string.
+ """
+
+ value = proto.Field(proto.STRING, number=1, oneof="one_value")
+
+
+class MetricValue(proto.Message):
+ r"""The value of a metric.
+
+ Attributes:
+ value (str):
+ Measurement value. See MetricHeader for type.
+ """
+
+ value = proto.Field(proto.STRING, number=4, oneof="one_value")
+
+
+class NumericValue(proto.Message):
+ r"""To represent a number.
+
+ Attributes:
+ int64_value (int):
+ Integer value
+ double_value (float):
+ Double value
+ """
+
+ int64_value = proto.Field(proto.INT64, number=1, oneof="one_value")
+
+ double_value = proto.Field(proto.DOUBLE, number=2, oneof="one_value")
+
+
+class PropertyQuota(proto.Message):
+ r"""Current state of all quotas for this Analytics Property. If
+ any quota for a property is exhausted, all requests to that
+ property will return Resource Exhausted errors.
+
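+ When a request sets ``return_property_quota``, the response carries
+ this message in its ``property_quota`` field. A sketch of inspecting
+ it (the threshold is illustrative):
+
+ .. code:: python
+
+     quota = response.property_quota
+     if quota.tokens_per_day.remaining < 100:
+         # Nearly exhausted; back off before sending more requests.
+         ...
+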
+ Attributes:
+ tokens_per_day (google.analytics.data_v1beta.types.QuotaStatus):
+ Standard Analytics Properties can use up to
+ 25,000 tokens per day; Analytics 360 Properties
+ can use 250,000 tokens per day. Most requests
+ consume fewer than 10 tokens.
+ tokens_per_hour (google.analytics.data_v1beta.types.QuotaStatus):
+ Standard Analytics Properties can use up to
+ 5,000 tokens per hour; Analytics 360 Properties
+ can use 50,000 tokens per hour. Each API request
+ consumes a single count of tokens, and that
+ count is deducted from both the hourly and
+ daily quotas.
+ concurrent_requests (google.analytics.data_v1beta.types.QuotaStatus):
+ Standard Analytics Properties can send up to
+ 10 concurrent requests; Analytics 360 Properties
+ can use up to 50 concurrent requests.
+ server_errors_per_project_per_hour (google.analytics.data_v1beta.types.QuotaStatus):
+ Standard Analytics Properties and cloud
+ project pairs can have up to 10 server errors
+ per hour; Analytics 360 Properties and cloud
+ project pairs can have up to 50 server errors
+ per hour.
+ """
+
+ tokens_per_day = proto.Field(proto.MESSAGE, number=1, message="QuotaStatus",)
+
+ tokens_per_hour = proto.Field(proto.MESSAGE, number=2, message="QuotaStatus",)
+
+ concurrent_requests = proto.Field(proto.MESSAGE, number=3, message="QuotaStatus",)
+
+ server_errors_per_project_per_hour = proto.Field(
+ proto.MESSAGE, number=4, message="QuotaStatus",
+ )
+
+
+class QuotaStatus(proto.Message):
+ r"""Current state for a particular quota group.
+
+ Attributes:
+ consumed (int):
+ Quota consumed by this request.
+ remaining (int):
+ Quota remaining after this request.
+ """
+
+ consumed = proto.Field(proto.INT32, number=1)
+
+ remaining = proto.Field(proto.INT32, number=2)
+
+
+class DimensionMetadata(proto.Message):
+ r"""Explains a dimension.
+
+ Attributes:
+ api_name (str):
+ This dimension's name. Usable in
+ `Dimension <#Dimension>`__'s ``name``. For example,
+ ``eventName``.
+ ui_name (str):
+ This dimension's name within the Google Analytics user
+ interface. For example, ``Event name``.
+ description (str):
+ Description of how this dimension is used and
+ calculated.
+ deprecated_api_names (Sequence[str]):
+ Still usable but deprecated names for this dimension. If
+ populated, this dimension is available by either ``apiName``
+ or one of ``deprecatedApiNames`` for a period of time. After
+ the deprecation period, the dimension will be available only
+ by ``apiName``.
+ custom_definition (bool):
+ True if the dimension is a custom dimension
+ for this property.
+ """
+
+ api_name = proto.Field(proto.STRING, number=1)
+
+ ui_name = proto.Field(proto.STRING, number=2)
+
+ description = proto.Field(proto.STRING, number=3)
+
+ deprecated_api_names = proto.RepeatedField(proto.STRING, number=4)
+
+ custom_definition = proto.Field(proto.BOOL, number=5)
+
+
+class MetricMetadata(proto.Message):
+ r"""Explains a metric.
+
+ Attributes:
+ api_name (str):
+ A metric name. Usable in `Metric <#Metric>`__'s ``name``.
+ For example, ``eventCount``.
+ ui_name (str):
+ This metric's name within the Google Analytics user
+ interface. For example, ``Event count``.
+ description (str):
+ Description of how this metric is used and
+ calculated.
+ deprecated_api_names (Sequence[str]):
+ Still usable but deprecated names for this metric. If
+ populated, this metric is available by either ``apiName`` or
+ one of ``deprecatedApiNames`` for a period of time. After
+ the deprecation period, the metric will be available only by
+ ``apiName``.
+ type_ (google.analytics.data_v1beta.types.MetricType):
+ The type of this metric.
+ expression (str):
+ The mathematical expression for this derived metric. Can be
+ used in `Metric <#Metric>`__'s ``expression`` field for
+ equivalent reports. Most metrics are not expressions, and
+ for non-expressions, this field is empty.
+ custom_definition (bool):
+ True if the metric is a custom metric for
+ this property.
+ """
+
+ api_name = proto.Field(proto.STRING, number=1)
+
+ ui_name = proto.Field(proto.STRING, number=2)
+
+ description = proto.Field(proto.STRING, number=3)
+
+ deprecated_api_names = proto.RepeatedField(proto.STRING, number=4)
+
+ type_ = proto.Field(proto.ENUM, number=5, enum="MetricType",)
+
+ expression = proto.Field(proto.STRING, number=6)
+
+ custom_definition = proto.Field(proto.BOOL, number=7)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
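
Taken together, the cohort and quota types above compose into a single report
request. A minimal sketch, assuming the v1beta client surface added in this
change (the property ID is a placeholder; response handling is elided, since
the tests below show ``run_report`` returning a pager at this commit):

.. code:: python

    from google.analytics.data_v1beta import BetaAnalyticsDataClient
    from google.analytics.data_v1beta.types import (
        Cohort,
        CohortSpec,
        CohortsRange,
        DateRange,
        Dimension,
        Metric,
        RunReportRequest,
    )

    client = BetaAnalyticsDataClient()
    request = RunReportRequest(
        property="properties/GA4_PROPERTY_ID",  # placeholder
        dimensions=[Dimension(name="cohort")],
        metrics=[Metric(name="cohortActiveUsers")],
        # Per the Cohort docstring, the cohort's date_range replaces the
        # request-level date_ranges, which must stay unspecified.
        cohort_spec=CohortSpec(
            cohorts=[
                Cohort(
                    name="cohort_week1",
                    dimension="firstSessionDate",
                    date_range=DateRange(
                        start_date="2020-09-01", end_date="2020-09-07"
                    ),
                )
            ],
            cohorts_range=CohortsRange(
                granularity=CohortsRange.Granularity.WEEKLY,
                start_offset=0,
                end_offset=6,
            ),
        ),
        return_property_quota=True,
    )
    response = client.run_report(request)
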
diff --git a/noxfile.py b/noxfile.py
index a57e24b..1ee6cab 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -30,6 +30,17 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -75,13 +86,15 @@ def default(session):
session.install(
"mock", "pytest", "pytest-cov",
)
+
session.install("-e", ".")
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
- "--cov=google/cloud",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google.analytics",
"--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
@@ -129,9 +142,21 @@ def system(session):
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
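
A side effect of the ``--junitxml`` flags added above: every session writes a
log whose name ends in ``_sponge_log.xml``, so a single glob can collect all
of them. A sketch of the names produced, with version lists per the constants
above:

.. code:: python

    # Illustrative: log names emitted by the unit and system sessions.
    for python in ["3.6", "3.7", "3.8", "3.9"]:  # UNIT_TEST_PYTHON_VERSIONS
        print(f"unit_{python}_sponge_log.xml")
    print("system_3.8_sponge_log.xml")  # SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
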
diff --git a/samples/snippets/quickstart.py b/samples/snippets/quickstart.py
index 1525dd2..5c6657d 100644
--- a/samples/snippets/quickstart.py
+++ b/samples/snippets/quickstart.py
@@ -59,7 +59,7 @@
import argparse
# [START ga_data_run_report]
-from google.analytics.data import AlphaAnalyticsDataClient
+from google.analytics.data_v1alpha import AlphaAnalyticsDataClient
from google.analytics.data_v1alpha.types import DateRange
from google.analytics.data_v1alpha.types import Dimension
from google.analytics.data_v1alpha.types import Entity
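
The import fix above is needed because ``AlphaAnalyticsDataClient`` lives in
the versioned ``google.analytics.data_v1alpha`` package; with v1beta added in
this change, the unversioned ``google.analytics.data`` package presumably
tracks the beta surface instead. The corrected import in use, as a sketch:

.. code:: python

    from google.analytics.data_v1alpha import AlphaAnalyticsDataClient

    # Credentials are picked up from the environment
    # (GOOGLE_APPLICATION_CREDENTIALS) by default.
    client = AlphaAnalyticsDataClient()
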
diff --git a/scripts/fixup_data_v1alpha_keywords.py b/scripts/fixup_data_v1alpha_keywords.py
deleted file mode 100644
index 9efe39c..0000000
--- a/scripts/fixup_data_v1alpha_keywords.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import argparse
-import os
-import libcst as cst
-import pathlib
-import sys
-from typing import (Any, Callable, Dict, List, Sequence, Tuple)
-
-
-def partition(
- predicate: Callable[[Any], bool],
- iterator: Sequence[Any]
-) -> Tuple[List[Any], List[Any]]:
- """A stable, out-of-place partition."""
- results = ([], [])
-
- for i in iterator:
- results[int(predicate(i))].append(i)
-
- # Returns trueList, falseList
- return results[1], results[0]
-
-
-class dataCallTransformer(cst.CSTTransformer):
- CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
- METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
- 'batch_run_pivot_reports': ('entity', 'requests', ),
- 'batch_run_reports': ('entity', 'requests', ),
- 'get_metadata': ('name', ),
- 'run_pivot_report': ('entity', 'dimensions', 'metrics', 'dimension_filter', 'metric_filter', 'pivots', 'date_ranges', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', ),
- 'run_report': ('entity', 'dimensions', 'metrics', 'date_ranges', 'offset', 'limit', 'metric_aggregations', 'dimension_filter', 'metric_filter', 'order_bys', 'currency_code', 'cohort_spec', 'keep_empty_rows', 'return_property_quota', ),
-
- }
-
- def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
- try:
- key = original.func.attr.value
- kword_params = self.METHOD_TO_PARAMS[key]
- except (AttributeError, KeyError):
- # Either not a method from the API or too convoluted to be sure.
- return updated
-
- # If the existing code is valid, keyword args come after positional args.
- # Therefore, all positional args must map to the first parameters.
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
- if any(k.keyword.value == "request" for k in kwargs):
- # We've already fixed this file, don't fix it again.
- return updated
-
- kwargs, ctrl_kwargs = partition(
- lambda a: not a.keyword.value in self.CTRL_PARAMS,
- kwargs
- )
-
- args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
- ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
- for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
- request_arg = cst.Arg(
- value=cst.Dict([
- cst.DictElement(
- cst.SimpleString("'{}'".format(name)),
- cst.Element(value=arg.value)
- )
- # Note: the args + kwargs looks silly, but keep in mind that
- # the control parameters had to be stripped out, and that
- # those could have been passed positionally or by keyword.
- for name, arg in zip(kword_params, args + kwargs)]),
- keyword=cst.Name("request")
- )
-
- return updated.with_changes(
- args=[request_arg] + ctrl_kwargs
- )
-
-
-def fix_files(
- in_dir: pathlib.Path,
- out_dir: pathlib.Path,
- *,
- transformer=dataCallTransformer(),
-):
- """Duplicate the input dir to the output dir, fixing file method calls.
-
- Preconditions:
- * in_dir is a real directory
- * out_dir is a real, empty directory
- """
- pyfile_gen = (
- pathlib.Path(os.path.join(root, f))
- for root, _, files in os.walk(in_dir)
- for f in files if os.path.splitext(f)[1] == ".py"
- )
-
- for fpath in pyfile_gen:
- with open(fpath, 'r') as f:
- src = f.read()
-
- # Parse the code and insert method call fixes.
- tree = cst.parse_module(src)
- updated = tree.visit(transformer)
-
- # Create the path and directory structure for the new file.
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
- updated_path.parent.mkdir(parents=True, exist_ok=True)
-
- # Generate the updated source file at the corresponding path.
- with open(updated_path, 'w') as f:
- f.write(updated.code)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser(
- description="""Fix up source that uses the data client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
- parameters in client method calls to keyword based parameters.
- Cases where it WILL FAIL include
- A) * or ** expansion in a method call.
- B) Calls via function or method alias (includes free function calls)
- C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
- These all constitute false negatives. The tool will also detect false
- positives when an API method shares a name with another method.
-""")
- parser.add_argument(
- '-d',
- '--input-directory',
- required=True,
- dest='input_dir',
- help='the input directory to walk for python files to fix up',
- )
- parser.add_argument(
- '-o',
- '--output-directory',
- required=True,
- dest='output_dir',
- help='the directory to output files fixed via un-flattening',
- )
- args = parser.parse_args()
- input_dir = pathlib.Path(args.input_dir)
- output_dir = pathlib.Path(args.output_dir)
- if not input_dir.is_dir():
- print(
- f"input directory '{input_dir}' does not exist or is not a directory",
- file=sys.stderr,
- )
- sys.exit(-1)
-
- if not output_dir.is_dir():
- print(
- f"output directory '{output_dir}' does not exist or is not a directory",
- file=sys.stderr,
- )
- sys.exit(-1)
-
- if os.listdir(output_dir):
- print(
- f"output directory '{output_dir}' is not empty",
- file=sys.stderr,
- )
- sys.exit(-1)
-
- fix_files(input_dir, output_dir)
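
For context, the removed fixup script rewrote flattened positional calls into
a single ``request`` dict, keyed by the parameter tuples in
``METHOD_TO_PARAMS``. A sketch of the transformation it performed (the
resource name is illustrative):

.. code:: python

    # Before fixup: a flattened positional argument.
    response = client.get_metadata("properties/123/metadata")

    # After fixup: one "request" keyword argument.
    response = client.get_metadata(request={"name": "properties/123/metadata"})
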
diff --git a/setup.py b/setup.py
index ff9ba09..2822c64 100644
--- a/setup.py
+++ b/setup.py
@@ -46,9 +46,8 @@
"proto-plus >= 1.4.0",
),
python_requires=">=3.6",
- scripts=["scripts/fixup_data_v1alpha_keywords.py",],
classifiers=[
- "Development Status :: 3 - Alpha",
+ "Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
diff --git a/synth.metadata b/synth.metadata
index 2197936..fdccd7c 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,30 +3,22 @@
{
"git": {
"name": ".",
- "remote": "https://github.com/googleapis/python-analytics-data.git",
- "sha": "054c81a64157c0a8ab213daac262e51ea5dfb072"
- }
- },
- {
- "git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "520682435235d9c503983a360a2090025aa47cd1",
- "internalRef": "350246057"
+ "remote": "git@github.com:googleapis/python-analytics-data",
+ "sha": "3b9a193edfba38c9e8f72580f400514fa283a79c"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4"
+ "sha": "0199c79b8324fba66476300824aa931788c47e2d"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4"
+ "sha": "0199c79b8324fba66476300824aa931788c47e2d"
}
}
],
@@ -35,98 +27,19 @@
"client": {
"source": "googleapis",
"apiName": "analyticsdata",
- "apiVersion": "v1alpha1",
+ "apiVersion": "v1alpha",
+ "language": "python",
+ "generator": "bazel"
+ }
+ },
+ {
+ "client": {
+ "source": "googleapis",
+ "apiName": "analyticsdata",
+ "apiVersion": "v1beta",
"language": "python",
"generator": "bazel"
}
}
- ],
- "generatedFiles": [
- ".coveragerc",
- ".flake8",
- ".github/CONTRIBUTING.md",
- ".github/ISSUE_TEMPLATE/bug_report.md",
- ".github/ISSUE_TEMPLATE/feature_request.md",
- ".github/ISSUE_TEMPLATE/support_request.md",
- ".github/PULL_REQUEST_TEMPLATE.md",
- ".github/release-please.yml",
- ".github/snippet-bot.yml",
- ".gitignore",
- ".kokoro/build.sh",
- ".kokoro/continuous/common.cfg",
- ".kokoro/continuous/continuous.cfg",
- ".kokoro/docker/docs/Dockerfile",
- ".kokoro/docker/docs/fetch_gpg_keys.sh",
- ".kokoro/docs/common.cfg",
- ".kokoro/docs/docs-presubmit.cfg",
- ".kokoro/docs/docs.cfg",
- ".kokoro/populate-secrets.sh",
- ".kokoro/presubmit/common.cfg",
- ".kokoro/presubmit/presubmit.cfg",
- ".kokoro/publish-docs.sh",
- ".kokoro/release.sh",
- ".kokoro/release/common.cfg",
- ".kokoro/release/release.cfg",
- ".kokoro/samples/lint/common.cfg",
- ".kokoro/samples/lint/continuous.cfg",
- ".kokoro/samples/lint/periodic.cfg",
- ".kokoro/samples/lint/presubmit.cfg",
- ".kokoro/samples/python3.6/common.cfg",
- ".kokoro/samples/python3.6/continuous.cfg",
- ".kokoro/samples/python3.6/periodic.cfg",
- ".kokoro/samples/python3.6/presubmit.cfg",
- ".kokoro/samples/python3.7/common.cfg",
- ".kokoro/samples/python3.7/continuous.cfg",
- ".kokoro/samples/python3.7/periodic.cfg",
- ".kokoro/samples/python3.7/presubmit.cfg",
- ".kokoro/samples/python3.8/common.cfg",
- ".kokoro/samples/python3.8/continuous.cfg",
- ".kokoro/samples/python3.8/periodic.cfg",
- ".kokoro/samples/python3.8/presubmit.cfg",
- ".kokoro/test-samples.sh",
- ".kokoro/trampoline.sh",
- ".kokoro/trampoline_v2.sh",
- ".pre-commit-config.yaml",
- ".trampolinerc",
- "CODE_OF_CONDUCT.md",
- "CONTRIBUTING.rst",
- "LICENSE",
- "MANIFEST.in",
- "docs/_static/custom.css",
- "docs/_templates/layout.html",
- "docs/conf.py",
- "docs/data_v1alpha/alpha_analytics_data.rst",
- "docs/data_v1alpha/services.rst",
- "docs/data_v1alpha/types.rst",
- "docs/multiprocessing.rst",
- "google/analytics/data/__init__.py",
- "google/analytics/data/py.typed",
- "google/analytics/data_v1alpha/__init__.py",
- "google/analytics/data_v1alpha/py.typed",
- "google/analytics/data_v1alpha/services/__init__.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/__init__.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/client.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/transports/__init__.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py",
- "google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py",
- "google/analytics/data_v1alpha/types/__init__.py",
- "google/analytics/data_v1alpha/types/analytics_data_api.py",
- "google/analytics/data_v1alpha/types/data.py",
- "mypy.ini",
- "noxfile.py",
- "renovate.json",
- "scripts/decrypt-secrets.sh",
- "scripts/readme-gen/readme_gen.py",
- "scripts/readme-gen/templates/README.tmpl.rst",
- "scripts/readme-gen/templates/auth.tmpl.rst",
- "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
- "scripts/readme-gen/templates/install_deps.tmpl.rst",
- "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
- "setup.cfg",
- "testing/.gitignore",
- "tests/unit/gapic/data_v1alpha/__init__.py",
- "tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py"
]
}
\ No newline at end of file
diff --git a/synth.py b/synth.py
index a3e9232..27ce6d0 100644
--- a/synth.py
+++ b/synth.py
@@ -22,24 +22,27 @@
gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
+versions = ["v1alpha", "v1beta"]
+
# ----------------------------------------------------------------------------
# Generate analytics data GAPIC layer
# ----------------------------------------------------------------------------
-library = gapic.py_library(
- service="analyticsdata",
- version="v1alpha1",
- bazel_target="//google/analytics/data/v1alpha:google-analytics-data-v1alpha-py",
-)
+for version in versions:
+ library = gapic.py_library(
+ service="analyticsdata",
+ version=version,
+ bazel_target=f"//google/analytics/data/{version}:google-analytics-data-{version}-py",
+ )
-s.move(
- library,
- excludes=[
- "setup.py",
- "README.rst",
- "docs/index.rst",
- "scripts/fixup_data_v1alpha_keywords.py",
- ],
-)
+ s.move(
+ library,
+ excludes=[
+ "setup.py",
+ "README.rst",
+ "docs/index.rst",
+ f"scripts/fixup_data_{version}_keywords.py",
+ ],
+ )
# ----------------------------------------------------------------------------
# Add templated files
@@ -52,10 +55,15 @@
# fix coverage target
s.replace(
"noxfile.py",
- """["']--cov=google\.cloud\.analyticsdata",
-(\s+)[""]--cov=google.cloud["'],""",
- """"--cov=google.analytics.data",
-\g<1>"--cov=google.analytics",""",
+ """(\s+)["']--cov=google.cloud["'],""",
+ """"--cov=google.analytics",""",
+)
+
+# Wrap regex in docstring that sphinx thinks is a link with ``
+s.replace(
+ "google/**/data.py",
+ '''"\^\[a-zA-Z0-9_\]\$"''',
+ """``^[a-zA-Z0-9_]$``""",
)
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
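
The second ``s.replace`` above exists because Sphinx reads the bare quoted
regex in generated docstrings as a malformed link; wrapping it in double
backticks renders it as a literal. What that replacement does, sketched with
``re.sub``:

.. code:: python

    import re

    # Illustrative input resembling a generated docstring fragment.
    src = 'Dimension names must match "^[a-zA-Z0-9_]$" exactly.'
    out = re.sub(r'"\^\[a-zA-Z0-9_\]\$"', "``^[a-zA-Z0-9_]$``", src)
    assert out == "Dimension names must match ``^[a-zA-Z0-9_]$`` exactly."
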
diff --git a/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py
index 17c8aac..59ff037 100644
--- a/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py
+++ b/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py
@@ -176,7 +176,7 @@ def test_alpha_analytics_data_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -192,7 +192,7 @@ def test_alpha_analytics_data_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -208,7 +208,7 @@ def test_alpha_analytics_data_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -236,7 +236,7 @@ def test_alpha_analytics_data_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -297,29 +297,25 @@ def test_alpha_analytics_data_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -328,66 +324,53 @@ def test_alpha_analytics_data_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -413,7 +396,7 @@ def test_alpha_analytics_data_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -443,7 +426,7 @@ def test_alpha_analytics_data_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -462,7 +445,7 @@ def test_alpha_analytics_data_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -1264,6 +1247,56 @@ def test_alpha_analytics_data_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.AlphaAnalyticsDataGrpcTransport,
+ transports.AlphaAnalyticsDataGrpcAsyncIOTransport,
+ ],
+)
+def test_alpha_analytics_data_grpc_transport_client_cert_source_for_mtls(
+ transport_class,
+):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check that when ssl_channel_credentials is not provided,
+ # client_cert_source_for_mtls is used instead.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_alpha_analytics_data_host_no_port():
client = AlphaAnalyticsDataClient(
credentials=credentials.AnonymousCredentials(),
@@ -1308,6 +1341,8 @@ def test_alpha_analytics_data_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -1363,6 +1398,8 @@ def test_alpha_analytics_data_transport_channel_mtls_with_client_cert_source(
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
diff --git a/tests/unit/gapic/data_v1beta/__init__.py b/tests/unit/gapic/data_v1beta/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/tests/unit/gapic/data_v1beta/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py
new file mode 100644
index 0000000..a23ee2f
--- /dev/null
+++ b/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py
@@ -0,0 +1,1965 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.analytics.data_v1beta.services.beta_analytics_data import (
+ BetaAnalyticsDataAsyncClient,
+)
+from google.analytics.data_v1beta.services.beta_analytics_data import (
+ BetaAnalyticsDataClient,
+)
+from google.analytics.data_v1beta.services.beta_analytics_data import pagers
+from google.analytics.data_v1beta.services.beta_analytics_data import transports
+from google.analytics.data_v1beta.types import analytics_data_api
+from google.analytics.data_v1beta.types import data
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.oauth2 import service_account
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert BetaAnalyticsDataClient._get_default_mtls_endpoint(None) is None
+ assert (
+ BetaAnalyticsDataClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ BetaAnalyticsDataClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ BetaAnalyticsDataClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ BetaAnalyticsDataClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ BetaAnalyticsDataClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+def test_beta_analytics_data_client_from_service_account_info():
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = BetaAnalyticsDataClient.from_service_account_info(info)
+ assert client.transport._credentials == creds
+
+ assert client.transport._host == "analyticsdata.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [BetaAnalyticsDataClient, BetaAnalyticsDataAsyncClient,]
+)
+def test_beta_analytics_data_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+
+ assert client.transport._host == "analyticsdata.googleapis.com:443"
+
+
+def test_beta_analytics_data_client_get_transport_class():
+ transport = BetaAnalyticsDataClient.get_transport_class()
+ available_transports = [
+ transports.BetaAnalyticsDataGrpcTransport,
+ ]
+ assert transport in available_transports
+
+ transport = BetaAnalyticsDataClient.get_transport_class("grpc")
+ assert transport == transports.BetaAnalyticsDataGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc"),
+ (
+ BetaAnalyticsDataAsyncClient,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ BetaAnalyticsDataClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(BetaAnalyticsDataClient),
+)
+@mock.patch.object(
+ BetaAnalyticsDataAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(BetaAnalyticsDataAsyncClient),
+)
+def test_beta_analytics_data_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if a transport instance is provided, no new transport is created.
+ with mock.patch.object(BetaAnalyticsDataClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if the transport is provided as a string, a new transport is created.
+ with mock.patch.object(BetaAnalyticsDataClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ BetaAnalyticsDataClient,
+ transports.BetaAnalyticsDataGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ BetaAnalyticsDataAsyncClient,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ BetaAnalyticsDataClient,
+ transports.BetaAnalyticsDataGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ BetaAnalyticsDataAsyncClient,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ BetaAnalyticsDataClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(BetaAnalyticsDataClient),
+)
+@mock.patch.object(
+ BetaAnalyticsDataAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(BetaAnalyticsDataAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_beta_analytics_data_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior: the endpoint switches to the default
+ # mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc"),
+ (
+ BetaAnalyticsDataAsyncClient,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_beta_analytics_data_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (BetaAnalyticsDataClient, transports.BetaAnalyticsDataGrpcTransport, "grpc"),
+ (
+ BetaAnalyticsDataAsyncClient,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_beta_analytics_data_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_beta_analytics_data_client_client_options_from_dict():
+ with mock.patch(
+ "google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataGrpcTransport.__init__"
+ ) as grpc_transport:
+ grpc_transport.return_value = None
+ client = BetaAnalyticsDataClient(
+ client_options={"api_endpoint": "squid.clam.whelk"}
+ )
+ grpc_transport.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_run_report(
+ transport: str = "grpc", request_type=analytics_data_api.RunReportRequest
+):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_report), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.RunReportResponse(
+ next_page_token="next_page_token_value", total_size=1086,
+ )
+
+ response = client.run_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.RunReportRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, pagers.RunReportPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+ assert response.total_size == 1086
+
+
+def test_run_report_from_dict():
+ test_run_report(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_run_report_async(
+ transport: str = "grpc_asyncio", request_type=analytics_data_api.RunReportRequest
+):
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_report), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.RunReportResponse(
+ next_page_token="next_page_token_value", total_size=1086,
+ )
+ )
+
+ response = await client.run_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.RunReportRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.RunReportAsyncPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+ assert response.total_size == 1086
+
+
+@pytest.mark.asyncio
+async def test_run_report_async_from_dict():
+ await test_run_report_async(request_type=dict)
+
+
+def test_run_report_field_headers():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.RunReportRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_report), "__call__") as call:
+ call.return_value = analytics_data_api.RunReportResponse()
+
+ client.run_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_run_report_field_headers_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.RunReportRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_report), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.RunReportResponse()
+ )
+
+ await client.run_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+def test_run_report_pager():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_report), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ ],
+ next_page_token="abc",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[], next_page_token="def",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(),], next_page_token="ghi",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(), data.DimensionHeader(),],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("property", ""),)),
+ )
+ pager = client.run_report(request={})
+
+ assert pager._metadata == metadata
+
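+        # Iterating the pager itself flattens rows across pages:
+        # 3 + 0 + 1 + 2 dimension headers from the four pages above.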
+        results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, data.DimensionHeader) for i in results)
+
+
+def test_run_report_pages():
+    client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_report), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ ],
+ next_page_token="abc",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[], next_page_token="def",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(),], next_page_token="ghi",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(), data.DimensionHeader(),],
+ ),
+ RuntimeError,
+ )
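+        # Unlike iterating the pager (which yields individual DimensionHeader
+        # items), the .pages view yields whole responses, so each page's
+        # raw_page.next_page_token can be inspected; "" marks the final page.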
+ pages = list(client.run_report(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_run_report_async_pager():
+    client = BetaAnalyticsDataAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_report), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ ],
+ next_page_token="abc",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[], next_page_token="def",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(),], next_page_token="ghi",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(), data.DimensionHeader(),],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.run_report(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, data.DimensionHeader) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_run_report_async_pages():
+    client = BetaAnalyticsDataAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_report), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ data.DimensionHeader(),
+ ],
+ next_page_token="abc",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[], next_page_token="def",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(),], next_page_token="ghi",
+ ),
+ analytics_data_api.RunReportResponse(
+ dimension_headers=[data.DimensionHeader(), data.DimensionHeader(),],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.run_report(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_run_pivot_report(
+ transport: str = "grpc", request_type=analytics_data_api.RunPivotReportRequest
+):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_pivot_report), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.RunPivotReportResponse()
+
+ response = client.run_pivot_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.RunPivotReportRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analytics_data_api.RunPivotReportResponse)
+
+
+def test_run_pivot_report_from_dict():
+ test_run_pivot_report(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_run_pivot_report_async(
+ transport: str = "grpc_asyncio",
+ request_type=analytics_data_api.RunPivotReportRequest,
+):
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_pivot_report), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.RunPivotReportResponse()
+ )
+
+ response = await client.run_pivot_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.RunPivotReportRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analytics_data_api.RunPivotReportResponse)
+
+
+@pytest.mark.asyncio
+async def test_run_pivot_report_async_from_dict():
+ await test_run_pivot_report_async(request_type=dict)
+
+
+def test_run_pivot_report_field_headers():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.RunPivotReportRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_pivot_report), "__call__") as call:
+ call.return_value = analytics_data_api.RunPivotReportResponse()
+
+ client.run_pivot_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_run_pivot_report_field_headers_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.RunPivotReportRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.run_pivot_report), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.RunPivotReportResponse()
+ )
+
+ await client.run_pivot_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+def test_batch_run_reports(
+ transport: str = "grpc", request_type=analytics_data_api.BatchRunReportsRequest
+):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_reports), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.BatchRunReportsResponse()
+
+ response = client.batch_run_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.BatchRunReportsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analytics_data_api.BatchRunReportsResponse)
+
+
+def test_batch_run_reports_from_dict():
+ test_batch_run_reports(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_batch_run_reports_async(
+ transport: str = "grpc_asyncio",
+ request_type=analytics_data_api.BatchRunReportsRequest,
+):
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_reports), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.BatchRunReportsResponse()
+ )
+
+ response = await client.batch_run_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.BatchRunReportsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analytics_data_api.BatchRunReportsResponse)
+
+
+@pytest.mark.asyncio
+async def test_batch_run_reports_async_from_dict():
+ await test_batch_run_reports_async(request_type=dict)
+
+
+def test_batch_run_reports_field_headers():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.BatchRunReportsRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_reports), "__call__"
+ ) as call:
+ call.return_value = analytics_data_api.BatchRunReportsResponse()
+
+ client.batch_run_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_batch_run_reports_field_headers_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.BatchRunReportsRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_reports), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.BatchRunReportsResponse()
+ )
+
+ await client.batch_run_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+def test_batch_run_pivot_reports(
+ transport: str = "grpc", request_type=analytics_data_api.BatchRunPivotReportsRequest
+):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_pivot_reports), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.BatchRunPivotReportsResponse()
+
+ response = client.batch_run_pivot_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.BatchRunPivotReportsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse)
+
+
+def test_batch_run_pivot_reports_from_dict():
+ test_batch_run_pivot_reports(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_batch_run_pivot_reports_async(
+ transport: str = "grpc_asyncio",
+ request_type=analytics_data_api.BatchRunPivotReportsRequest,
+):
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_pivot_reports), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.BatchRunPivotReportsResponse()
+ )
+
+ response = await client.batch_run_pivot_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.BatchRunPivotReportsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analytics_data_api.BatchRunPivotReportsResponse)
+
+
+@pytest.mark.asyncio
+async def test_batch_run_pivot_reports_async_from_dict():
+ await test_batch_run_pivot_reports_async(request_type=dict)
+
+
+def test_batch_run_pivot_reports_field_headers():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.BatchRunPivotReportsRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_pivot_reports), "__call__"
+ ) as call:
+ call.return_value = analytics_data_api.BatchRunPivotReportsResponse()
+
+ client.batch_run_pivot_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_batch_run_pivot_reports_field_headers_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.BatchRunPivotReportsRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_run_pivot_reports), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.BatchRunPivotReportsResponse()
+ )
+
+ await client.batch_run_pivot_reports(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+def test_get_metadata(
+ transport: str = "grpc", request_type=analytics_data_api.GetMetadataRequest
+):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_metadata), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.Metadata(name="name_value",)
+
+ response = client.get_metadata(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.GetMetadataRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analytics_data_api.Metadata)
+
+ assert response.name == "name_value"
+
+
+def test_get_metadata_from_dict():
+ test_get_metadata(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_async(
+ transport: str = "grpc_asyncio", request_type=analytics_data_api.GetMetadataRequest
+):
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_metadata), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.Metadata(name="name_value",)
+ )
+
+ response = await client.get_metadata(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.GetMetadataRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analytics_data_api.Metadata)
+
+ assert response.name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_async_from_dict():
+ await test_get_metadata_async(request_type=dict)
+
+
+def test_get_metadata_field_headers():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.GetMetadataRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_metadata), "__call__") as call:
+ call.return_value = analytics_data_api.Metadata()
+
+ client.get_metadata(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_field_headers_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.GetMetadataRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_metadata), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.Metadata()
+ )
+
+ await client.get_metadata(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_metadata_flattened():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_metadata), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.Metadata()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_metadata(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_get_metadata_flattened_error():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
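+    # For example (values illustrative), each of these alone is valid:
+    #     client.get_metadata(request=analytics_data_api.GetMetadataRequest())
+    #     client.get_metadata(name="name_value")
+    # but combining a request object with a flattened field raises ValueError.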
+ with pytest.raises(ValueError):
+ client.get_metadata(
+ analytics_data_api.GetMetadataRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_flattened_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_metadata), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            analytics_data_api.Metadata()
+        )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_metadata(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_flattened_error_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_metadata(
+ analytics_data_api.GetMetadataRequest(), name="name_value",
+ )
+
+
+def test_run_realtime_report(
+ transport: str = "grpc", request_type=analytics_data_api.RunRealtimeReportRequest
+):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_realtime_report), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = analytics_data_api.RunRealtimeReportResponse(
+ total_size=1086,
+ )
+
+ response = client.run_realtime_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.RunRealtimeReportRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, analytics_data_api.RunRealtimeReportResponse)
+
+ assert response.total_size == 1086
+
+
+def test_run_realtime_report_from_dict():
+ test_run_realtime_report(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_run_realtime_report_async(
+ transport: str = "grpc_asyncio",
+ request_type=analytics_data_api.RunRealtimeReportRequest,
+):
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_realtime_report), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.RunRealtimeReportResponse(total_size=1086,)
+ )
+
+ response = await client.run_realtime_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == analytics_data_api.RunRealtimeReportRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, analytics_data_api.RunRealtimeReportResponse)
+
+ assert response.total_size == 1086
+
+
+@pytest.mark.asyncio
+async def test_run_realtime_report_async_from_dict():
+ await test_run_realtime_report_async(request_type=dict)
+
+
+def test_run_realtime_report_field_headers():
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.RunRealtimeReportRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_realtime_report), "__call__"
+ ) as call:
+ call.return_value = analytics_data_api.RunRealtimeReportResponse()
+
+ client.run_realtime_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_run_realtime_report_field_headers_async():
+ client = BetaAnalyticsDataAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = analytics_data_api.RunRealtimeReportRequest()
+ request.property = "property/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.run_realtime_report), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ analytics_data_api.RunRealtimeReportResponse()
+ )
+
+ await client.run_realtime_report(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "property=property/value",) in kw["metadata"]
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.BetaAnalyticsDataGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.BetaAnalyticsDataGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = BetaAnalyticsDataClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.BetaAnalyticsDataGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = BetaAnalyticsDataClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.BetaAnalyticsDataGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = BetaAnalyticsDataClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.BetaAnalyticsDataGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.BetaAnalyticsDataGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BetaAnalyticsDataGrpcTransport,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+    # Test that default credentials are used if none are provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = BetaAnalyticsDataClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client.transport, transports.BetaAnalyticsDataGrpcTransport,)
+
+
+def test_beta_analytics_data_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.BetaAnalyticsDataTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_beta_analytics_data_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.BetaAnalyticsDataTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "run_report",
+ "run_pivot_report",
+ "batch_run_reports",
+ "batch_run_pivot_reports",
+ "get_metadata",
+ "run_realtime_report",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+
+def test_beta_analytics_data_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.BetaAnalyticsDataTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+def test_beta_analytics_data_base_transport_with_adc():
+    # Test that default credentials are used when credentials and
+    # credentials_file are both None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.analytics.data_v1beta.services.beta_analytics_data.transports.BetaAnalyticsDataTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.BetaAnalyticsDataTransport()
+ adc.assert_called_once()
+
+
+def test_beta_analytics_data_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ BetaAnalyticsDataClient()
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ quota_project_id=None,
+ )
+
+
+def test_beta_analytics_data_transport_auth_adc():
+    # If credentials are not provided, the transport class should fall back
+    # to ADC credentials, even when a host is given explicitly.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.BetaAnalyticsDataGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ quota_project_id="octopus",
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BetaAnalyticsDataGrpcTransport,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ ],
+)
+def test_beta_analytics_data_grpc_transport_client_cert_source_for_mtls(
+ transport_class,
+):
+ cred = credentials.AnonymousCredentials()
+
+    # Check that ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+    # Check that client_cert_source_for_mtls is used when ssl_channel_credentials
+    # is not provided.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_beta_analytics_data_host_no_port():
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="analyticsdata.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "analyticsdata.googleapis.com:443"
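+    # An endpoint without an explicit port is normalized to ":443"; the next
+    # test shows that an explicit port is preserved verbatim.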
+
+
+def test_beta_analytics_data_host_with_port():
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="analyticsdata.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "analyticsdata.googleapis.com:8000"
+
+
+def test_beta_analytics_data_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.BetaAnalyticsDataGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_beta_analytics_data_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.BetaAnalyticsDataGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BetaAnalyticsDataGrpcTransport,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ ],
+)
+def test_beta_analytics_data_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
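+            # client_cert_source_callback (defined near the top of this
+            # module) returns (b"cert bytes", b"key bytes"); those bytes
+            # should end up in the SSL channel credentials.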
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.BetaAnalyticsDataGrpcTransport,
+ transports.BetaAnalyticsDataGrpcAsyncIOTransport,
+ ],
+)
+def test_beta_analytics_data_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=(
+ "https://www.googleapis.com/auth/analytics",
+ "https://www.googleapis.com/auth/analytics.readonly",
+ ),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_metadata_path():
+ property = "squid"
+
+ expected = "properties/{property}/metadata".format(property=property,)
+ actual = BetaAnalyticsDataClient.metadata_path(property)
+ assert expected == actual
+
+
+def test_parse_metadata_path():
+ expected = {
+ "property": "clam",
+ }
+ path = BetaAnalyticsDataClient.metadata_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BetaAnalyticsDataClient.parse_metadata_path(path)
+ assert expected == actual
+
+
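+# Editor's sketch (illustrative property id, not generated): the two path
+# helpers above are inverses of each other.
+def test_metadata_path_round_trip_sketch():
+    path = BetaAnalyticsDataClient.metadata_path("123")
+    assert path == "properties/123/metadata"
+    assert BetaAnalyticsDataClient.parse_metadata_path(path) == {"property": "123"}
+
+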
+def test_common_billing_account_path():
+ billing_account = "whelk"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = BetaAnalyticsDataClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "octopus",
+ }
+ path = BetaAnalyticsDataClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BetaAnalyticsDataClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "oyster"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = BetaAnalyticsDataClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nudibranch",
+ }
+ path = BetaAnalyticsDataClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BetaAnalyticsDataClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "cuttlefish"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = BetaAnalyticsDataClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "mussel",
+ }
+ path = BetaAnalyticsDataClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BetaAnalyticsDataClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "winkle"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = BetaAnalyticsDataClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "nautilus",
+ }
+ path = BetaAnalyticsDataClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BetaAnalyticsDataClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "scallop"
+ location = "abalone"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = BetaAnalyticsDataClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "squid",
+ "location": "clam",
+ }
+ path = BetaAnalyticsDataClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = BetaAnalyticsDataClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_with_default_client_info():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.BetaAnalyticsDataTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = BetaAnalyticsDataClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.BetaAnalyticsDataTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = BetaAnalyticsDataClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)