From ba32fe15d864c360f240815b3d13bfda0373bc74 Mon Sep 17 00:00:00 2001 From: Bamboo Le <8941262+TrucHLe@users.noreply.github.com> Date: Mon, 13 Sep 2021 11:29:04 -0400 Subject: [PATCH] samples: export data to BigQuery (#45) --- .../samples/snippets/export_to_bigquery.py | 42 ++++++++++++ .../samples/snippets/noxfile.py | 44 +++++++------ .../samples/snippets/requirements.txt | 1 + .../test_enable_pubsub_notifications.py | 2 +- .../snippets/test_export_to_bigquery.py | 65 +++++++++++++++++++ 5 files changed, 134 insertions(+), 20 deletions(-) create mode 100644 packages/google-cloud-contact-center-insights/samples/snippets/export_to_bigquery.py create mode 100644 packages/google-cloud-contact-center-insights/samples/snippets/test_export_to_bigquery.py diff --git a/packages/google-cloud-contact-center-insights/samples/snippets/export_to_bigquery.py b/packages/google-cloud-contact-center-insights/samples/snippets/export_to_bigquery.py new file mode 100644 index 000000000000..06e2e41bda8a --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/snippets/export_to_bigquery.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# [START contactcenterinsights_export_to_bigquery] +from google.cloud import contact_center_insights_v1 + + +def export_to_bigquery( + project_id: str, + bigquery_project_id: str, + bigquery_dataset_id: str, + bigquery_table_id: str, +) -> None: + # Construct an export request. + request = contact_center_insights_v1.ExportInsightsDataRequest() + request.parent = contact_center_insights_v1.ContactCenterInsightsClient.common_location_path( + project_id, "us-central1" + ) + request.big_query_destination.project_id = bigquery_project_id + request.big_query_destination.dataset = bigquery_dataset_id + request.big_query_destination.table = bigquery_table_id + request.filter = 'agent_id="007"' + + # Call the Insights client to export data to BigQuery. + insights_client = contact_center_insights_v1.ContactCenterInsightsClient() + export_operation = insights_client.export_insights_data(request=request) + export_operation.result(timeout=600000) + print("Exported data to BigQuery") + + +# [END contactcenterinsights_export_to_bigquery] diff --git a/packages/google-cloud-contact-center-insights/samples/snippets/noxfile.py b/packages/google-cloud-contact-center-insights/samples/snippets/noxfile.py index e73436a15626..b008613f03ff 100644 --- a/packages/google-cloud-contact-center-insights/samples/snippets/noxfile.py +++ b/packages/google-cloud-contact-center-insights/samples/snippets/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': [], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. 
Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -92,11 +90,14 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) # # Style Checks # @@ -141,7 +142,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +151,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +168,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +177,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +209,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +219,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/packages/google-cloud-contact-center-insights/samples/snippets/requirements.txt b/packages/google-cloud-contact-center-insights/samples/snippets/requirements.txt index d8de57c1ba2f..b9e610596a8b 100644 --- a/packages/google-cloud-contact-center-insights/samples/snippets/requirements.txt +++ b/packages/google-cloud-contact-center-insights/samples/snippets/requirements.txt @@ -1,2 +1,3 @@ google-api-core==2.0.1 +google-cloud-bigquery==2.26.0 google-cloud-contact-center-insights==0.2.0 diff --git a/packages/google-cloud-contact-center-insights/samples/snippets/test_enable_pubsub_notifications.py b/packages/google-cloud-contact-center-insights/samples/snippets/test_enable_pubsub_notifications.py index 2ab1e79ac27b..42eb87d9ab6a 100644 --- a/packages/google-cloud-contact-center-insights/samples/snippets/test_enable_pubsub_notifications.py +++ b/packages/google-cloud-contact-center-insights/samples/snippets/test_enable_pubsub_notifications.py @@ -70,7 +70,7 @@ def disable_pubsub_notifications(project_id): def test_enable_pubsub_notifications( - capsys, project_id, pubsub_topics, disable_pubsub_notifications + capsys, project_id, pubsub_topics, disable_pubsub_notifications ): conversation_topic, analysis_topic = pubsub_topics diff --git a/packages/google-cloud-contact-center-insights/samples/snippets/test_export_to_bigquery.py b/packages/google-cloud-contact-center-insights/samples/snippets/test_export_to_bigquery.py new file mode 100644 index 000000000000..6f628605ce1d --- /dev/null +++ b/packages/google-cloud-contact-center-insights/samples/snippets/test_export_to_bigquery.py @@ -0,0 +1,65 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import uuid + +import google.auth + +from google.cloud import bigquery + +import pytest + +import export_to_bigquery + +GCLOUD_TESTS_PREFIX = "python_samples_tests" + + +@pytest.fixture +def project_id(): + _, project_id = google.auth.default() + return project_id + + +@pytest.fixture +def unique_id(): + uuid_hex = uuid.uuid4().hex[:8] + return f"{GCLOUD_TESTS_PREFIX}_{uuid_hex}" + + +@pytest.fixture +def bigquery_resources(project_id, unique_id): + # Create a BigQuery dataset. + bigquery_client = bigquery.Client() + dataset_id = unique_id + table_id = unique_id + + dataset = bigquery.Dataset(f"{project_id}.{dataset_id}") + dataset.location = "US" + bigquery_client.create_dataset(dataset, timeout=30) + + # Create a BigQuery table under the created dataset. 
+    table = bigquery.Table(f"{project_id}.{dataset_id}.{table_id}")
+    bigquery_client.create_table(table)
+
+    yield dataset_id, table_id
+
+    # Delete the BigQuery dataset and table.
+    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
+
+
+def test_export_data_to_bigquery(capsys, project_id, bigquery_resources):
+    dataset_id, table_id = bigquery_resources
+    export_to_bigquery.export_to_bigquery(project_id, project_id, dataset_id, table_id)
+    out, err = capsys.readouterr()
+    assert "Exported data to BigQuery" in out
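
Note: beyond the pytest coverage above, the new sample can also be exercised directly with a short driver script. The sketch below is not part of this patch; the project, dataset, and table names are placeholders, and it assumes the BigQuery dataset and table already exist and that the caller has Contact Center Insights and BigQuery permissions on the project.

# driver.py -- hypothetical local driver for the new sample (not included in this patch)
import export_to_bigquery

if __name__ == "__main__":
    # Placeholder resource names; substitute real project, dataset, and table IDs.
    export_to_bigquery.export_to_bigquery(
        project_id="my-insights-project",
        bigquery_project_id="my-bigquery-project",
        bigquery_dataset_id="insights_dataset",
        bigquery_table_id="insights_table",
    )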