Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

BigQuery: Raise ValueError when BQ Storage is required but missing #7726

Merged
merged 1 commit into from
Apr 19, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 26 additions & 4 deletions bigquery/google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from __future__ import absolute_import

import collections
import concurrent.futures
import copy
import datetime
import json
Expand All @@ -25,6 +26,11 @@

import six

try:
from google.cloud import bigquery_storage_v1beta1
except ImportError: # pragma: NO COVER
bigquery_storage_v1beta1 = None

try:
import pandas
except ImportError: # pragma: NO COVER
Expand All @@ -46,6 +52,10 @@
from google.cloud.bigquery.external_config import ExternalConfig


_NO_BQSTORAGE_ERROR = (
"The google-cloud-bigquery-storage library is not installed, "
"please install google-cloud-bigquery-storage to use bqstorage features."
)
_NO_PANDAS_ERROR = (
"The pandas library is not installed, please install "
"pandas to use the to_dataframe() function."
Expand Down Expand Up @@ -274,6 +284,9 @@ def to_api_repr(self):
def to_bqstorage(self):
"""Construct a BigQuery Storage API representation of this table.

Install the ``google-cloud-bigquery-storage`` package to use this
feature.

If the ``table_id`` contains a partition identifier (e.g.
``my_table$201812``) or a snapshot identifier (e.g.
``mytable@1234567890``), it is ignored. Use
Expand All @@ -285,8 +298,14 @@ def to_bqstorage(self):
Returns:
google.cloud.bigquery_storage_v1beta1.types.TableReference:
A reference to this table in the BigQuery Storage API.

Raises:
ValueError:
If the :mod:`google.cloud.bigquery_storage_v1beta1` module
cannot be imported.
"""
from google.cloud import bigquery_storage_v1beta1
if bigquery_storage_v1beta1 is None:
raise ValueError(_NO_BQSTORAGE_ERROR)

table_ref = bigquery_storage_v1beta1.types.TableReference()
table_ref.project_id = self._project
Expand Down Expand Up @@ -1369,8 +1388,8 @@ def _to_dataframe_tabledata_list(self, dtypes, progress_bar=None):

def _to_dataframe_bqstorage(self, bqstorage_client, dtypes):
"""Use (faster, but billable) BQ Storage API to construct DataFrame."""
import concurrent.futures
from google.cloud import bigquery_storage_v1beta1
if bigquery_storage_v1beta1 is None:
raise ValueError(_NO_BQSTORAGE_ERROR)

if "$" in self._table.table_id:
raise ValueError(
Expand Down Expand Up @@ -1496,7 +1515,10 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non
from the destination table's schema.

Raises:
ValueError: If the :mod:`pandas` library cannot be imported.
ValueError:
If the :mod:`pandas` library cannot be imported, or the
:mod:`google.cloud.bigquery_storage_v1beta1` module is
required but cannot be imported.

"""
if pandas is None:
Expand Down
37 changes: 37 additions & 0 deletions bigquery/tests/unit/test_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -1860,6 +1860,28 @@ def test_to_dataframe_w_bqstorage_raises_auth_error(self):
with pytest.raises(google.api_core.exceptions.Forbidden):
row_iterator.to_dataframe(bqstorage_client=bqstorage_client)

@unittest.skipIf(pandas is None, "Requires `pandas`")
@unittest.skipIf(
    bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
)
def test_to_dataframe_w_bqstorage_raises_import_error(self):
    """Verify ``to_dataframe`` raises :class:`ValueError` (not ImportError)
    when a BQ Storage client is supplied but the library is unavailable.

    The unavailable library is simulated by patching the module-level
    ``bigquery_storage_v1beta1`` name in the ``table`` module to ``None``.
    """
    from google.cloud.bigquery import table as mut

    bqstorage_client = mock.create_autospec(
        bigquery_storage_v1beta1.BigQueryStorageClient
    )
    path = "/foo"
    api_request = mock.Mock(return_value={"rows": []})
    row_iterator = mut.RowIterator(
        _mock_client(), api_request, path, [], table=mut.Table("proj.dset.tbl")
    )

    with mock.patch.object(mut, "bigquery_storage_v1beta1", None), pytest.raises(
        ValueError
    ) as exc:
        row_iterator.to_dataframe(bqstorage_client=bqstorage_client)
    # Check the exception message itself: str(exc) on an ExceptionInfo
    # includes file/line context, so match against exc.value instead.
    assert mut._NO_BQSTORAGE_ERROR in str(exc.value)

@unittest.skipIf(
bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
)
Expand Down Expand Up @@ -2112,3 +2134,18 @@ def test_table_reference_to_bqstorage():
for case, cls in itertools.product(cases, classes):
got = cls.from_string(case).to_bqstorage()
assert got == expected


@unittest.skipIf(
    bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
)
def test_table_reference_to_bqstorage_raises_import_error():
    """Verify every table class raises :class:`ValueError` from
    ``to_bqstorage()`` when the BQ Storage library is unavailable.

    The unavailable library is simulated by patching the module-level
    ``bigquery_storage_v1beta1`` name in the ``table`` module to ``None``.
    """
    from google.cloud.bigquery import table as mut

    classes = (mut.TableReference, mut.Table, mut.TableListItem)
    for cls in classes:
        with mock.patch.object(mut, "bigquery_storage_v1beta1", None), pytest.raises(
            ValueError
        ) as exc:
            cls.from_string("my-project.my_dataset.my_table").to_bqstorage()
        # Check the exception message itself: str(exc) on an ExceptionInfo
        # includes file/line context, so match against exc.value instead.
        assert mut._NO_BQSTORAGE_ERROR in str(exc.value)