Skip to content

Commit

Permalink
Merge branch 'main' into issue790-relax_column
Browse files Browse the repository at this point in the history
  • Loading branch information
tswast authored Feb 27, 2023
2 parents cda89df + cd0aaa1 commit 12f0ecd
Show file tree
Hide file tree
Showing 27 changed files with 262 additions and 165 deletions.
2 changes: 1 addition & 1 deletion .github/.OwlBot.lock.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,4 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320
digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ docs.metadata

# Virtual environment
env/
venv/

# Test logs
coverage.xml
Expand Down
2 changes: 1 addition & 1 deletion .kokoro/requirements.in
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
gcp-docuploader
gcp-releasetool
gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x
importlib-metadata
typing-extensions
twine
Expand Down
55 changes: 25 additions & 30 deletions .kokoro/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -113,33 +113,28 @@ commonmark==0.9.1 \
--hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
--hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9
# via rich
cryptography==38.0.3 \
--hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \
--hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \
--hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \
--hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \
--hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \
--hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \
--hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \
--hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \
--hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \
--hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \
--hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \
--hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \
--hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \
--hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \
--hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \
--hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \
--hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \
--hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \
--hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \
--hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \
--hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \
--hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \
--hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \
--hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \
--hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \
--hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722
cryptography==39.0.1 \
--hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \
--hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \
--hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \
--hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \
--hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \
--hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \
--hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \
--hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \
--hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \
--hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \
--hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \
--hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \
--hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \
--hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \
--hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \
--hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \
--hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \
--hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \
--hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \
--hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \
--hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8
# via
# gcp-releasetool
# secretstorage
Expand All @@ -159,9 +154,9 @@ gcp-docuploader==0.6.4 \
--hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \
--hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf
# via -r requirements.in
gcp-releasetool==1.10.0 \
--hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \
--hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d
gcp-releasetool==1.10.5 \
--hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \
--hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9
# via -r requirements.in
google-api-core==2.10.2 \
--hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \
Expand Down
34 changes: 34 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,40 @@
[1]: https://pypi.org/project/google-cloud-bigquery/#history


## [3.6.0](https://github.com/googleapis/python-bigquery/compare/v3.5.0...v3.6.0) (2023-02-22)


### Features

* Adding preserveAsciiControlCharacter to CSVOptions ([#1491](https://github.com/googleapis/python-bigquery/issues/1491)) ([f832e7a](https://github.com/googleapis/python-bigquery/commit/f832e7a0b79f3567a0773ff11630e2f48bed60db))


### Bug Fixes

* Annotate optional integer parameters with optional type ([#1487](https://github.com/googleapis/python-bigquery/issues/1487)) ([a190aaa](https://github.com/googleapis/python-bigquery/commit/a190aaa09ae73e8b6a83b7b213247f95fde57615))
* Loosen ipywidget dependency ([#1504](https://github.com/googleapis/python-bigquery/issues/1504)) ([20d3276](https://github.com/googleapis/python-bigquery/commit/20d3276cc29e9467eef9476d5fd572099d9a3f6f))
* Removes scope to avoid unnecessary duplication ([#1503](https://github.com/googleapis/python-bigquery/issues/1503)) ([665d7ba](https://github.com/googleapis/python-bigquery/commit/665d7ba74a1b45de1ef51cc75b6860125afc5fe6))


### Dependencies

* Update minimum google-cloud-core to 1.6.0 ([a190aaa](https://github.com/googleapis/python-bigquery/commit/a190aaa09ae73e8b6a83b7b213247f95fde57615))

## [3.5.0](https://github.com/googleapis/python-bigquery/compare/v3.4.2...v3.5.0) (2023-01-31)


### Features

* Add __str__ method to DatasetReference ([#1477](https://github.com/googleapis/python-bigquery/issues/1477)) ([f32df1f](https://github.com/googleapis/python-bigquery/commit/f32df1fb74e4aea24cd8a4099040ad2f7436e54d))
* Add preserveAsciiControlCharacter to LoadJobConfig ([#1484](https://github.com/googleapis/python-bigquery/issues/1484)) ([bd1da9a](https://github.com/googleapis/python-bigquery/commit/bd1da9aa0a40b02b7d5409a0b094d8380e255c91))


### Documentation

* Adds snippet for creating table with external data config ([#1420](https://github.com/googleapis/python-bigquery/issues/1420)) ([f0ace2a](https://github.com/googleapis/python-bigquery/commit/f0ace2ac2307ef359511a235f80f5ce9e46264c1))
* Revise delete label table code sample, add TODO to clean up sni… ([#1466](https://github.com/googleapis/python-bigquery/issues/1466)) ([0dab7d2](https://github.com/googleapis/python-bigquery/commit/0dab7d25ace4b63d2984485e7b0c5bb38f20476f))
* **samples:** Table variable fix ([#1287](https://github.com/googleapis/python-bigquery/issues/1287)) ([a71888a](https://github.com/googleapis/python-bigquery/commit/a71888a60d1e5e5815ab459fe24368ad5b0d032a))

## [3.4.2](https://github.com/googleapis/python-bigquery/compare/v3.4.1...v3.4.2) (2023-01-13)


Expand Down
2 changes: 1 addition & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ dependencies.

Supported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^
Python >= 3.7, < 3.11
Python >= 3.7

Unsupported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Expand Down
60 changes: 0 additions & 60 deletions docs/snippets.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,66 +164,6 @@ def test_create_partitioned_table(client, to_delete):
"https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589"
)
)
def test_manage_table_labels(client, to_delete):
    """Exercise the published code samples for reading and deleting table labels.

    Creates a throwaway dataset and a table carrying one label
    (``{"color": "green"}``), then runs the doc-sample regions that (1) fetch
    and print the table's labels and (2) remove a label by setting its value
    to ``None`` and calling ``update_table``.

    Args:
        client: A ``bigquery.Client`` fixture used for all API calls.
        to_delete: Fixture list; the dataset is appended so the test harness
            cleans it up afterwards.

    NOTE: the ``# [START …]`` / ``# [END …]`` markers delimit regions that are
    extracted verbatim into cloud.google.com documentation — do not edit the
    code between them cosmetically.
    """
    dataset_id = "label_table_dataset_{}".format(_millis())
    table_id = "label_table_{}".format(_millis())
    project = client.project
    dataset_ref = bigquery.DatasetReference(project, dataset_id)
    dataset = bigquery.Dataset(dataset_ref)
    client.create_dataset(dataset)
    to_delete.append(dataset)

    table = bigquery.Table(dataset.table(table_id), schema=SCHEMA)

    # Attach an initial label before creating the table.
    labels = {"color": "green"}
    table.labels = labels
    table = client.create_table(table)

    # TODO(Mattix23): After code sample is updated from cloud.google.com delete this

    # [START bigquery_get_table_labels]
    # from google.cloud import bigquery
    # client = bigquery.Client()
    # dataset_id = 'my_dataset'
    # table_id = 'my_table'

    project = client.project
    dataset_ref = bigquery.DatasetReference(project, dataset_id)
    table_ref = dataset_ref.table(table_id)
    table = client.get_table(table_ref)  # API Request

    # View table labels
    print("Table ID: {}".format(table_id))
    print("Labels:")
    if table.labels:
        for label, value in table.labels.items():
            print("\t{}: {}".format(label, value))
    else:
        print("\tTable has no labels defined.")
    # [END bigquery_get_table_labels]
    assert table.labels == labels

    # TODO(Mattix23): After code sample is updated from cloud.google.com delete this

    # [START bigquery_delete_label_table]
    # from google.cloud import bigquery
    # client = bigquery.Client()
    # project = client.project
    # dataset_ref = bigquery.DatasetReference(project, dataset_id)
    # table_ref = dataset_ref.table('my_table')
    # table = client.get_table(table_ref)  # API request

    # This example table starts with one label
    assert table.labels == {"color": "green"}
    # To delete a label from a table, set its value to None
    table.labels["color"] = None

    table = client.update_table(table, ["labels"])  # API request

    assert table.labels == {}
    # [END bigquery_delete_label_table]


@pytest.mark.skip(
reason=(
"update_table() is flaky "
Expand Down
12 changes: 0 additions & 12 deletions google/cloud/bigquery/_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,22 +14,10 @@

"""Create / interact with Google BigQuery connections."""

import os
import pkg_resources

from google.cloud import _http # type: ignore # pytype: disable=import-error
from google.cloud.bigquery import __version__


# TODO: Increase the minimum version of google-cloud-core to 1.6.0
# and remove this logic. See:
# https://github.com/googleapis/python-bigquery/issues/509
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE") == "true": # pragma: NO COVER
release = pkg_resources.get_distribution("google-cloud-core").parsed_version
if release < pkg_resources.parse_version("1.6.0"):
raise ImportError("google-cloud-core >= 1.6.0 is required to use mTLS feature")


class Connection(_http.JSONConnection):
"""A connection to Google BigQuery via the JSON REST API.
Expand Down
47 changes: 22 additions & 25 deletions google/cloud/bigquery/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,10 +225,7 @@ class Client(ClientWithProject):
to acquire default credentials.
"""

SCOPE = ( # type: ignore
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
)
SCOPE = ("https://www.googleapis.com/auth/cloud-platform",) # type: ignore
"""The scopes required for authenticating as a BigQuery consumer."""

def __init__(
Expand Down Expand Up @@ -329,11 +326,11 @@ def get_service_account_email(

def list_projects(
self,
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: int = None,
page_size: Optional[int] = None,
) -> page_iterator.Iterator:
"""List projects for the project associated with this client.
Expand Down Expand Up @@ -395,11 +392,11 @@ def list_datasets(
project: str = None,
include_all: bool = False,
filter: str = None,
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: int = None,
page_size: Optional[int] = None,
) -> page_iterator.Iterator:
"""List datasets for the project associated with this client.
Expand Down Expand Up @@ -1324,11 +1321,11 @@ def update_table(
def list_models(
self,
dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: int = None,
page_size: Optional[int] = None,
) -> page_iterator.Iterator:
"""[Beta] List models in the dataset.
Expand Down Expand Up @@ -1401,11 +1398,11 @@ def api_request(*args, **kwargs):
def list_routines(
self,
dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: int = None,
page_size: Optional[int] = None,
) -> page_iterator.Iterator:
"""[Beta] List routines in the dataset.
Expand Down Expand Up @@ -1478,11 +1475,11 @@ def api_request(*args, **kwargs):
def list_tables(
self,
dataset: Union[Dataset, DatasetReference, DatasetListItem, str],
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
page_size: int = None,
page_size: Optional[int] = None,
) -> page_iterator.Iterator:
"""List tables in the dataset.
Expand Down Expand Up @@ -1838,7 +1835,7 @@ def _get_query_results(
job_id: str,
retry: retries.Retry,
project: str = None,
timeout_ms: int = None,
timeout_ms: Optional[int] = None,
location: str = None,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> _QueryResults:
Expand Down Expand Up @@ -2163,15 +2160,15 @@ def list_jobs(
self,
project: str = None,
parent_job: Optional[Union[QueryJob, str]] = None,
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
all_users: bool = None,
state_filter: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
min_creation_time: datetime.datetime = None,
max_creation_time: datetime.datetime = None,
page_size: int = None,
page_size: Optional[int] = None,
) -> page_iterator.Iterator:
"""List jobs for the project associated with this client.
Expand Down Expand Up @@ -2361,7 +2358,7 @@ def load_table_from_file(
file_obj: IO[bytes],
destination: Union[Table, TableReference, TableListItem, str],
rewind: bool = False,
size: int = None,
size: Optional[int] = None,
num_retries: int = _DEFAULT_NUM_RETRIES,
job_id: str = None,
job_id_prefix: str = None,
Expand Down Expand Up @@ -3729,10 +3726,10 @@ def list_rows(
self,
table: Union[Table, TableListItem, TableReference, str],
selected_fields: Sequence[SchemaField] = None,
max_results: int = None,
max_results: Optional[int] = None,
page_token: str = None,
start_index: int = None,
page_size: int = None,
start_index: Optional[int] = None,
page_size: Optional[int] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> RowIterator:
Expand Down Expand Up @@ -3840,11 +3837,11 @@ def _list_rows_from_query_results(
location: str,
project: str,
schema: SchemaField,
total_rows: int = None,
total_rows: Optional[int] = None,
destination: Union[Table, TableReference, TableListItem, str] = None,
max_results: int = None,
start_index: int = None,
page_size: int = None,
max_results: Optional[int] = None,
start_index: Optional[int] = None,
page_size: Optional[int] = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: TimeoutType = DEFAULT_TIMEOUT,
) -> RowIterator:
Expand Down
3 changes: 3 additions & 0 deletions google/cloud/bigquery/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,9 @@ def __ne__(self, other):
def __hash__(self):
    # Hash the identity tuple returned by _key() — presumably the same tuple
    # used for equality comparisons, so equal references hash equally.
    # TODO(review): confirm __eq__ compares via _key() (its body is not
    # visible in this hunk).
    return hash(self._key())

def __str__(self):
    """Return the standard SQL identifier for this reference: ``project.dataset_id``."""
    return "{}.{}".format(self.project, self._dataset_id)

def __repr__(self):
    """Return a debugging representation built from the identity key tuple."""
    return f"DatasetReference{self._key()}"

Expand Down
Loading

0 comments on commit 12f0ecd

Please sign in to comment.