From ae54dc347351c713ac7d343bb77210f3f7d013af Mon Sep 17 00:00:00 2001
From: Gurov Ilya
Date: Fri, 17 Jan 2020 18:33:05 +0300
Subject: [PATCH] feat(bigquery): check `json_rows` arg type in
 `insert_rows_json()` (#10162)

* feat(bigquery): check json_rows arg type in insert_rows_json()

* Spelling
---
 bigquery/google/cloud/bigquery/client.py |  2 ++
 bigquery/tests/unit/test_client.py       | 25 ++++++++++++++++++++++++
 2 files changed, 27 insertions(+)

diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py
index 34dceaeecd4a..d37d8ac19e21 100644
--- a/bigquery/google/cloud/bigquery/client.py
+++ b/bigquery/google/cloud/bigquery/client.py
@@ -2506,6 +2506,8 @@ def insert_rows_json(
                 identifies the row, and the "errors" key contains a list of the
                 mappings describing one or more problems with the row.
         """
+        if not isinstance(json_rows, collections_abc.Sequence):
+            raise TypeError("json_rows argument should be a sequence of dicts")
         # Convert table to just a reference because unlike insert_rows,
         # insert_rows_json doesn't need the table schema. It's not doing any
         # type conversions.
diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py
index b87ea52a057d..cce4bc532074 100644
--- a/bigquery/tests/unit/test_client.py
+++ b/bigquery/tests/unit/test_client.py
@@ -5384,6 +5384,31 @@ def test_insert_rows_json_w_explicit_none_insert_ids(self):
             timeout=None,
         )
 
+    def test_insert_rows_w_wrong_arg(self):
+        from google.cloud.bigquery.dataset import DatasetReference
+        from google.cloud.bigquery.schema import SchemaField
+        from google.cloud.bigquery.table import Table
+
+        PROJECT = "PROJECT"
+        DS_ID = "DS_ID"
+        TABLE_ID = "TABLE_ID"
+        ROW = {"full_name": "Bhettye Rhubble", "age": "27", "joined": None}
+
+        creds = _make_credentials()
+        client = self._make_one(project=PROJECT, credentials=creds, _http=object())
+        client._connection = make_connection({})
+
+        table_ref = DatasetReference(PROJECT, DS_ID).table(TABLE_ID)
+        schema = [
+            SchemaField("full_name", "STRING", mode="REQUIRED"),
+            SchemaField("age", "INTEGER", mode="REQUIRED"),
+            SchemaField("joined", "TIMESTAMP", mode="NULLABLE"),
+        ]
+        table = Table(table_ref, schema=schema)
+
+        with self.assertRaises(TypeError):
+            client.insert_rows_json(table, ROW)
+
     def test_list_partitions(self):
         from google.cloud.bigquery.table import Table
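
A minimal sketch of the caller-facing effect of the new guard, not part of the patch itself. The check runs at the top of insert_rows_json(), so a non-sequence argument now fails locally with TypeError before any request is built. The project, dataset, and table names below are hypothetical, and the snippet assumes default credentials and an existing table.

    from google.cloud import bigquery

    client = bigquery.Client()  # assumes default credentials and project
    table_ref = bigquery.TableReference.from_string(
        "my-project.my_dataset.my_table"  # hypothetical table
    )

    row = {"full_name": "Phred Phlyntstone", "age": 32}

    # Correct usage: json_rows is a sequence of dicts.
    errors = client.insert_rows_json(table_ref, [row])
    print(errors)  # empty list on success

    # With this patch, passing a single dict (a mapping, not a sequence)
    # raises TypeError before any API call is made.
    try:
        client.insert_rows_json(table_ref, row)
    except TypeError as exc:
        print(exc)  # json_rows argument should be a sequence of dicts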