From ac4151fcd2a22e3c699c30c1de6c586a4c6a8f02 Mon Sep 17 00:00:00 2001
From: aribray <45905583+aribray@users.noreply.github.com>
Date: Tue, 15 Nov 2022 12:48:30 -0600
Subject: [PATCH 1/2] docs: add comments for streaming quota limits

---
 google/cloud/bigquery/client.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py
index 1200d78f9..90ca162a8 100644
--- a/google/cloud/bigquery/client.py
+++ b/google/cloud/bigquery/client.py
@@ -3358,6 +3358,20 @@ def insert_rows(
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture.
+
+        413 Payload Too Large
+
+        The 413 (Payload Too Large) status code indicates that the server is
+        refusing to process a request because the request payload is larger
+        than the server is willing or able to process.
+
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table, \
@@ -3424,6 +3438,13 @@ def insert_rows_from_dataframe(
     ) -> Sequence[Sequence[dict]]:
         """Insert rows into a table from a dataframe via the streaming API.
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table, \
@@ -3485,6 +3506,13 @@ def insert_rows_json(
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table \

From 1d70c511b5d54d590a04f943556b12155e20f229 Mon Sep 17 00:00:00 2001
From: aribray <45905583+aribray@users.noreply.github.com>
Date: Tue, 15 Nov 2022 15:11:50 -0600
Subject: [PATCH 2/2] add info about 413 status code

---
 google/cloud/bigquery/client.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py
index 90ca162a8..be345fc59 100644
--- a/google/cloud/bigquery/client.py
+++ b/google/cloud/bigquery/client.py
@@ -3360,13 +3360,7 @@ def insert_rows(
 
         BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
         Additionally, if a payload vastly exceeds this limit, the request is rejected
-        by the intermediate architecture.
-
-        413 Payload Too Large
-
-        The 413 (Payload Too Large) status code indicates that the server is
-        refusing to process a request because the request payload is larger
-        than the server is willing or able to process.
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
 
 
         See
@@ -3440,7 +3434,7 @@ def insert_rows_from_dataframe(
 
         BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
         Additionally, if a payload vastly exceeds this limit, the request is rejected
-        by the intermediate architecture.
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
 
         See
         https://cloud.google.com/bigquery/quotas#streaming_inserts
@@ -3508,7 +3502,7 @@ def insert_rows_json(
 
         BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
         Additionally, if a payload vastly exceeds this limit, the request is rejected
-        by the intermediate architecture.
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
 
         See
         https://cloud.google.com/bigquery/quotas#streaming_inserts
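The docstrings above document the 10MB insertAll payload limit but do not show how a caller would stay under it. The following is a minimal sketch, not part of this patch, of one way to batch rows by estimated serialized size before calling `Client.insert_rows_json`; the table ID, row shape, and `MAX_PAYLOAD_BYTES` margin are illustrative assumptions.

```python
# Minimal sketch (not part of this patch): keep each insertAll request
# under the documented 10MB payload limit by batching rows on estimated
# serialized size. Table ID, rows, and the margin are assumptions.
import json
from typing import Any, Dict, Iterable, List

from google.cloud import bigquery

# Leave headroom below the 10MB limit, since the insertAll request
# envelope adds bytes beyond the rows themselves.
MAX_PAYLOAD_BYTES = 9 * 1024 * 1024


def insert_rows_in_chunks(
    client: bigquery.Client,
    table_id: str,
    rows: Iterable[Dict[str, Any]],
) -> List[dict]:
    """Stream rows via insert_rows_json, splitting into size-bounded batches."""
    errors: List[dict] = []
    batch: List[Dict[str, Any]] = []
    batch_bytes = 0
    for row in rows:
        # Rough per-row size estimate: the row's own JSON serialization.
        row_bytes = len(json.dumps(row).encode("utf-8"))
        if batch and batch_bytes + row_bytes > MAX_PAYLOAD_BYTES:
            errors.extend(client.insert_rows_json(table_id, batch))
            batch, batch_bytes = [], 0
        batch.append(row)
        batch_bytes += row_bytes
    if batch:
        errors.extend(client.insert_rows_json(table_id, batch))
    return errors


if __name__ == "__main__":
    client = bigquery.Client()
    failed = insert_rows_in_chunks(
        client,
        "my-project.my_dataset.my_table",  # hypothetical table ID
        ({"name": f"row-{i}", "value": i} for i in range(100_000)),
    )
    if failed:
        print("Some rows were not inserted:", failed[:5])
```

The size estimate is deliberately approximate: insertAll wraps rows in a JSON request envelope with per-row metadata, which is why the sketch leaves headroom below the documented 10MB ceiling rather than batching right up to it.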