diff --git a/google/cloud/bigquery/client.py b/google/cloud/bigquery/client.py
index b72505a15..1f3647e71 100644
--- a/google/cloud/bigquery/client.py
+++ b/google/cloud/bigquery/client.py
@@ -3358,6 +3358,13 @@ def insert_rows(
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table, \
@@ -3424,6 +3431,13 @@ def insert_rows_from_dataframe(
     ) -> Sequence[Sequence[dict]]:
         """Insert rows into a table from a dataframe via the streaming API.
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table, \
@@ -3485,6 +3499,13 @@ def insert_rows_json(
         See
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
 
+        BigQuery will reject insertAll payloads that exceed a defined limit (10MB).
+        Additionally, if a payload vastly exceeds this limit, the request is rejected
+        by the intermediate architecture, which returns a 413 (Payload Too Large) status code.
+
+        See
+        https://cloud.google.com/bigquery/quotas#streaming_inserts
+
         Args:
             table (Union[ \
                 google.cloud.bigquery.table.Table \