From d8066bfc3f5d896bc3527a6382689ba85df7c134 Mon Sep 17 00:00:00 2001
From: Leonid Emar-Kar
Date: Wed, 11 Sep 2019 13:18:16 +0300
Subject: [PATCH 1/3] update samples

---
 bigquery/samples/add_empty_column.py          |  7 ++--
 bigquery/samples/browse_table_data.py         | 12 +++++--
 bigquery/samples/create_dataset.py            |  2 +-
 bigquery/samples/create_job.py                |  2 +-
 bigquery/samples/create_routine.py            |  2 +-
 bigquery/samples/create_routine_ddl.py        |  4 +--
 bigquery/samples/create_table.py              |  2 +-
 bigquery/samples/dataset_exists.py            |  2 +-
 bigquery/samples/delete_dataset.py            |  4 ++-
 bigquery/samples/delete_dataset_labels.py     |  4 +--
 bigquery/samples/delete_model.py              |  2 +-
 bigquery/samples/delete_routine.py            |  2 +-
 bigquery/samples/delete_table.py              |  2 +-
 bigquery/samples/get_dataset.py               |  2 +-
 bigquery/samples/get_dataset_labels.py        |  2 +-
 bigquery/samples/get_model.py                 |  2 +-
 bigquery/samples/get_routine.py               | 14 ++++----
 bigquery/samples/get_table.py                 |  5 ++-
 bigquery/samples/label_dataset.py             |  4 +--
 bigquery/samples/list_datasets.py             |  4 +--
 bigquery/samples/list_datasets_by_label.py    |  2 +-
 bigquery/samples/list_models.py               |  2 +-
 bigquery/samples/list_routines.py             |  2 +-
 bigquery/samples/list_tables.py               |  2 +-
 bigquery/samples/load_table_dataframe.py      | 13 ++++---
 bigquery/samples/query_to_arrow.py            |  2 +-
 .../samples/tests/test_browse_table_data.py   |  2 ++
 bigquery/samples/tests/test_create_routine.py | 23 ++++++++++++
 ..._samples.py => test_create_routine_ddl.py} | 35 +------------------
 bigquery/samples/tests/test_delete_routine.py | 23 ++++++++++++
 bigquery/samples/tests/test_get_dataset.py    |  2 +-
 bigquery/samples/tests/test_get_routine.py    | 27 ++++++++++++++
 bigquery/samples/tests/test_get_table.py      |  2 +-
 .../tests/test_list_datasets_by_label.py      |  2 +-
 bigquery/samples/tests/test_list_routines.py  | 24 +++++++++++++
 .../tests/test_load_table_dataframe.py        |  6 ++--
 bigquery/samples/tests/test_query_to_arrow.py |  1 -
 bigquery/samples/tests/test_update_routine.py | 22 ++++++++++++
 bigquery/samples/update_dataset_access.py     |  4 +--
 ...update_dataset_default_table_expiration.py |  6 ++--
 .../samples/update_dataset_description.py     |  4 +--
 bigquery/samples/update_model.py              |  4 +--
 bigquery/samples/update_routine.py            |  5 +--
 43 files changed, 200 insertions(+), 95 deletions(-)
 create mode 100644 bigquery/samples/tests/test_create_routine.py
 rename bigquery/samples/tests/{test_routine_samples.py => test_create_routine_ddl.py} (69%)
 create mode 100644 bigquery/samples/tests/test_delete_routine.py
 create mode 100644 bigquery/samples/tests/test_get_routine.py
 create mode 100644 bigquery/samples/tests/test_list_routines.py
 create mode 100644 bigquery/samples/tests/test_update_routine.py

diff --git a/bigquery/samples/add_empty_column.py b/bigquery/samples/add_empty_column.py
index eb84037598d3..3d40bc177fbd 100644
--- a/bigquery/samples/add_empty_column.py
+++ b/bigquery/samples/add_empty_column.py
@@ -21,17 +21,18 @@ def add_empty_column(client, table_id):
     # TODO(developer): Construct a BigQuery client object.
     # client = bigquery.Client()
 
-    # TODO(developer): Set table_id to the ID of the table to add an empty column.
+    # TODO(developer): Set table_id to the ID of the table
+    # to add an empty column.
     # table_id = "your-project.your_dataset.your_table_name"
 
-    table = client.get_table(table_id)
+    table = client.get_table(table_id)  # API request.
 
     original_schema = table.schema
     new_schema = original_schema[:]  # creates a copy of the schema
     new_schema.append(bigquery.SchemaField("phone", "STRING"))
 
     table.schema = new_schema
-    table = client.update_table(table, ["schema"])  # API request
+    table = client.update_table(table, ["schema"])  # API request.
 
     if len(table.schema) == len(original_schema) + 1 == len(new_schema):
         print("A new column has been added.")
diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py
index dd6c572cab6d..1b14701fab1d 100644
--- a/bigquery/samples/browse_table_data.py
+++ b/bigquery/samples/browse_table_data.py
@@ -26,7 +26,7 @@ def browse_table_data(client, table_id):
     # table_id = "your-project.your_dataset.your_table_name"
 
     # Download all rows from a table.
-    rows_iter = client.list_rows(table_id)
+    rows_iter = client.list_rows(table_id)  # API request.
 
     # Iterate over rows to make the API requests to fetch row data.
     rows = list(rows_iter)
@@ -38,10 +38,18 @@ def browse_table_data(client, table_id):
     print("Downloaded {} rows from table {}".format(len(rows), table_id))
 
     # Specify selected fields to limit the results to certain columns.
-    table = client.get_table(table_id)
+    table = client.get_table(table_id)  # API request.
     fields = table.schema[:2]  # first two columns
     rows_iter = client.list_rows(table_id, selected_fields=fields, max_results=10)
     rows = list(rows_iter)
     print("Selected {} columns from table {}.".format(len(rows_iter.schema), table_id))
     print("Downloaded {} rows from table {}".format(len(rows), table_id))
+
+    # Print row data in tabular format.
+    rows = client.list_rows(table, max_results=10)
+    format_string = "{!s:<16} " * len(rows.schema)
+    field_names = [field.name for field in rows.schema]
+    print(format_string.format(*field_names))  # Prints column headers.
+    for row in rows:
+        print(format_string.format(*row))  # Prints row data.
     # [END bigquery_browse_table]
diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py
index 89ca9d38f5f3..166b7198235b 100644
--- a/bigquery/samples/create_dataset.py
+++ b/bigquery/samples/create_dataset.py
@@ -33,6 +33,6 @@ def create_dataset(client, dataset_id):
     # Send the dataset to the API for creation.
     # Raises google.api_core.exceptions.Conflict if the Dataset already
     # exists within the project.
-    dataset = client.create_dataset(dataset)  # API request
+    dataset = client.create_dataset(dataset)  # API request.
     print("Created dataset {}.{}".format(client.project, dataset.dataset_id))
     # [END bigquery_create_dataset]
diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py
index 24bb85510598..3b615fe4976b 100644
--- a/bigquery/samples/create_job.py
+++ b/bigquery/samples/create_job.py
@@ -33,7 +33,7 @@ def create_job(client):
         # The client libraries automatically generate a job ID. Override the
         # generated ID with either the job_id_prefix or job_id parameters.
         job_id_prefix="code_sample_",
-    )  # API request
+    )  # API request.
 
     print("Started job: {}".format(query_job.job_id))
     # [END bigquery_create_job]
diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py
index c08ec4799a3e..dad3ef20f180 100644
--- a/bigquery/samples/create_routine.py
+++ b/bigquery/samples/create_routine.py
@@ -40,7 +40,7 @@ def create_routine(client, routine_id):
         ],
     )
 
-    routine = client.create_routine(routine)
+    routine = client.create_routine(routine)  # API request.
print("Created routine {}".format(routine.reference)) # [END bigquery_create_routine] diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py index a4ae3318e7b4..ad38f8935823 100644 --- a/bigquery/samples/create_routine_ddl.py +++ b/bigquery/samples/create_routine_ddl.py @@ -36,10 +36,10 @@ def create_routine_ddl(client, routine_id): ) # Initiate the query to create the routine. - query_job = client.query(sql) + query_job = client.query(sql) # API request. # Wait for the query to complete. - query_job.result() + query_job.result() # Waits for the job to complete. print("Created routine {}".format(query_job.ddl_target_routine)) # [END bigquery_create_routine_ddl] diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py index 2a6e98fc72f6..3f6be9cfea14 100644 --- a/bigquery/samples/create_table.py +++ b/bigquery/samples/create_table.py @@ -30,7 +30,7 @@ def create_table(client, table_id): ] table = bigquery.Table(table_id, schema=schema) - table = client.create_table(table) # API request + table = client.create_table(table) # API request. print( "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) ) diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py index 46cf26a623bf..71c7a60d9922 100644 --- a/bigquery/samples/dataset_exists.py +++ b/bigquery/samples/dataset_exists.py @@ -22,7 +22,7 @@ def dataset_exists(client, dataset_id): # dataset_id = "your-project.your_dataset" try: - client.get_dataset(dataset_id) + client.get_dataset(dataset_id) # API request. print("Dataset {} already exists".format(dataset_id)) except NotFound: print("Dataset {} is not found".format(dataset_id)) diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py index 6cde1b6b2d27..1cb1bd6f269f 100644 --- a/bigquery/samples/delete_dataset.py +++ b/bigquery/samples/delete_dataset.py @@ -27,7 +27,9 @@ def delete_dataset(client, dataset_id): # Use the delete_contents parameter to delete a dataset and its contents # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted. - client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True) + client.delete_dataset( + dataset_id, delete_contents=True, not_found_ok=True + ) # API request. print("Deleted dataset '{}'.".format(dataset_id)) # [END bigquery_delete_dataset] diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py index 33ff5c0f2620..f4cbf7ba20d9 100644 --- a/bigquery/samples/delete_dataset_labels.py +++ b/bigquery/samples/delete_dataset_labels.py @@ -25,12 +25,12 @@ def delete_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # API request. # To delete a label from a dataset, set its value to None dataset.labels["color"] = None - dataset = client.update_dataset(dataset, ["labels"]) + dataset = client.update_dataset(dataset, ["labels"]) # API request. 
print("Labels deleted from {}".format(dataset_id)) # [END bigquery_delete_label_dataset] return dataset diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py index 5ac4305bc97e..b4d405aca3a0 100644 --- a/bigquery/samples/delete_model.py +++ b/bigquery/samples/delete_model.py @@ -26,7 +26,7 @@ def delete_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - client.delete_model(model_id) + client.delete_model(model_id) # API request. print("Deleted model '{}'.".format(model_id)) # [END bigquery_delete_model] diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py index c0164b415008..c792fafef8b3 100644 --- a/bigquery/samples/delete_routine.py +++ b/bigquery/samples/delete_routine.py @@ -25,7 +25,7 @@ def delete_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - client.delete_routine(routine_id) + client.delete_routine(routine_id) # API request. print("Deleted routine {}.".format(routine_id)) # [END bigquery_delete_routine] diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py index dcdd3d855b2e..db592e9c8385 100644 --- a/bigquery/samples/delete_table.py +++ b/bigquery/samples/delete_table.py @@ -27,6 +27,6 @@ def delete_table(client, table_id): # If the table does not exist, delete_table raises # google.api_core.exceptions.NotFound unless not_found_ok is True - client.delete_table(table_id, not_found_ok=True) + client.delete_table(table_id, not_found_ok=True) # API request. print("Deleted table '{}'.".format(table_id)) # [END bigquery_delete_table] diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py index 5586c2b95ebb..4bb73f38061d 100644 --- a/bigquery/samples/get_dataset.py +++ b/bigquery/samples/get_dataset.py @@ -25,7 +25,7 @@ def get_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) friendly_name = dataset.friendly_name diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py index 2f21723a550b..410e7f3cbb11 100644 --- a/bigquery/samples/get_dataset_labels.py +++ b/bigquery/samples/get_dataset_labels.py @@ -25,7 +25,7 @@ def get_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # API request. # View dataset labels print("Dataset ID: {}".format(dataset_id)) diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py index 69986733c50b..fd1bcf2ec4a1 100644 --- a/bigquery/samples/get_model.py +++ b/bigquery/samples/get_model.py @@ -26,7 +26,7 @@ def get_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - model = client.get_model(model_id) + model = client.get_model(model_id) # API request. 
 
     full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
     friendly_name = model.friendly_name
diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py
index d9035c282438..64e2cf29ac0b 100644
--- a/bigquery/samples/get_routine.py
+++ b/bigquery/samples/get_routine.py
@@ -25,15 +25,15 @@ def get_routine(client, routine_id):
     # TODO(developer): Set the fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
 
-    routine = client.get_routine(routine_id)
+    routine = client.get_routine(routine_id)  # API request.
 
-    print("Routine `{}`:".format(routine.reference))
-    print(" Type: '{}'".format(routine.type_))
-    print(" Language: '{}'".format(routine.language))
-    print(" Arguments:")
+    print("Routine '{}':".format(routine.reference))
+    print("\tType: '{}'".format(routine.type_))
+    print("\tLanguage: '{}'".format(routine.language))
+    print("\tArguments:")
     for argument in routine.arguments:
-        print(" Name: '{}'".format(argument.name))
-        print(" Type: '{}'".format(argument.type_))
+        print("\t\tName: '{}'".format(argument.name))
+        print("\t\tType: '{}'".format(argument.data_type))
     # [END bigquery_get_routine]
 
     return routine
diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py
index e49e032f6e23..f57fc2f31874 100644
--- a/bigquery/samples/get_table.py
+++ b/bigquery/samples/get_table.py
@@ -25,13 +25,12 @@ def get_table(client, table_id):
     # TODO(developer): Set table_id to the ID of the model to fetch.
     # table_id = 'your-project.your_dataset.your_table'
 
-    table = client.get_table(table_id)
+    table = client.get_table(table_id)  # API request.
 
+    # View table properties
     print(
         "Got table '{}.{}.{}'.".format(table.project, table.dataset_id, table.table_id)
     )
-
-    # View table properties
     print("Table schema: {}".format(table.schema))
     print("Table description: {}".format(table.description))
     print("Table has {} rows".format(table.num_rows))
diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py
index 7840ea25a63f..15f3a597745b 100644
--- a/bigquery/samples/label_dataset.py
+++ b/bigquery/samples/label_dataset.py
@@ -25,9 +25,9 @@ def label_dataset(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = "your-project.your_dataset"
 
-    dataset = client.get_dataset(dataset_id)
+    dataset = client.get_dataset(dataset_id)  # API request.
     dataset.labels = {"color": "green"}
-    dataset = client.update_dataset(dataset, ["labels"])
+    dataset = client.update_dataset(dataset, ["labels"])  # API request.
 
     print("Labels added to {}".format(dataset_id))
     # [END bigquery_label_dataset]
diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py
index b57aad1b5e7b..3615f55edb00 100644
--- a/bigquery/samples/list_datasets.py
+++ b/bigquery/samples/list_datasets.py
@@ -22,12 +22,12 @@ def list_datasets(client):
     # TODO(developer): Construct a BigQuery client object.
     # client = bigquery.Client()
 
-    datasets = list(client.list_datasets())
+    datasets = list(client.list_datasets())  # API request.
     project = client.project
 
     if datasets:
         print("Datasets in project {}:".format(project))
-        for dataset in datasets:  # API request(s)
+        for dataset in datasets:
             print("\t{}".format(dataset.dataset_id))
     else:
         print("{} project does not contain any datasets.".format(project))
diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py
index 8b574b1110eb..26d9ed7609f0 100644
--- a/bigquery/samples/list_datasets_by_label.py
+++ b/bigquery/samples/list_datasets_by_label.py
@@ -23,7 +23,7 @@ def list_datasets_by_label(client):
     # client = bigquery.Client()
 
     label_filter = "labels.color:green"
-    datasets = list(client.list_datasets(filter=label_filter))
+    datasets = list(client.list_datasets(filter=label_filter))  # API request.
 
     if datasets:
         print("Datasets filtered by {}:".format(label_filter))
diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py
index 5b4d21799b28..1e5575139c50 100644
--- a/bigquery/samples/list_models.py
+++ b/bigquery/samples/list_models.py
@@ -27,7 +27,7 @@ def list_models(client, dataset_id):
     #                  the models you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    models = client.list_models(dataset_id)
+    models = client.list_models(dataset_id)  # API request.
 
     print("Models contained in '{}':".format(dataset_id))
     for model in models:
diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py
index 1ae4f441cde1..379381492646 100644
--- a/bigquery/samples/list_routines.py
+++ b/bigquery/samples/list_routines.py
@@ -26,7 +26,7 @@ def list_routines(client, dataset_id):
     #                  the routines you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    routines = client.list_routines(dataset_id)
+    routines = client.list_routines(dataset_id)  # API request.
 
     print("Routines contained in dataset {}:".format(dataset_id))
     for routine in routines:
diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py
index 2057f2d73891..951c600badc7 100644
--- a/bigquery/samples/list_tables.py
+++ b/bigquery/samples/list_tables.py
@@ -26,7 +26,7 @@ def list_tables(client, dataset_id):
     #                  the tables you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    tables = client.list_tables(dataset_id)
+    tables = client.list_tables(dataset_id)  # API request.
 
     print("Tables contained in '{}':".format(dataset_id))
     for table in tables:
diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py
index 69eeb6ef89d0..aed88180988c 100644
--- a/bigquery/samples/load_table_dataframe.py
+++ b/bigquery/samples/load_table_dataframe.py
@@ -14,8 +14,10 @@
 
 
 def load_table_dataframe(client, table_id):
+
     # [START bigquery_load_table_dataframe]
     from google.cloud import bigquery
+
     import pandas
 
     # TODO(developer): Construct a BigQuery client object.
@@ -59,11 +61,14 @@ def load_table_dataframe(client, table_id):
     )
 
     job = client.load_table_from_dataframe(
-        dataframe, table_id, job_config=job_config, location="US"
-    )
-    job.result()  # Waits for table load to complete.
+        dataframe,
+        table_id,
+        job_config=job_config,
+        location="US",  # Must match the destination dataset location.
+    )  # API request.
+    job.result()  # Waits for the job to complete.
 
-    table = client.get_table(table_id)
+    table = client.get_table(table_id)  # API request.
     print(
         "Loaded {} rows and {} columns to {}".format(
             table.num_rows, len(table.schema), table_id
diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py
index b13dcf3e1413..81c3b67c4d2d 100644
--- a/bigquery/samples/query_to_arrow.py
+++ b/bigquery/samples/query_to_arrow.py
@@ -41,7 +41,7 @@ def query_to_arrow(client):
     CROSS JOIN UNNEST(r.participants) as participant;
     """
     query_job = client.query(sql)
-    arrow_table = query_job.to_arrow()
+    arrow_table = query_job.to_arrow()  # API request.
 
     print(
         "Downloaded {} rows, {} columns.".format(
diff --git a/bigquery/samples/tests/test_browse_table_data.py b/bigquery/samples/tests/test_browse_table_data.py
index f777bf91ca00..0e9cc6055494 100644
--- a/bigquery/samples/tests/test_browse_table_data.py
+++ b/bigquery/samples/tests/test_browse_table_data.py
@@ -24,3 +24,5 @@ def test_browse_table_data(capsys, client, table_with_data_id):
     assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out
     assert "Selected 2 columns from table {}".format(table_with_data_id) in out
     assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out
+    assert "word" in out
+    assert "LVII" in out
diff --git a/bigquery/samples/tests/test_create_routine.py b/bigquery/samples/tests/test_create_routine.py
new file mode 100644
index 000000000000..7220d63542e2
--- /dev/null
+++ b/bigquery/samples/tests/test_create_routine.py
@@ -0,0 +1,23 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import create_routine
+
+
+def test_create_routine(capsys, client, random_routine_id):
+
+    create_routine.create_routine(client, random_routine_id)
+    out, err = capsys.readouterr()
+    assert "Created routine {}".format(random_routine_id) in out
diff --git a/bigquery/samples/tests/test_routine_samples.py b/bigquery/samples/tests/test_create_routine_ddl.py
similarity index 69%
rename from bigquery/samples/tests/test_routine_samples.py
rename to bigquery/samples/tests/test_create_routine_ddl.py
index 5a1c69c7f60f..cecda2f654ec 100644
--- a/bigquery/samples/tests/test_routine_samples.py
+++ b/bigquery/samples/tests/test_create_routine_ddl.py
@@ -15,24 +15,15 @@
 from google.cloud import bigquery
 from google.cloud import bigquery_v2
 
-
-def test_create_routine(capsys, client, random_routine_id):
-    from .. import create_routine
-
-    create_routine.create_routine(client, random_routine_id)
-    out, err = capsys.readouterr()
-    assert "Created routine {}".format(random_routine_id) in out
+from .. import create_routine_ddl
 
 
 def test_create_routine_ddl(capsys, client, random_routine_id):
-    from .. import create_routine_ddl
 
     create_routine_ddl.create_routine_ddl(client, random_routine_id)
     routine = client.get_routine(random_routine_id)
 
     out, err = capsys.readouterr()
-    assert "Created routine {}".format(random_routine_id) in out
-    return routine
+    assert "Created routine {}".format(random_routine_id) in out
 
     assert routine.type_ == "SCALAR_FUNCTION"
     assert routine.language == "SQL"
     expected_arguments = [
@@ -63,27 +54,3 @@ def test_create_routine_ddl(capsys, client, random_routine_id):
         )
     ]
     assert routine.arguments == expected_arguments
-
-
-def test_list_routines(capsys, client, dataset_id, routine_id):
-    from .. import list_routines
-
-    list_routines.list_routines(client, dataset_id)
-    out, err = capsys.readouterr()
-    assert "Routines contained in dataset {}:".format(dataset_id) in out
-    assert routine_id in out
-
-
-def test_delete_routine(capsys, client, routine_id):
-    from .. import delete_routine
-
-    delete_routine.delete_routine(client, routine_id)
-    out, err = capsys.readouterr()
-    assert "Deleted routine {}.".format(routine_id) in out
-
-
-def test_update_routine(client, routine_id):
-    from .. import update_routine
-
-    routine = update_routine.update_routine(client, routine_id)
-    assert routine.body == "x * 4"
diff --git a/bigquery/samples/tests/test_delete_routine.py b/bigquery/samples/tests/test_delete_routine.py
new file mode 100644
index 000000000000..9347d1e22dc2
--- /dev/null
+++ b/bigquery/samples/tests/test_delete_routine.py
@@ -0,0 +1,23 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import delete_routine
+
+
+def test_delete_routine(capsys, client, routine_id):
+
+    delete_routine.delete_routine(client, routine_id)
+    out, err = capsys.readouterr()
+    assert "Deleted routine {}.".format(routine_id) in out
diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py
index 374f8835211a..dedec1d7b29e 100644
--- a/bigquery/samples/tests/test_get_dataset.py
+++ b/bigquery/samples/tests/test_get_dataset.py
@@ -19,4 +19,4 @@ def test_get_dataset(capsys, client, dataset_id):
 
     get_dataset.get_dataset(client, dataset_id)
     out, err = capsys.readouterr()
-    assert "{}".format(dataset_id) in out
+    assert dataset_id in out
diff --git a/bigquery/samples/tests/test_get_routine.py b/bigquery/samples/tests/test_get_routine.py
new file mode 100644
index 000000000000..fa5f3093116c
--- /dev/null
+++ b/bigquery/samples/tests/test_get_routine.py
@@ -0,0 +1,27 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import get_routine
+
+
+def test_get_routine(capsys, client, routine_id):
+
+    get_routine.get_routine(client, routine_id)
+    out, err = capsys.readouterr()
+    assert "Routine '{}':".format(routine_id) in out
+    assert "Type: 'SCALAR_FUNCTION'" in out
+    assert "Language: 'SQL'" in out
+    assert "Name: 'x'" in out
+    assert "Type: 'type_kind: INT64\n'" in out
diff --git a/bigquery/samples/tests/test_get_table.py b/bigquery/samples/tests/test_get_table.py
index b811ccecad1f..efbd464d54ad 100644
--- a/bigquery/samples/tests/test_get_table.py
+++ b/bigquery/samples/tests/test_get_table.py
@@ -30,7 +30,7 @@ def test_get_table(capsys, client, random_table_id):
     get_table.get_table(client, random_table_id)
     out, err = capsys.readouterr()
     assert "Got table '{}'.".format(random_table_id) in out
-    assert "full_name" in out  # test that schema is printed
+    assert "full_name" in out
     assert "Table description: Sample Table" in out
     assert "Table has 0 rows" in out
     client.delete_table(table, not_found_ok=True)
diff --git a/bigquery/samples/tests/test_list_datasets_by_label.py b/bigquery/samples/tests/test_list_datasets_by_label.py
index 346cbf1a982d..f414539b00b3 100644
--- a/bigquery/samples/tests/test_list_datasets_by_label.py
+++ b/bigquery/samples/tests/test_list_datasets_by_label.py
@@ -23,4 +23,4 @@ def test_list_datasets_by_label(capsys, client, dataset_id):
     dataset = client.update_dataset(dataset, ["labels"])
     list_datasets_by_label.list_datasets_by_label(client)
     out, err = capsys.readouterr()
-    assert "{}".format(dataset_id) in out
+    assert dataset_id in out
diff --git a/bigquery/samples/tests/test_list_routines.py b/bigquery/samples/tests/test_list_routines.py
new file mode 100644
index 000000000000..e249238e1976
--- /dev/null
+++ b/bigquery/samples/tests/test_list_routines.py
@@ -0,0 +1,24 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import list_routines
+
+
+def test_list_routines(capsys, client, dataset_id, routine_id):
+
+    list_routines.list_routines(client, dataset_id)
+    out, err = capsys.readouterr()
+    assert "Routines contained in dataset {}:".format(dataset_id) in out
+    assert routine_id in out
diff --git a/bigquery/samples/tests/test_load_table_dataframe.py b/bigquery/samples/tests/test_load_table_dataframe.py
index d553d449a525..2151704d3b25 100644
--- a/bigquery/samples/tests/test_load_table_dataframe.py
+++ b/bigquery/samples/tests/test_load_table_dataframe.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 import pytest
 
 from .. import load_table_dataframe
 
-pytest.importorskip("pandas")
-pytest.importorskip("pyarrow")
+pandas = pytest.importorskip("pandas")
+pyarrow = pytest.importorskip("pyarrow")
 
 
 def test_load_table_dataframe(capsys, client, random_table_id):
+
     table = load_table_dataframe.load_table_dataframe(client, random_table_id)
     out, _ = capsys.readouterr()
     assert "Loaded 4 rows and 3 columns" in out
diff --git a/bigquery/samples/tests/test_query_to_arrow.py b/bigquery/samples/tests/test_query_to_arrow.py
index f70bd49fe565..dd9b3ab508cc 100644
--- a/bigquery/samples/tests/test_query_to_arrow.py
+++ b/bigquery/samples/tests/test_query_to_arrow.py
@@ -22,7 +22,6 @@ def test_query_to_arrow(capsys, client):
     arrow_table = query_to_arrow.query_to_arrow(client)
     out, err = capsys.readouterr()
     assert "Downloaded 8 rows, 2 columns." in out
-
     arrow_schema = arrow_table.schema
     assert arrow_schema.names == ["race", "participant"]
     assert pyarrow.types.is_string(arrow_schema.types[0])
diff --git a/bigquery/samples/tests/test_update_routine.py b/bigquery/samples/tests/test_update_routine.py
new file mode 100644
index 000000000000..8adfab32e032
--- /dev/null
+++ b/bigquery/samples/tests/test_update_routine.py
@@ -0,0 +1,22 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .. import update_routine
+
+
+def test_update_routine(client, routine_id):
+
+    routine = update_routine.update_routine(client, routine_id)
+    assert routine.body == "x * 4"
diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py
index aa316a38dff9..69a957ae2c5d 100644
--- a/bigquery/samples/update_dataset_access.py
+++ b/bigquery/samples/update_dataset_access.py
@@ -24,7 +24,7 @@ def update_dataset_access(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id)
+    dataset = client.get_dataset(dataset_id)  # API request.
 
     entry = bigquery.AccessEntry(
         role="READER",
@@ -36,7 +36,7 @@ def update_dataset_access(client, dataset_id):
     entries.append(entry)
     dataset.access_entries = entries
 
-    dataset = client.update_dataset(dataset, ["access_entries"])  # API request
+    dataset = client.update_dataset(dataset, ["access_entries"])  # API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py
index 4534bb2011eb..aefa44a8afad 100644
--- a/bigquery/samples/update_dataset_default_table_expiration.py
+++ b/bigquery/samples/update_dataset_default_table_expiration.py
@@ -25,12 +25,12 @@ def update_dataset_default_table_expiration(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id)
-    dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000  # in milliseconds
+    dataset = client.get_dataset(dataset_id)  # API request.
+    dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000  # in milliseconds.
 
     dataset = client.update_dataset(
         dataset, ["default_table_expiration_ms"]
-    )  # API request
+    )  # API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py
index f3afb7fa68ce..1a67bcacd374 100644
--- a/bigquery/samples/update_dataset_description.py
+++ b/bigquery/samples/update_dataset_description.py
@@ -25,9 +25,9 @@ def update_dataset_description(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id)
+    dataset = client.get_dataset(dataset_id)  # API request.
     dataset.description = "Updated description."
-    dataset = client.update_dataset(dataset, ["description"])
+    dataset = client.update_dataset(dataset, ["description"])  # API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py
index 5df4ada886ed..725d95840905 100644
--- a/bigquery/samples/update_model.py
+++ b/bigquery/samples/update_model.py
@@ -26,9 +26,9 @@ def update_model(client, model_id):
     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
 
-    model = client.get_model(model_id)
+    model = client.get_model(model_id)  # API request.
     model.description = "This model was modified from a Python program."
-    model = client.update_model(model, ["description"])
+    model = client.update_model(model, ["description"])  # API request.
 
     full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
     print(
diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py
index 4d491d42e168..8de7a05297fd 100644
--- a/bigquery/samples/update_routine.py
+++ b/bigquery/samples/update_routine.py
@@ -33,13 +33,14 @@ def update_routine(client, routine_id):
         routine,
         [
             "body",
-            # Due to a limitation of the API, all fields are required, not just
+            # Due to a limitation of the API,
+            # all fields are required, not just
             # those that have been updated.
             "arguments",
             "language",
             "type_",
             "return_type",
         ],
-    )
+    )  # API request.
     # [END bigquery_update_routine]
     return routine

From 5decf08f958c27e537dac3e3e504557b95933e83 Mon Sep 17 00:00:00 2001
From: Leonid Emar-Kar
Date: Wed, 25 Sep 2019 10:27:32 +0300
Subject: [PATCH 2/3] comments rephrasing

---
 bigquery/samples/add_empty_column.py                        | 4 ++--
 bigquery/samples/browse_table_data.py                       | 4 ++--
 bigquery/samples/create_dataset.py                          | 2 +-
 bigquery/samples/create_job.py                              | 2 +-
 bigquery/samples/create_routine.py                          | 2 +-
 bigquery/samples/create_routine_ddl.py                      | 2 +-
 bigquery/samples/create_table.py                            | 2 +-
 bigquery/samples/dataset_exists.py                          | 2 +-
 bigquery/samples/delete_dataset.py                          | 2 +-
 bigquery/samples/delete_dataset_labels.py                   | 4 ++--
 bigquery/samples/delete_model.py                            | 2 +-
 bigquery/samples/delete_routine.py                          | 2 +-
 bigquery/samples/delete_table.py                            | 2 +-
 bigquery/samples/get_dataset.py                             | 2 +-
 bigquery/samples/get_dataset_labels.py                      | 2 +-
 bigquery/samples/get_model.py                               | 2 +-
 bigquery/samples/get_routine.py                             | 2 +-
 bigquery/samples/get_table.py                               | 2 +-
 bigquery/samples/label_dataset.py                           | 4 ++--
 bigquery/samples/list_datasets.py                           | 2 +-
 bigquery/samples/list_datasets_by_label.py                  | 2 +-
 bigquery/samples/list_models.py                             | 2 +-
 bigquery/samples/list_routines.py                           | 2 +-
 bigquery/samples/list_tables.py                             | 2 +-
 bigquery/samples/load_table_dataframe.py                    | 4 ++--
 bigquery/samples/query_to_arrow.py                          | 2 +-
 bigquery/samples/update_dataset_access.py                   | 4 ++--
 bigquery/samples/update_dataset_default_table_expiration.py | 4 ++--
 bigquery/samples/update_dataset_description.py              | 4 ++--
 bigquery/samples/update_model.py                            | 4 ++--
 bigquery/samples/update_routine.py                          | 2 +-
 31 files changed, 40 insertions(+), 40 deletions(-)

diff --git a/bigquery/samples/add_empty_column.py b/bigquery/samples/add_empty_column.py
index 3d40bc177fbd..4f0b971e577a 100644
--- a/bigquery/samples/add_empty_column.py
+++ b/bigquery/samples/add_empty_column.py
@@ -25,14 +25,14 @@ def add_empty_column(client, table_id):
     # to add an empty column.
     # table_id = "your-project.your_dataset.your_table_name"
 
-    table = client.get_table(table_id)  # API request.
+    table = client.get_table(table_id)  # Make an API request.
 
     original_schema = table.schema
     new_schema = original_schema[:]  # creates a copy of the schema
     new_schema.append(bigquery.SchemaField("phone", "STRING"))
 
     table.schema = new_schema
-    table = client.update_table(table, ["schema"])  # API request.
+    table = client.update_table(table, ["schema"])  # Make an API request.
 
     if len(table.schema) == len(original_schema) + 1 == len(new_schema):
         print("A new column has been added.")
diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py
index 1b14701fab1d..bba8dc434dd9 100644
--- a/bigquery/samples/browse_table_data.py
+++ b/bigquery/samples/browse_table_data.py
@@ -26,7 +26,7 @@ def browse_table_data(client, table_id):
     # table_id = "your-project.your_dataset.your_table_name"
 
     # Download all rows from a table.
-    rows_iter = client.list_rows(table_id)  # API request.
+    rows_iter = client.list_rows(table_id)  # Make an API request.
 
     # Iterate over rows to make the API requests to fetch row data.
     rows = list(rows_iter)
@@ -38,7 +38,7 @@ def browse_table_data(client, table_id):
     print("Downloaded {} rows from table {}".format(len(rows), table_id))
 
     # Specify selected fields to limit the results to certain columns.
-    table = client.get_table(table_id)  # API request.
+    table = client.get_table(table_id)  # Make an API request.
     fields = table.schema[:2]  # first two columns
     rows_iter = client.list_rows(table_id, selected_fields=fields, max_results=10)
     rows = list(rows_iter)
diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py
index 166b7198235b..3d64473a2321 100644
--- a/bigquery/samples/create_dataset.py
+++ b/bigquery/samples/create_dataset.py
@@ -33,6 +33,6 @@ def create_dataset(client, dataset_id):
     # Send the dataset to the API for creation.
     # Raises google.api_core.exceptions.Conflict if the Dataset already
     # exists within the project.
-    dataset = client.create_dataset(dataset)  # API request.
+    dataset = client.create_dataset(dataset)  # Make an API request.
     print("Created dataset {}.{}".format(client.project, dataset.dataset_id))
     # [END bigquery_create_dataset]
diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py
index 3b615fe4976b..4f7f27a8e668 100644
--- a/bigquery/samples/create_job.py
+++ b/bigquery/samples/create_job.py
@@ -33,7 +33,7 @@ def create_job(client):
         # The client libraries automatically generate a job ID. Override the
         # generated ID with either the job_id_prefix or job_id parameters.
         job_id_prefix="code_sample_",
-    )  # API request.
+    )  # Make an API request.
 
     print("Started job: {}".format(query_job.job_id))
     # [END bigquery_create_job]
diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py
index dad3ef20f180..424ee4ef5553 100644
--- a/bigquery/samples/create_routine.py
+++ b/bigquery/samples/create_routine.py
@@ -40,7 +40,7 @@ def create_routine(client, routine_id):
         ],
     )
 
-    routine = client.create_routine(routine)  # API request.
+    routine = client.create_routine(routine)  # Make an API request.
 
     print("Created routine {}".format(routine.reference))
     # [END bigquery_create_routine]
diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py
index ad38f8935823..836e0cdde34a 100644
--- a/bigquery/samples/create_routine_ddl.py
+++ b/bigquery/samples/create_routine_ddl.py
@@ -36,7 +36,7 @@ def create_routine_ddl(client, routine_id):
     )
 
     # Initiate the query to create the routine.
-    query_job = client.query(sql)  # API request.
+    query_job = client.query(sql)  # Make an API request.
 
     # Wait for the query to complete.
     query_job.result()  # Waits for the job to complete.
diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py
index 3f6be9cfea14..b77812f7e0ce 100644
--- a/bigquery/samples/create_table.py
+++ b/bigquery/samples/create_table.py
@@ -30,7 +30,7 @@ def create_table(client, table_id):
     ]
 
     table = bigquery.Table(table_id, schema=schema)
-    table = client.create_table(table)  # API request.
+    table = client.create_table(table)  # Make an API request.
     print(
         "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
     )
diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py
index 71c7a60d9922..b8b53b8a4580 100644
--- a/bigquery/samples/dataset_exists.py
+++ b/bigquery/samples/dataset_exists.py
@@ -22,7 +22,7 @@ def dataset_exists(client, dataset_id):
     # dataset_id = "your-project.your_dataset"
 
     try:
-        client.get_dataset(dataset_id)  # API request.
+        client.get_dataset(dataset_id)  # Make an API request.
print("Dataset {} already exists".format(dataset_id)) except NotFound: print("Dataset {} is not found".format(dataset_id)) diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py index 1cb1bd6f269f..29302f099998 100644 --- a/bigquery/samples/delete_dataset.py +++ b/bigquery/samples/delete_dataset.py @@ -29,7 +29,7 @@ def delete_dataset(client, dataset_id): # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted. client.delete_dataset( dataset_id, delete_contents=True, not_found_ok=True - ) # API request. + ) # Make an API request. print("Deleted dataset '{}'.".format(dataset_id)) # [END bigquery_delete_dataset] diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py index f4cbf7ba20d9..425bc98dd96e 100644 --- a/bigquery/samples/delete_dataset_labels.py +++ b/bigquery/samples/delete_dataset_labels.py @@ -25,12 +25,12 @@ def delete_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) # API request. + dataset = client.get_dataset(dataset_id) # Make an API request. # To delete a label from a dataset, set its value to None dataset.labels["color"] = None - dataset = client.update_dataset(dataset, ["labels"]) # API request. + dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. print("Labels deleted from {}".format(dataset_id)) # [END bigquery_delete_label_dataset] return dataset diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py index b4d405aca3a0..b6f32a59ebd9 100644 --- a/bigquery/samples/delete_model.py +++ b/bigquery/samples/delete_model.py @@ -26,7 +26,7 @@ def delete_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - client.delete_model(model_id) # API request. + client.delete_model(model_id) # Make an API request. print("Deleted model '{}'.".format(model_id)) # [END bigquery_delete_model] diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py index c792fafef8b3..c20b49837b75 100644 --- a/bigquery/samples/delete_routine.py +++ b/bigquery/samples/delete_routine.py @@ -25,7 +25,7 @@ def delete_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - client.delete_routine(routine_id) # API request. + client.delete_routine(routine_id) # Make an API request. print("Deleted routine {}.".format(routine_id)) # [END bigquery_delete_routine] diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py index db592e9c8385..4c4377418556 100644 --- a/bigquery/samples/delete_table.py +++ b/bigquery/samples/delete_table.py @@ -27,6 +27,6 @@ def delete_table(client, table_id): # If the table does not exist, delete_table raises # google.api_core.exceptions.NotFound unless not_found_ok is True - client.delete_table(table_id, not_found_ok=True) # API request. + client.delete_table(table_id, not_found_ok=True) # Make an API request. 
print("Deleted table '{}'.".format(table_id)) # [END bigquery_delete_table] diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py index 4bb73f38061d..cd35745c0dc5 100644 --- a/bigquery/samples/get_dataset.py +++ b/bigquery/samples/get_dataset.py @@ -25,7 +25,7 @@ def get_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) # API request. + dataset = client.get_dataset(dataset_id) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) friendly_name = dataset.friendly_name diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py index 410e7f3cbb11..46e38a3a9a56 100644 --- a/bigquery/samples/get_dataset_labels.py +++ b/bigquery/samples/get_dataset_labels.py @@ -25,7 +25,7 @@ def get_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) # API request. + dataset = client.get_dataset(dataset_id) # Make an API request. # View dataset labels print("Dataset ID: {}".format(dataset_id)) diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py index fd1bcf2ec4a1..0ebd59c9d067 100644 --- a/bigquery/samples/get_model.py +++ b/bigquery/samples/get_model.py @@ -26,7 +26,7 @@ def get_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - model = client.get_model(model_id) # API request. + model = client.get_model(model_id) # Make an API request. full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) friendly_name = model.friendly_name diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py index 64e2cf29ac0b..da4e89f57f19 100644 --- a/bigquery/samples/get_routine.py +++ b/bigquery/samples/get_routine.py @@ -25,7 +25,7 @@ def get_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - routine = client.get_routine(routine_id) # API request. + routine = client.get_routine(routine_id) # Make an API request. print("Routine '{}':".format(routine.reference)) print("\tType: '{}'".format(routine.type_)) diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py index f57fc2f31874..201b8808a846 100644 --- a/bigquery/samples/get_table.py +++ b/bigquery/samples/get_table.py @@ -25,7 +25,7 @@ def get_table(client, table_id): # TODO(developer): Set table_id to the ID of the model to fetch. # table_id = 'your-project.your_dataset.your_table' - table = client.get_table(table_id) # API request. + table = client.get_table(table_id) # Make an API request. # View table properties print( diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py index 15f3a597745b..019b2aa374a0 100644 --- a/bigquery/samples/label_dataset.py +++ b/bigquery/samples/label_dataset.py @@ -25,9 +25,9 @@ def label_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) # API request. + dataset = client.get_dataset(dataset_id) # Make an API request. dataset.labels = {"color": "green"} - dataset = client.update_dataset(dataset, ["labels"]) # API request. 
+    dataset = client.update_dataset(dataset, ["labels"])  # Make an API request.
 
     print("Labels added to {}".format(dataset_id))
     # [END bigquery_label_dataset]
diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py
index 3615f55edb00..77ae8c785d22 100644
--- a/bigquery/samples/list_datasets.py
+++ b/bigquery/samples/list_datasets.py
@@ -22,7 +22,7 @@ def list_datasets(client):
     # TODO(developer): Construct a BigQuery client object.
     # client = bigquery.Client()
 
-    datasets = list(client.list_datasets())  # API request.
+    datasets = list(client.list_datasets())  # Make an API request.
     project = client.project
 
     if datasets:
diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py
index 26d9ed7609f0..9fa939ad0c19 100644
--- a/bigquery/samples/list_datasets_by_label.py
+++ b/bigquery/samples/list_datasets_by_label.py
@@ -23,7 +23,7 @@ def list_datasets_by_label(client):
     # client = bigquery.Client()
 
     label_filter = "labels.color:green"
-    datasets = list(client.list_datasets(filter=label_filter))  # API request.
+    datasets = list(client.list_datasets(filter=label_filter))  # Make an API request.
 
     if datasets:
         print("Datasets filtered by {}:".format(label_filter))
diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py
index 1e5575139c50..a2477ffc795b 100644
--- a/bigquery/samples/list_models.py
+++ b/bigquery/samples/list_models.py
@@ -27,7 +27,7 @@ def list_models(client, dataset_id):
     #                  the models you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    models = client.list_models(dataset_id)  # API request.
+    models = client.list_models(dataset_id)  # Make an API request.
 
     print("Models contained in '{}':".format(dataset_id))
     for model in models:
diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py
index 379381492646..5eaad0cec8f4 100644
--- a/bigquery/samples/list_routines.py
+++ b/bigquery/samples/list_routines.py
@@ -26,7 +26,7 @@ def list_routines(client, dataset_id):
     #                  the routines you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    routines = client.list_routines(dataset_id)  # API request.
+    routines = client.list_routines(dataset_id)  # Make an API request.
 
     print("Routines contained in dataset {}:".format(dataset_id))
     for routine in routines:
diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py
index 951c600badc7..d7576616e191 100644
--- a/bigquery/samples/list_tables.py
+++ b/bigquery/samples/list_tables.py
@@ -26,7 +26,7 @@ def list_tables(client, dataset_id):
     #                  the tables you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    tables = client.list_tables(dataset_id)  # API request.
+    tables = client.list_tables(dataset_id)  # Make an API request.
 
     print("Tables contained in '{}':".format(dataset_id))
     for table in tables:
diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py
index aed88180988c..f08712d4dc32 100644
--- a/bigquery/samples/load_table_dataframe.py
+++ b/bigquery/samples/load_table_dataframe.py
@@ -65,10 +65,10 @@ def load_table_dataframe(client, table_id):
         table_id,
         job_config=job_config,
         location="US",  # Must match the destination dataset location.
-    )  # API request.
+    )  # Make an API request.
     job.result()  # Waits for the job to complete.
 
-    table = client.get_table(table_id)  # API request.
+    table = client.get_table(table_id)  # Make an API request.
     print(
         "Loaded {} rows and {} columns to {}".format(
             table.num_rows, len(table.schema), table_id
diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py
index 81c3b67c4d2d..4cc69d4e902a 100644
--- a/bigquery/samples/query_to_arrow.py
+++ b/bigquery/samples/query_to_arrow.py
@@ -41,7 +41,7 @@ def query_to_arrow(client):
     CROSS JOIN UNNEST(r.participants) as participant;
     """
     query_job = client.query(sql)
-    arrow_table = query_job.to_arrow()  # API request.
+    arrow_table = query_job.to_arrow()  # Make an API request.
 
     print(
         "Downloaded {} rows, {} columns.".format(
diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py
index 69a957ae2c5d..134cf1b940cf 100644
--- a/bigquery/samples/update_dataset_access.py
+++ b/bigquery/samples/update_dataset_access.py
@@ -24,7 +24,7 @@ def update_dataset_access(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id)  # API request.
+    dataset = client.get_dataset(dataset_id)  # Make an API request.
 
     entry = bigquery.AccessEntry(
         role="READER",
@@ -36,7 +36,7 @@ def update_dataset_access(client, dataset_id):
     entries.append(entry)
     dataset.access_entries = entries
 
-    dataset = client.update_dataset(dataset, ["access_entries"])  # API request.
+    dataset = client.update_dataset(dataset, ["access_entries"])  # Make an API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py
index aefa44a8afad..7b68ede8d2be 100644
--- a/bigquery/samples/update_dataset_default_table_expiration.py
+++ b/bigquery/samples/update_dataset_default_table_expiration.py
@@ -25,12 +25,12 @@ def update_dataset_default_table_expiration(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id)  # API request.
+    dataset = client.get_dataset(dataset_id)  # Make an API request.
     dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000  # in milliseconds.
 
     dataset = client.update_dataset(
         dataset, ["default_table_expiration_ms"]
-    )  # API request.
+    )  # Make an API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py
index 1a67bcacd374..08eed8da2b64 100644
--- a/bigquery/samples/update_dataset_description.py
+++ b/bigquery/samples/update_dataset_description.py
@@ -25,9 +25,9 @@ def update_dataset_description(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id)  # API request.
+    dataset = client.get_dataset(dataset_id)  # Make an API request.
     dataset.description = "Updated description."
-    dataset = client.update_dataset(dataset, ["description"])  # API request.
+    dataset = client.update_dataset(dataset, ["description"])  # Make an API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py
index 725d95840905..7583c410e1ef 100644
--- a/bigquery/samples/update_model.py
+++ b/bigquery/samples/update_model.py
@@ -26,9 +26,9 @@ def update_model(client, model_id):
     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
 
-    model = client.get_model(model_id)  # API request.
+    model = client.get_model(model_id)  # Make an API request.
     model.description = "This model was modified from a Python program."
-    model = client.update_model(model, ["description"])  # API request.
+    model = client.update_model(model, ["description"])  # Make an API request.
 
     full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
     print(
diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py
index 8de7a05297fd..4489d68f7ee4 100644
--- a/bigquery/samples/update_routine.py
+++ b/bigquery/samples/update_routine.py
@@ -41,6 +41,6 @@ def update_routine(client, routine_id):
             "type_",
             "return_type",
         ],
-    )  # API request.
+    )  # Make an API request.
     # [END bigquery_update_routine]
     return routine
 
     original_schema = table.schema
     new_schema = original_schema[:] # creates a copy of the schema
     new_schema.append(bigquery.SchemaField("phone", "STRING"))
 
     table.schema = new_schema
-    table = client.update_table(table, ["schema"]) # API request.
+    table = client.update_table(table, ["schema"]) # Make an API request.
 
     if len(table.schema) == len(original_schema) + 1 == len(new_schema):
         print("A new column has been added.")
diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py
index 1b14701fab1d..bba8dc434dd9 100644
--- a/bigquery/samples/browse_table_data.py
+++ b/bigquery/samples/browse_table_data.py
@@ -26,7 +26,7 @@ def browse_table_data(client, table_id):
     # table_id = "your-project.your_dataset.your_table_name"
 
     # Download all rows from a table.
-    rows_iter = client.list_rows(table_id) # API request.
+    rows_iter = client.list_rows(table_id) # Make an API request.
 
     # Iterate over rows to make the API requests to fetch row data.
     rows = list(rows_iter)
@@ -38,7 +38,7 @@ def browse_table_data(client, table_id):
     print("Downloaded {} rows from table {}".format(len(rows), table_id))
 
     # Specify selected fields to limit the results to certain columns.
-    table = client.get_table(table_id) # API request.
+    table = client.get_table(table_id) # Make an API request.
     fields = table.schema[:2] # first two columns
     rows_iter = client.list_rows(table_id, selected_fields=fields, max_results=10)
     rows = list(rows_iter)
diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py
index 166b7198235b..3d64473a2321 100644
--- a/bigquery/samples/create_dataset.py
+++ b/bigquery/samples/create_dataset.py
@@ -33,6 +33,6 @@ def create_dataset(client, dataset_id):
     # Send the dataset to the API for creation.
     # Raises google.api_core.exceptions.Conflict if the Dataset already
     # exists within the project.
-    dataset = client.create_dataset(dataset) # API request.
+    dataset = client.create_dataset(dataset) # Make an API request.
     print("Created dataset {}.{}".format(client.project, dataset.dataset_id))
     # [END bigquery_create_dataset]
diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py
index 3b615fe4976b..4f7f27a8e668 100644
--- a/bigquery/samples/create_job.py
+++ b/bigquery/samples/create_job.py
@@ -33,7 +33,7 @@ def create_job(client):
         # The client libraries automatically generate a job ID. Override the
         # generated ID with either the job_id_prefix or job_id parameters.
         job_id_prefix="code_sample_",
-    ) # API request.
+    ) # Make an API request.
 
     print("Started job: {}".format(query_job.job_id))
     # [END bigquery_create_job]
diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py
index dad3ef20f180..424ee4ef5553 100644
--- a/bigquery/samples/create_routine.py
+++ b/bigquery/samples/create_routine.py
@@ -40,7 +40,7 @@ def create_routine(client, routine_id):
         ],
     )
 
-    routine = client.create_routine(routine) # API request.
+    routine = client.create_routine(routine) # Make an API request.
 
     print("Created routine {}".format(routine.reference))
     # [END bigquery_create_routine]
diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py
index ad38f8935823..836e0cdde34a 100644
--- a/bigquery/samples/create_routine_ddl.py
+++ b/bigquery/samples/create_routine_ddl.py
@@ -36,7 +36,7 @@ def create_routine_ddl(client, routine_id):
     )
 
     # Initiate the query to create the routine.
-    query_job = client.query(sql) # API request.
+    query_job = client.query(sql) # Make an API request.
 
     # Wait for the query to complete.
     query_job.result() # Waits for the job to complete.
diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py
index 3f6be9cfea14..b77812f7e0ce 100644
--- a/bigquery/samples/create_table.py
+++ b/bigquery/samples/create_table.py
@@ -30,7 +30,7 @@ def create_table(client, table_id):
     ]
 
     table = bigquery.Table(table_id, schema=schema)
-    table = client.create_table(table) # API request.
+    table = client.create_table(table) # Make an API request.
     print(
         "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
     )
diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py
index 71c7a60d9922..b8b53b8a4580 100644
--- a/bigquery/samples/dataset_exists.py
+++ b/bigquery/samples/dataset_exists.py
@@ -22,7 +22,7 @@ def dataset_exists(client, dataset_id):
     # dataset_id = "your-project.your_dataset"
 
     try:
-        client.get_dataset(dataset_id) # API request.
+        client.get_dataset(dataset_id) # Make an API request.
         print("Dataset {} already exists".format(dataset_id))
     except NotFound:
         print("Dataset {} is not found".format(dataset_id))
diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py
index 1cb1bd6f269f..29302f099998 100644
--- a/bigquery/samples/delete_dataset.py
+++ b/bigquery/samples/delete_dataset.py
@@ -29,7 +29,7 @@ def delete_dataset(client, dataset_id):
     # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted.
     client.delete_dataset(
         dataset_id, delete_contents=True, not_found_ok=True
-    ) # API request.
+    ) # Make an API request.
 
     print("Deleted dataset '{}'.".format(dataset_id))
     # [END bigquery_delete_dataset]
diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py
index f4cbf7ba20d9..425bc98dd96e 100644
--- a/bigquery/samples/delete_dataset_labels.py
+++ b/bigquery/samples/delete_dataset_labels.py
@@ -25,12 +25,12 @@ def delete_dataset_labels(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = "your-project.your_dataset"
 
-    dataset = client.get_dataset(dataset_id) # API request.
+    dataset = client.get_dataset(dataset_id) # Make an API request.
 
     # To delete a label from a dataset, set its value to None
     dataset.labels["color"] = None
 
-    dataset = client.update_dataset(dataset, ["labels"]) # API request.
+    dataset = client.update_dataset(dataset, ["labels"]) # Make an API request.
     print("Labels deleted from {}".format(dataset_id))
     # [END bigquery_delete_label_dataset]
     return dataset
diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py
index b4d405aca3a0..b6f32a59ebd9 100644
--- a/bigquery/samples/delete_model.py
+++ b/bigquery/samples/delete_model.py
@@ -26,7 +26,7 @@ def delete_model(client, model_id):
     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
 
-    client.delete_model(model_id) # API request.
+    client.delete_model(model_id) # Make an API request.
 
     print("Deleted model '{}'.".format(model_id))
     # [END bigquery_delete_model]
diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py
index c792fafef8b3..c20b49837b75 100644
--- a/bigquery/samples/delete_routine.py
+++ b/bigquery/samples/delete_routine.py
@@ -25,7 +25,7 @@ def delete_routine(client, routine_id):
     # TODO(developer): Set the fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
 
-    client.delete_routine(routine_id) # API request.
+    client.delete_routine(routine_id) # Make an API request.
 
     print("Deleted routine {}.".format(routine_id))
     # [END bigquery_delete_routine]
diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py
index db592e9c8385..4c4377418556 100644
--- a/bigquery/samples/delete_table.py
+++ b/bigquery/samples/delete_table.py
@@ -27,6 +27,6 @@ def delete_table(client, table_id):
 
     # If the table does not exist, delete_table raises
     # google.api_core.exceptions.NotFound unless not_found_ok is True
-    client.delete_table(table_id, not_found_ok=True) # API request.
+    client.delete_table(table_id, not_found_ok=True) # Make an API request.
     print("Deleted table '{}'.".format(table_id))
     # [END bigquery_delete_table]
diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py
index 4bb73f38061d..cd35745c0dc5 100644
--- a/bigquery/samples/get_dataset.py
+++ b/bigquery/samples/get_dataset.py
@@ -25,7 +25,7 @@ def get_dataset(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id) # API request.
+    dataset = client.get_dataset(dataset_id) # Make an API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     friendly_name = dataset.friendly_name
diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py
index 410e7f3cbb11..46e38a3a9a56 100644
--- a/bigquery/samples/get_dataset_labels.py
+++ b/bigquery/samples/get_dataset_labels.py
@@ -25,7 +25,7 @@ def get_dataset_labels(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = "your-project.your_dataset"
 
-    dataset = client.get_dataset(dataset_id) # API request.
+    dataset = client.get_dataset(dataset_id) # Make an API request.
 
     # View dataset labels
     print("Dataset ID: {}".format(dataset_id))
diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py
index fd1bcf2ec4a1..0ebd59c9d067 100644
--- a/bigquery/samples/get_model.py
+++ b/bigquery/samples/get_model.py
@@ -26,7 +26,7 @@ def get_model(client, model_id):
     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
 
-    model = client.get_model(model_id) # API request.
+    model = client.get_model(model_id) # Make an API request.
 
     full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
     friendly_name = model.friendly_name
diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py
index 64e2cf29ac0b..da4e89f57f19 100644
--- a/bigquery/samples/get_routine.py
+++ b/bigquery/samples/get_routine.py
@@ -25,7 +25,7 @@ def get_routine(client, routine_id):
     # TODO(developer): Set the fully-qualified ID for the routine.
     # routine_id = "my-project.my_dataset.my_routine"
 
-    routine = client.get_routine(routine_id) # API request.
+    routine = client.get_routine(routine_id) # Make an API request.
 
     print("Routine '{}':".format(routine.reference))
     print("\tType: '{}'".format(routine.type_))
diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py
index f57fc2f31874..201b8808a846 100644
--- a/bigquery/samples/get_table.py
+++ b/bigquery/samples/get_table.py
@@ -25,7 +25,7 @@ def get_table(client, table_id):
     # TODO(developer): Set table_id to the ID of the model to fetch.
     # table_id = 'your-project.your_dataset.your_table'
 
-    table = client.get_table(table_id) # API request.
+    table = client.get_table(table_id) # Make an API request.
 
     # View table properties
     print(
diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py
index 15f3a597745b..019b2aa374a0 100644
--- a/bigquery/samples/label_dataset.py
+++ b/bigquery/samples/label_dataset.py
@@ -25,9 +25,9 @@ def label_dataset(client, dataset_id):
     # TODO(developer): Set dataset_id to the ID of the dataset to fetch.
     # dataset_id = "your-project.your_dataset"
 
-    dataset = client.get_dataset(dataset_id) # API request.
+    dataset = client.get_dataset(dataset_id) # Make an API request.
     dataset.labels = {"color": "green"}
-    dataset = client.update_dataset(dataset, ["labels"]) # API request.
+    dataset = client.update_dataset(dataset, ["labels"]) # Make an API request.
 
     print("Labels added to {}".format(dataset_id))
     # [END bigquery_label_dataset]
diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py
index 3615f55edb00..77ae8c785d22 100644
--- a/bigquery/samples/list_datasets.py
+++ b/bigquery/samples/list_datasets.py
@@ -22,7 +22,7 @@ def list_datasets(client):
     # TODO(developer): Construct a BigQuery client object.
     # client = bigquery.Client()
 
-    datasets = list(client.list_datasets()) # API request.
+    datasets = list(client.list_datasets()) # Make an API request.
     project = client.project
 
     if datasets:
diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py
index 26d9ed7609f0..9fa939ad0c19 100644
--- a/bigquery/samples/list_datasets_by_label.py
+++ b/bigquery/samples/list_datasets_by_label.py
@@ -23,7 +23,7 @@ def list_datasets_by_label(client):
     # client = bigquery.Client()
 
     label_filter = "labels.color:green"
-    datasets = list(client.list_datasets(filter=label_filter)) # API request.
+    datasets = list(client.list_datasets(filter=label_filter)) # Make an API request.
 
     if datasets:
         print("Datasets filtered by {}:".format(label_filter))
diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py
index 1e5575139c50..a2477ffc795b 100644
--- a/bigquery/samples/list_models.py
+++ b/bigquery/samples/list_models.py
@@ -27,7 +27,7 @@ def list_models(client, dataset_id):
     # the models you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    models = client.list_models(dataset_id) # API request.
+    models = client.list_models(dataset_id) # Make an API request.
 
     print("Models contained in '{}':".format(dataset_id))
     for model in models:
diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py
index 379381492646..5eaad0cec8f4 100644
--- a/bigquery/samples/list_routines.py
+++ b/bigquery/samples/list_routines.py
@@ -26,7 +26,7 @@ def list_routines(client, dataset_id):
     # the routines you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    routines = client.list_routines(dataset_id) # API request.
+    routines = client.list_routines(dataset_id) # Make an API request.
 
     print("Routines contained in dataset {}:".format(dataset_id))
     for routine in routines:
diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py
index 951c600badc7..d7576616e191 100644
--- a/bigquery/samples/list_tables.py
+++ b/bigquery/samples/list_tables.py
@@ -26,7 +26,7 @@ def list_tables(client, dataset_id):
     # the tables you are listing.
     # dataset_id = 'your-project.your_dataset'
 
-    tables = client.list_tables(dataset_id) # API request.
+    tables = client.list_tables(dataset_id) # Make an API request.
print("Tables contained in '{}':".format(dataset_id)) for table in tables: diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py index aed88180988c..f08712d4dc32 100644 --- a/bigquery/samples/load_table_dataframe.py +++ b/bigquery/samples/load_table_dataframe.py @@ -65,10 +65,10 @@ def load_table_dataframe(client, table_id): table_id, job_config=job_config, location="US", # Must match the destination dataset location. - ) # API request. + ) # Make an API request. job.result() # Waits for the job to complete. - table = client.get_table(table_id) # API request. + table = client.get_table(table_id) # Make an API request. print( "Loaded {} rows and {} columns to {}".format( table.num_rows, len(table.schema), table_id diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py index 81c3b67c4d2d..4cc69d4e902a 100644 --- a/bigquery/samples/query_to_arrow.py +++ b/bigquery/samples/query_to_arrow.py @@ -41,7 +41,7 @@ def query_to_arrow(client): CROSS JOIN UNNEST(r.participants) as participant; """ query_job = client.query(sql) - arrow_table = query_job.to_arrow() # API request. + arrow_table = query_job.to_arrow() # Make an API request. print( "Downloaded {} rows, {} columns.".format( diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py index 69a957ae2c5d..134cf1b940cf 100644 --- a/bigquery/samples/update_dataset_access.py +++ b/bigquery/samples/update_dataset_access.py @@ -24,7 +24,7 @@ def update_dataset_access(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) # API request. + dataset = client.get_dataset(dataset_id) # Make an API request. entry = bigquery.AccessEntry( role="READER", @@ -36,7 +36,7 @@ def update_dataset_access(client, dataset_id): entries.append(entry) dataset.access_entries = entries - dataset = client.update_dataset(dataset, ["access_entries"]) # API request. + dataset = client.update_dataset(dataset, ["access_entries"]) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py index aefa44a8afad..7b68ede8d2be 100644 --- a/bigquery/samples/update_dataset_default_table_expiration.py +++ b/bigquery/samples/update_dataset_default_table_expiration.py @@ -25,12 +25,12 @@ def update_dataset_default_table_expiration(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) # API request. + dataset = client.get_dataset(dataset_id) # Make an API request. dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # in milliseconds. dataset = client.update_dataset( dataset, ["default_table_expiration_ms"] - ) # API request. + ) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py index 1a67bcacd374..08eed8da2b64 100644 --- a/bigquery/samples/update_dataset_description.py +++ b/bigquery/samples/update_dataset_description.py @@ -25,9 +25,9 @@ def update_dataset_description(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. 
     # dataset_id = 'your-project.your_dataset'
 
-    dataset = client.get_dataset(dataset_id) # API request.
+    dataset = client.get_dataset(dataset_id) # Make an API request.
     dataset.description = "Updated description."
-    dataset = client.update_dataset(dataset, ["description"]) # API request.
+    dataset = client.update_dataset(dataset, ["description"]) # Make an API request.
 
     full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id)
     print(
diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py
index 725d95840905..7583c410e1ef 100644
--- a/bigquery/samples/update_model.py
+++ b/bigquery/samples/update_model.py
@@ -26,9 +26,9 @@ def update_model(client, model_id):
     # TODO(developer): Set model_id to the ID of the model to fetch.
     # model_id = 'your-project.your_dataset.your_model'
 
-    model = client.get_model(model_id) # API request.
+    model = client.get_model(model_id) # Make an API request.
     model.description = "This model was modified from a Python program."
-    model = client.update_model(model, ["description"]) # API request.
+    model = client.update_model(model, ["description"]) # Make an API request.
 
     full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
     print(
diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py
index 8de7a05297fd..4489d68f7ee4 100644
--- a/bigquery/samples/update_routine.py
+++ b/bigquery/samples/update_routine.py
@@ -41,6 +41,6 @@ def update_routine(client, routine_id):
             "type_",
             "return_type",
         ],
-    ) # API request.
+    ) # Make an API request.
     # [END bigquery_update_routine]
     return routine
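
A minimal usage sketch (not part of the patch): every update_* sample touched
by this series follows the same fetch, mutate, update-with-field-mask pattern,
strung together below end to end. It assumes application default credentials,
and "your-project.your_dataset" is a placeholder ID you must replace.

    from google.cloud import bigquery

    client = bigquery.Client()  # Uses application default credentials.

    # TODO(developer): placeholder; set to a dataset you own.
    dataset_id = "your-project.your_dataset"

    dataset = client.get_dataset(dataset_id)  # Make an API request.
    dataset.description = "Updated description."

    # Pass the list of changed properties so only those fields are sent back.
    dataset = client.update_dataset(dataset, ["description"])  # Make an API request.
    print("Updated description of {}".format(dataset.dataset_id))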