
Commit

feat: list_tables, list_projects, list_datasets, list_models, list_routines, and list_jobs now accept a page_size parameter to control page size (#686)
jimfulton authored Jun 6, 2021
1 parent dea2402 commit 1f1c4b7
Showing 10 changed files with 605 additions and 507 deletions.
39 changes: 34 additions & 5 deletions google/cloud/bigquery/client.py
@@ -286,6 +286,7 @@ def list_projects(
         page_token: str = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: float = None,
+        page_size: int = None,
     ) -> page_iterator.Iterator:
         """List projects for the project associated with this client.
@@ -294,8 +295,8 @@ def list_projects(
         Args:
             max_results (Optional[int]):
-                Maximum number of projects to return, If not passed,
-                defaults to a value set by the API.
+                Maximum number of projects to return.
+                Defaults to a value set by the API.
             page_token (Optional[str]):
                 Token representing a cursor into the projects. If not passed,
@@ -310,6 +311,10 @@ def list_projects(
                 The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
+            page_size (Optional[int]):
+                Maximum number of projects to return in each page.
+                Defaults to a value set by the API.
 
         Returns:
             google.api_core.page_iterator.Iterator:
                 Iterator of :class:`~google.cloud.bigquery.client.Project`
@@ -335,6 +340,7 @@ def api_request(*args, **kwargs):
             items_key="projects",
             page_token=page_token,
             max_results=max_results,
+            page_size=page_size,
         )
 
     def list_datasets(
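
Note (not part of the diff): a minimal usage sketch of the new parameter, assuming default credentials and a release that includes this change. page_size caps how many projects each underlying API request returns, while max_results still caps the total across all pages.

    from google.cloud import bigquery

    client = bigquery.Client()
    # At most 30 projects in total, fetched 10 per API request.
    for project in client.list_projects(max_results=30, page_size=10):
        print(project.project_id)
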
@@ -346,6 +352,7 @@ def list_datasets(
         page_token: str = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: float = None,
+        page_size: int = None,
     ) -> page_iterator.Iterator:
         """List datasets for the project associated with this client.
@@ -375,6 +382,8 @@ def list_datasets(
             timeout (Optional[float]):
                 The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
+            page_size (Optional[int]):
+                Maximum number of datasets to return per page.
 
         Returns:
             google.api_core.page_iterator.Iterator:
@@ -414,6 +423,7 @@ def api_request(*args, **kwargs):
             page_token=page_token,
             max_results=max_results,
             extra_params=extra_params,
+            page_size=page_size,
         )
 
     def dataset(self, dataset_id: str, project: str = None) -> DatasetReference:
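
Note (not part of the diff): because list_datasets returns an HTTPIterator, the pages themselves are accessible, so page_size directly controls how many datasets arrive per request. A sketch, with "my-project" as a placeholder:

    from google.cloud import bigquery

    client = bigquery.Client(project="my-project")
    iterator = client.list_datasets(page_size=50)
    for page in iterator.pages:  # one API request per page
        for dataset in page:
            print(dataset.dataset_id)
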
@@ -1270,6 +1280,7 @@ def list_models(
         page_token: str = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: float = None,
+        page_size: int = None,
     ) -> page_iterator.Iterator:
         """[Beta] List models in the dataset.
@@ -1288,7 +1299,7 @@ def list_models(
                 to create a dataset reference from a string using
                 :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`.
             max_results (Optional[int]):
-                Maximum number of models to return. If not passed, defaults to a
+                Maximum number of models to return. Defaults to a
                 value set by the API.
             page_token (Optional[str]):
                 Token representing a cursor into the models. If not passed,
@@ -1301,6 +1312,9 @@ def list_models(
             timeout (Optional[float]):
                 The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
+            page_size (Optional[int]):
+                Maximum number of models to return per page.
+                Defaults to a value set by the API.
 
         Returns:
             google.api_core.page_iterator.Iterator:
@@ -1331,6 +1345,7 @@ def api_request(*args, **kwargs):
             items_key="models",
             page_token=page_token,
             max_results=max_results,
+            page_size=page_size,
         )
         result.dataset = dataset
         return result
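
Note (not part of the diff): the same pattern applies to models; "my-project.my_dataset" below is a placeholder dataset path, resolved via DatasetReference.from_string as documented above.

    from google.cloud import bigquery

    client = bigquery.Client()
    for model in client.list_models("my-project.my_dataset", page_size=25):
        print(model.model_id)
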
@@ -1342,6 +1357,7 @@ def list_routines(
         page_token: str = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: float = None,
+        page_size: int = None,
     ) -> page_iterator.Iterator:
         """[Beta] List routines in the dataset.
@@ -1360,7 +1376,7 @@ def list_routines(
                 to create a dataset reference from a string using
                 :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`.
             max_results (Optional[int]):
-                Maximum number of routines to return. If not passed, defaults
+                Maximum number of routines to return. Defaults
                 to a value set by the API.
             page_token (Optional[str]):
                 Token representing a cursor into the routines. If not passed,
@@ -1373,6 +1389,9 @@ def list_routines(
             timeout (Optional[float]):
                 The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
+            page_size (Optional[int]):
+                Maximum number of routines to return per page.
+                Defaults to a value set by the API.
 
         Returns:
             google.api_core.page_iterator.Iterator:
@@ -1403,6 +1422,7 @@ def api_request(*args, **kwargs):
             items_key="routines",
             page_token=page_token,
             max_results=max_results,
+            page_size=page_size,
         )
         result.dataset = dataset
         return result
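
Note (not part of the diff): a sketch of combining page_size with page_token to fetch one page now and resume later; identifiers are placeholders.

    from google.cloud import bigquery

    client = bigquery.Client()
    iterator = client.list_routines("my-project.my_dataset", page_size=10)
    first_page = next(iterator.pages)  # fetch only the first page
    for routine in first_page:
        print(routine.routine_id)
    # iterator.next_page_token can be stored and passed back as page_token
    # in a later call to resume where this listing left off.
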
@@ -1414,6 +1434,7 @@ def list_tables(
         page_token: str = None,
         retry: retries.Retry = DEFAULT_RETRY,
         timeout: float = None,
+        page_size: int = None,
     ) -> page_iterator.Iterator:
         """List tables in the dataset.
@@ -1432,7 +1453,7 @@ def list_tables(
                 to create a dataset reference from a string using
                 :func:`google.cloud.bigquery.dataset.DatasetReference.from_string`.
             max_results (Optional[int]):
-                Maximum number of tables to return. If not passed, defaults
+                Maximum number of tables to return. Defaults
                 to a value set by the API.
             page_token (Optional[str]):
                 Token representing a cursor into the tables. If not passed,
@@ -1445,6 +1466,9 @@ def list_tables(
             timeout (Optional[float]):
                The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
+            page_size (Optional[int]):
+                Maximum number of tables to return per page.
+                Defaults to a value set by the API.
 
         Returns:
             google.api_core.page_iterator.Iterator:
@@ -1474,6 +1498,7 @@ def api_request(*args, **kwargs):
             items_key="tables",
             page_token=page_token,
             max_results=max_results,
+            page_size=page_size,
         )
         result.dataset = dataset
         return result
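
Note (not part of the diff): list_tables accepts a DatasetReference as well as a string; a sketch with placeholder names.

    from google.cloud import bigquery

    client = bigquery.Client()
    dataset_ref = bigquery.DatasetReference("my-project", "my_dataset")
    for table in client.list_tables(dataset_ref, page_size=100):
        print(table.table_id)
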
@@ -2112,6 +2137,7 @@ def list_jobs(
         timeout: float = None,
         min_creation_time: datetime.datetime = None,
         max_creation_time: datetime.datetime = None,
+        page_size: int = None,
     ) -> page_iterator.Iterator:
         """List jobs for the project associated with this client.
@@ -2157,6 +2183,8 @@ def list_jobs(
                 Max value for job creation time. If set, only jobs created
                 before or at this timestamp are returned. If the datetime has
                 no time zone assumes UTC time.
+            page_size (Optional[int]):
+                Maximum number of jobs to return per page.
 
         Returns:
             google.api_core.page_iterator.Iterator:
@@ -2208,6 +2236,7 @@ def api_request(*args, **kwargs):
             page_token=page_token,
             max_results=max_results,
             extra_params=extra_params,
+            page_size=page_size,
         )
 
     def load_table_from_uri(
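
Note (not part of the diff): for list_jobs, page_size combines naturally with the creation-time filters shown above. A sketch assuming default credentials:

    import datetime

    from google.cloud import bigquery

    client = bigquery.Client()
    yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    # Jobs created in the last day, fetched 20 per API request.
    for job in client.list_jobs(min_creation_time=yesterday, page_size=20):
        print(job.job_id, job.state)
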
2 changes: 1 addition & 1 deletion setup.py
@@ -29,7 +29,7 @@
 # 'Development Status :: 5 - Production/Stable'
 release_status = "Development Status :: 5 - Production/Stable"
 dependencies = [
-    "google-api-core[grpc] >= 1.23.0, < 2.0.0dev",
+    "google-api-core[grpc] >= 1.29.0, < 2.0.0dev",
     "proto-plus >= 1.10.0",
     "google-cloud-core >= 1.4.1, < 2.0dev",
     "google-resumable-media >= 0.6.0, < 2.0dev",
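
Note (not part of the diff): the floor moves to 1.29.0 presumably because page_size support in google.api_core.page_iterator.HTTPIterator first shipped in that release; that is an inference from this commit, not stated in it. A quick check of the installed version:

    import google.api_core
    from packaging.version import parse

    # Assumes the `packaging` distribution is installed.
    assert parse(google.api_core.__version__) >= parse("1.29.0"), \
        "page_size requires google-api-core >= 1.29.0"
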
2 changes: 1 addition & 1 deletion testing/constraints-3.6.txt
@@ -5,7 +5,7 @@
 #
 # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
 # Then this file should have foo==1.14.0
-google-api-core==1.23.0
+google-api-core==1.29.0
 google-cloud-bigquery-storage==2.0.0
 google-cloud-core==1.4.1
 google-resumable-media==0.6.0
