Skip to content

Commit

Permalink
bigquery: rename name field of Table to table_id (#3959)
Browse files · Browse the repository at this point in the history
* bigquery: rename name field of Table to table_id

Also rename table_id to full_table_id.

* fix lint errors

* fix doc
  • Loading branch information
jba authored Sep 14, 2017
1 parent 6ceef1e commit 7cd0989
Show file tree
Hide file tree
Showing 5 changed files with 70 additions and 61 deletions.
10 changes: 5 additions & 5 deletions bigquery/google/cloud/bigquery/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,7 @@ def _build_resource(self):
'destinationTable': {
'projectId': self.destination.project,
'datasetId': self.destination.dataset_id,
'tableId': self.destination.name,
'tableId': self.destination.table_id,
},
},
},
Expand Down Expand Up @@ -901,7 +901,7 @@ def _build_resource(self):
source_refs = [{
'projectId': table.project,
'datasetId': table.dataset_id,
'tableId': table.name,
'tableId': table.table_id,
} for table in self.sources]

resource = {
Expand All @@ -915,7 +915,7 @@ def _build_resource(self):
'destinationTable': {
'projectId': self.destination.project,
'datasetId': self.destination.dataset_id,
'tableId': self.destination.name,
'tableId': self.destination.table_id,
},
},
},
Expand Down Expand Up @@ -1059,7 +1059,7 @@ def _build_resource(self):
source_ref = {
'projectId': self.source.project,
'datasetId': self.source.dataset_id,
'tableId': self.source.name,
'tableId': self.source.table_id,
}

resource = {
Expand Down Expand Up @@ -1248,7 +1248,7 @@ def _destination_table_resource(self):
return {
'projectId': self.destination.project,
'datasetId': self.destination.dataset_id,
'tableId': self.destination.name,
'tableId': self.destination.table_id,
}

def _populate_config_resource_booleans(self, configuration):
Expand Down
45 changes: 27 additions & 18 deletions bigquery/google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,8 +90,8 @@ class Table(object):
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/tables
:type name: str
:param name: the name of the table
:type table_id: str
:param table_id: the ID of the table
:type dataset: :class:`google.cloud.bigquery.dataset.Dataset`
:param dataset: The dataset which contains the table.
Expand All @@ -102,8 +102,8 @@ class Table(object):

_schema = None

def __init__(self, name, dataset, schema=()):
self.name = name
def __init__(self, table_id, dataset, schema=()):
self._table_id = table_id
self._dataset = dataset
self._properties = {}
# Let the @property do validation.
Expand All @@ -127,14 +127,23 @@ def dataset_id(self):
"""
return self._dataset.dataset_id

@property
def table_id(self):
    """ID of the table (the user-assigned identifier, as opposed to
    :attr:`full_table_id`, which is server-assigned).

    :rtype: str
    :returns: the table ID.
    """
    return self._table_id

@property
def path(self):
"""URL path for the table's APIs.
:rtype: str
:returns: the path based on project and dataste name.
:returns: the path based on project, dataset and table IDs.
"""
return '%s/tables/%s' % (self._dataset.path, self.name)
return '%s/tables/%s' % (self._dataset.path, self.table_id)

@property
def schema(self):
Expand Down Expand Up @@ -224,11 +233,11 @@ def self_link(self):
return self._properties.get('selfLink')

@property
def table_id(self):
"""ID for the table resource.
def full_table_id(self):
"""ID for the table, in the form ``project_id:dataset_id:table_id``.
:rtype: str, or ``NoneType``
:returns: the ID (None until set from the server).
:returns: the full ID (None until set from the server).
"""
return self._properties.get('id')

Expand Down Expand Up @@ -463,7 +472,7 @@ def list_partitions(self, client=None):
"""
query = self._require_client(client).run_sync_query(
'SELECT partition_id from [%s.%s$__PARTITIONS_SUMMARY__]' %
(self.dataset_id, self.name))
(self.dataset_id, self.table_id))
query.run()
return [row[0] for row in query.rows]

Expand All @@ -484,8 +493,8 @@ def from_api_repr(cls, resource, dataset):
'tableId' not in resource['tableReference']):
raise KeyError('Resource lacks required identity information:'
'["tableReference"]["tableId"]')
table_name = resource['tableReference']['tableId']
table = cls(table_name, dataset=dataset)
table_id = resource['tableReference']['tableId']
table = cls(table_id, dataset=dataset)
table._set_properties(resource)
return table

Expand Down Expand Up @@ -528,7 +537,7 @@ def _build_resource(self):
'tableReference': {
'projectId': self._dataset.project,
'datasetId': self._dataset.dataset_id,
'tableId': self.name},
'tableId': self.table_id},
}
if self.description is not None:
resource['description'] = self.description
Expand Down Expand Up @@ -1173,7 +1182,7 @@ def upload_from_file(self,
_maybe_rewind(file_obj, rewind=rewind)
_check_mode(file_obj)
metadata = _get_upload_metadata(
source_format, self._schema, self._dataset, self.name)
source_format, self._schema, self._dataset, self.table_id)
_configure_job_metadata(metadata, allow_jagged_rows,
allow_quoted_newlines, create_disposition,
encoding, field_delimiter,
Expand Down Expand Up @@ -1338,7 +1347,7 @@ def _get_upload_headers(user_agent):
}


def _get_upload_metadata(source_format, schema, dataset, name):
def _get_upload_metadata(source_format, schema, dataset, table_id):
"""Get base metadata for creating a table.
:type source_format: str
Expand All @@ -1351,8 +1360,8 @@ def _get_upload_metadata(source_format, schema, dataset, name):
:type dataset: :class:`~google.cloud.bigquery.dataset.Dataset`
:param dataset: A dataset which contains a table.
:type name: str
:param name: The name of the table.
:type table_id: str
:param table_id: The table_id of the table.
:rtype: dict
:returns: The metadata dictionary.
Expand All @@ -1362,7 +1371,7 @@ def _get_upload_metadata(source_format, schema, dataset, name):
'destinationTable': {
'projectId': dataset.project,
'datasetId': dataset.dataset_id,
'tableId': name,
'tableId': table_id,
},
}
if schema:
Expand Down
12 changes: 6 additions & 6 deletions bigquery/tests/unit/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -763,7 +763,7 @@ def test_list_tables_defaults(self):
self.assertEqual(len(tables), len(DATA['tables']))
for found, expected in zip(tables, DATA['tables']):
self.assertIsInstance(found, Table)
self.assertEqual(found.table_id, expected['id'])
self.assertEqual(found.full_table_id, expected['id'])
self.assertEqual(found.table_type, expected['type'])
self.assertEqual(token, TOKEN)

Expand Down Expand Up @@ -810,7 +810,7 @@ def test_list_tables_explicit(self):
self.assertEqual(len(tables), len(DATA['tables']))
for found, expected in zip(tables, DATA['tables']):
self.assertIsInstance(found, Table)
self.assertEqual(found.table_id, expected['id'])
self.assertEqual(found.full_table_id, expected['id'])
self.assertEqual(found.table_type, expected['type'])
self.assertIsNone(token)

Expand All @@ -827,9 +827,9 @@ def test_table_wo_schema(self):
conn = _Connection({})
client = _Client(project=self.PROJECT, connection=conn)
dataset = self._make_one(self.DS_ID, client=client)
table = dataset.table('table_name')
table = dataset.table('table_id')
self.assertIsInstance(table, Table)
self.assertEqual(table.name, 'table_name')
self.assertEqual(table.table_id, 'table_id')
self.assertIs(table._dataset, dataset)
self.assertEqual(table.schema, [])

Expand All @@ -842,9 +842,9 @@ def test_table_w_schema(self):
dataset = self._make_one(self.DS_ID, client=client)
full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
age = SchemaField('age', 'INTEGER', mode='REQUIRED')
table = dataset.table('table_name', schema=[full_name, age])
table = dataset.table('table_id', schema=[full_name, age])
self.assertIsInstance(table, Table)
self.assertEqual(table.name, 'table_name')
self.assertEqual(table.table_id, 'table_id')
self.assertIs(table._dataset, dataset)
self.assertEqual(table.schema, [full_name, age])

Expand Down
40 changes: 20 additions & 20 deletions bigquery/tests/unit/test_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ class _Base(object):
PROJECT = 'project'
SOURCE1 = 'http://example.com/source1.csv'
DS_ID = 'datset_id'
TABLE_NAME = 'table_name'
TABLE_ID = 'table_id'
JOB_NAME = 'job_name'

def _make_one(self, *args, **kw):
Expand Down Expand Up @@ -207,7 +207,7 @@ def _makeResource(self, started=False, ended=False):
config['destinationTable'] = {
'projectId': self.PROJECT,
'datasetId': self.DS_ID,
'tableId': self.TABLE_NAME,
'tableId': self.TABLE_ID,
}

if ended:
Expand Down Expand Up @@ -276,7 +276,7 @@ def _verifyResourceProperties(self, job, resource):
table_ref = config['destinationTable']
self.assertEqual(job.destination.project, table_ref['projectId'])
self.assertEqual(job.destination.dataset_id, table_ref['datasetId'])
self.assertEqual(job.destination.name, table_ref['tableId'])
self.assertEqual(job.destination.table_id, table_ref['tableId'])

if 'fieldDelimiter' in config:
self.assertEqual(job.field_delimiter,
Expand Down Expand Up @@ -544,7 +544,7 @@ def test_from_api_repr_bare(self):
'destinationTable': {
'projectId': self.PROJECT,
'datasetId': self.DS_ID,
'tableId': self.TABLE_NAME,
'tableId': self.TABLE_ID,
},
}
},
Expand Down Expand Up @@ -604,7 +604,7 @@ def test_begin_w_bound_client(self):
'destinationTable': {
'projectId': self.PROJECT,
'datasetId': self.DS_ID,
'tableId': self.TABLE_NAME,
'tableId': self.TABLE_ID,
},
},
},
Expand Down Expand Up @@ -639,7 +639,7 @@ def test_begin_w_autodetect(self):
'destinationTable': {
'projectId': self.PROJECT,
'datasetId': self.DS_ID,
'tableId': self.TABLE_NAME,
'tableId': self.TABLE_ID,
},
'autodetect': True
},
Expand All @@ -663,7 +663,7 @@ def test_begin_w_alternate_client(self):
'destinationTable': {
'projectId': self.PROJECT,
'datasetId': self.DS_ID,
'tableId': self.TABLE_NAME,
'tableId': self.TABLE_ID,
},
'allowJaggedRows': True,
'allowQuotedNewlines': True,
Expand Down Expand Up @@ -867,7 +867,7 @@ def _verifyResourceProperties(self, job, resource):
table_ref = config['destinationTable']
self.assertEqual(job.destination.project, table_ref['projectId'])
self.assertEqual(job.destination.dataset_id, table_ref['datasetId'])
self.assertEqual(job.destination.name, table_ref['tableId'])
self.assertEqual(job.destination.table_id, table_ref['tableId'])

sources = config.get('sourceTables')
if sources is None:
Expand All @@ -876,7 +876,7 @@ def _verifyResourceProperties(self, job, resource):
for table_ref, table in zip(sources, job.sources):
self.assertEqual(table.project, table_ref['projectId'])
self.assertEqual(table.dataset_id, table_ref['datasetId'])
self.assertEqual(table.name, table_ref['tableId'])
self.assertEqual(table.table_id, table_ref['tableId'])

if 'createDisposition' in config:
self.assertEqual(job.create_disposition,
Expand Down Expand Up @@ -1219,7 +1219,7 @@ def _verifyResourceProperties(self, job, resource):
table_ref = config['sourceTable']
self.assertEqual(job.source.project, table_ref['projectId'])
self.assertEqual(job.source.dataset_id, table_ref['datasetId'])
self.assertEqual(job.source.name, table_ref['tableId'])
self.assertEqual(job.source.table_id, table_ref['tableId'])

if 'compression' in config:
self.assertEqual(job.compression,
Expand Down Expand Up @@ -1614,7 +1614,7 @@ def _verifyResourceProperties(self, job, resource):
tb_ref = {
'projectId': table.project,
'datasetId': table.dataset_id,
'tableId': table.name
'tableId': table.table_id
}
self.assertEqual(tb_ref, query_config['destinationTable'])
else:
Expand Down Expand Up @@ -1934,21 +1934,21 @@ def test_referenced_tables(self):
local1, local2, remote = job.referenced_tables

self.assertIsInstance(local1, Table)
self.assertEqual(local1.name, 'local1')
self.assertEqual(local1.table_id, 'local1')
self.assertIsInstance(local1._dataset, Dataset)
self.assertEqual(local1.dataset_id, 'dataset')
self.assertEqual(local1.project, self.PROJECT)
self.assertIs(local1._dataset._client, client)

self.assertIsInstance(local2, Table)
self.assertEqual(local2.name, 'local2')
self.assertEqual(local2.table_id, 'local2')
self.assertIsInstance(local2._dataset, Dataset)
self.assertEqual(local2.dataset_id, 'dataset')
self.assertEqual(local2.project, self.PROJECT)
self.assertIs(local2._dataset._client, client)

self.assertIsInstance(remote, Table)
self.assertEqual(remote.name, 'other-table')
self.assertEqual(remote.table_id, 'other-table')
self.assertIsInstance(remote._dataset, Dataset)
self.assertEqual(remote.dataset_id, 'other-dataset')
self.assertEqual(remote.project, 'other-project-123')
Expand Down Expand Up @@ -2706,14 +2706,14 @@ def _get_query_results(self, job_id):

class _Table(object):

def __init__(self, name=None):
self._name = name
def __init__(self, table_id=None):
self._table_id = table_id

@property
def name(self):
if self._name is not None:
return self._name
return TestLoadJob.TABLE_NAME
def table_id(self):
if self._table_id is not None:
return self._table_id
return TestLoadJob.TABLE_ID

@property
def project(self):
Expand Down
Loading

0 comments on commit 7cd0989

Please sign in to comment.