Merge pull request #1282 from tseaver/1278-sync_query_results_not_job
Don't treat synchronous query as a job
tseaver committed Dec 14, 2015
2 parents f7d0371 + 2262fa0 commit 883bfe6
Showing 9 changed files with 880 additions and 765 deletions.
12 changes: 6 additions & 6 deletions docs/bigquery-usage.rst
@@ -279,17 +279,17 @@ Run a query which can be expected to complete within bounded time:
 >>> query = """\
 SELECT count(*) AS age_count FROM dataset_name.person_ages
 """
->>> job = client.run_sync_query(query)
->>> job.timeout_ms = 1000
->>> job.run()               # API request
+>>> query = client.run_sync_query(query)
+>>> query.timeout_ms = 1000
+>>> query.run()             # API request
 >>> retry_count = 100
 >>> while retry_count > 0 and not job.complete:
 ...     retry_count -= 1
 ...     time.sleep(10)
-...     job.reload()        # API request
->>> job.schema
+...     query.reload()      # API request
+>>> query.schema
 [{'name': 'age_count', 'type': 'integer', 'mode': 'nullable'}]
->>> job.rows
+>>> query.rows
 [(15,)]

.. note::
71 changes: 71 additions & 0 deletions gcloud/bigquery/_helpers.py
@@ -58,6 +58,7 @@ def _record_from_json(value, field):
def _string_from_json(value, _):
    return value


_CELLDATA_FROM_JSON = {
    'INTEGER': _int_from_json,
    'FLOAT': _float_from_json,
@@ -81,3 +82,73 @@ def _rows_from_json(rows, schema):
            row_data.append(converter(cell['v'], field))
        rows_data.append(tuple(row_data))
    return rows_data


class _ConfigurationProperty(object):
    """Base property implementation.

    Values will be stored on a `_configuration` helper attribute of the
    property's job instance.

    :type name: string
    :param name: name of the property
    """

    def __init__(self, name):
        self.name = name
        self._backing_name = '_%s' % (self.name,)

    def __get__(self, instance, owner):
        """Descriptor protocol: accessor"""
        if instance is None:
            return self
        return getattr(instance._configuration, self._backing_name)

    def _validate(self, value):
        """Subclasses override to impose validation policy."""
        pass

    def __set__(self, instance, value):
        """Descriptor protocol: mutator"""
        self._validate(value)
        setattr(instance._configuration, self._backing_name, value)

    def __delete__(self, instance):
        """Descriptor protocol: deleter"""
        delattr(instance._configuration, self._backing_name)


class _TypedProperty(_ConfigurationProperty):
    """Property implementation: validates based on value type.

    :type name: string
    :param name: name of the property

    :type property_type: type or sequence of types
    :param property_type: type to be validated
    """
    def __init__(self, name, property_type):
        super(_TypedProperty, self).__init__(name)
        self.property_type = property_type

    def _validate(self, value):
        if not isinstance(value, self.property_type):
            raise ValueError('Required type: %s' % (self.property_type,))


class _EnumProperty(_ConfigurationProperty):
    """Pseudo-enumeration class.

    Subclasses must define ``ALLOWED`` as a class-level constant: it must
    be a sequence of strings.

    :type name: string
    :param name: name of the property
    """
    def _validate(self, value):
        """Check that ``value`` is one of the allowed values.

        :raises: ValueError if value is not allowed.
        """
        if value not in self.ALLOWED:
            raise ValueError('Pass one of: %s' % ', '.join(self.ALLOWED))
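
The descriptors above assume a host class that keeps its settings on a ``_configuration`` helper object. The following is a minimal sketch of how they might be wired together; ``SampleConfig``, ``SampleJob``, and ``Priority`` are hypothetical names used only for illustration and are not part of this commit.

# Minimal sketch (not part of this commit): illustrates how the descriptors
# store values on a per-instance ``_configuration`` helper object.
from gcloud.bigquery._helpers import _EnumProperty, _TypedProperty


class SampleConfig(object):
    # plain holder for per-instance settings
    _priority = None
    _timeout_ms = None


class Priority(_EnumProperty):
    # pseudo-enum: only these strings pass validation
    ALLOWED = ('INTERACTIVE', 'BATCH')


class SampleJob(object):
    priority = Priority('priority')
    timeout_ms = _TypedProperty('timeout_ms', int)

    def __init__(self):
        self._configuration = SampleConfig()


job = SampleJob()
job.timeout_ms = 1000     # stored as job._configuration._timeout_ms
job.priority = 'BATCH'    # checked against Priority.ALLOWED
print(job.timeout_ms)     # read back through the descriptor -> 1000
# job.priority = 'BOGUS'  # would raise ValueError('Pass one of: INTERACTIVE, BATCH')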
18 changes: 9 additions & 9 deletions gcloud/bigquery/client.py
@@ -21,8 +21,8 @@
 from gcloud.bigquery.job import CopyJob
 from gcloud.bigquery.job import ExtractTableToStorageJob
 from gcloud.bigquery.job import LoadTableFromStorageJob
-from gcloud.bigquery.job import RunAsyncQueryJob
-from gcloud.bigquery.job import RunSyncQueryJob
+from gcloud.bigquery.job import QueryJob
+from gcloud.bigquery.query import QueryResults


 class Client(JSONClient):
@@ -179,18 +179,18 @@ def run_async_query(self, job_name, query):
         :type query: string
         :param query: SQL query to be executed
-        :rtype: :class:`gcloud.bigquery.job.RunAsyncQueryJob`
-        :returns: a new ``RunAsyncQueryJob`` instance
+        :rtype: :class:`gcloud.bigquery.job.QueryJob`
+        :returns: a new ``QueryJob`` instance
         """
-        return RunAsyncQueryJob(job_name, query, client=self)
+        return QueryJob(job_name, query, client=self)

     def run_sync_query(self, query):
-        """Construct a job for running a SQL query synchronously.
+        """Run a SQL query synchronously.
         :type query: string
         :param query: SQL query to be executed
-        :rtype: :class:`gcloud.bigquery.job.RunSyncQueryJob`
-        :returns: a new ``RunSyncQueryJob`` instance
+        :rtype: :class:`gcloud.bigquery.query.QueryResults`
+        :returns: a new ``QueryResults`` instance
         """
-        return RunSyncQueryJob(query, client=self)
+        return QueryResults(query, client=self)
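
Taken together with the docs change above, the client surface after this commit looks roughly as follows. This is a hedged sketch, not a verbatim excerpt from the repository: it assumes a configured ``client``, an existing ``dataset_name.person_ages`` table, and that ``begin()`` is the way an asynchronous ``QueryJob`` is started.

# Sketch of usage after this change (assumptions noted above).
from gcloud import bigquery

client = bigquery.Client()

# Asynchronous path: still a job, now named QueryJob.
job = client.run_async_query(
    'sample-job-name',
    'SELECT count(*) AS age_count FROM dataset_name.person_ages')
job.begin()        # assumed async-job API: starts the job server-side

# Synchronous path: no longer a job at all -- a QueryResults instance.
query = client.run_sync_query(
    'SELECT count(*) AS age_count FROM dataset_name.person_ages')
query.timeout_ms = 1000
query.run()        # API request
if query.complete:
    print(query.rows)        # e.g. [(15,)]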