Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cherry pick cleanups from #499. #501

Merged
merged 7 commits into from
Jan 7, 2015
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 30 additions & 18 deletions gcloud/datastore/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,28 +104,40 @@ def get_connection():
return Connection(credentials=scoped_credentials)


def _require_dataset_id():
"""Convenience method to ensure DATASET_ID is set.
def _require_dataset_id(dataset_id=None):
"""Infer a dataset ID from the environment, if not passed explicitly.

:rtype: :class:`str`
:returns: A dataset ID based on the current environment.
:raises: :class:`EnvironmentError` if DATASET_ID is not set.
:type dataset_id: :class:`str`.
:param dataset_id: Optional.

:rtype: :class:`gcloud.datastore.dataset.Dataset`
:returns: A dataset based on the current environment.
:raises: :class:`EnvironmentError` if ``dataset_id`` is None,
and cannot be inferred from the environment.
"""
if _implicit_environ.DATASET_ID is None:
raise EnvironmentError('Dataset ID could not be inferred.')
return _implicit_environ.DATASET_ID
if dataset_id is None:
if _implicit_environ.DATASET_ID is None:
raise EnvironmentError('Dataset ID could not be inferred.')
dataset_id = _implicit_environ.DATASET_ID
return dataset_id


def _require_connection():
"""Convenience method to ensure CONNECTION is set.
def _require_connection(connection=None):
"""Infer a connection from the environment, if not passed explicitly.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional.

:rtype: :class:`gcloud.datastore.connection.Connection`
:returns: A connection based on the current environment.
:raises: :class:`EnvironmentError` if CONNECTION is not set.
:raises: :class:`EnvironmentError` if ``connection`` is None, and
cannot be inferred from the environment.
"""
if _implicit_environ.CONNECTION is None:
raise EnvironmentError('Connection could not be inferred.')
return _implicit_environ.CONNECTION
if connection is None:
if _implicit_environ.CONNECTION is None:
raise EnvironmentError('Connection could not be inferred.')
connection = _implicit_environ.CONNECTION
return connection


def get_entities(keys, missing=None, deferred=None,
Expand Down Expand Up @@ -154,8 +166,8 @@ def get_entities(keys, missing=None, deferred=None,
:rtype: list of :class:`gcloud.datastore.entity.Entity`
:returns: The requested entities.
"""
connection = connection or _require_connection()
dataset_id = dataset_id or _require_dataset_id()
connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id)

entity_pbs = connection.lookup(
dataset_id=dataset_id,
Expand Down Expand Up @@ -199,8 +211,8 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None):
:returns: The (complete) keys allocated with `incomplete_key` as root.
:raises: `ValueError` if `incomplete_key` is not a partial key.
"""
connection = connection or _require_connection()
dataset_id = dataset_id or _require_dataset_id()
connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id)

if not incomplete_key.is_partial:
raise ValueError(('Key is not partial.', incomplete_key))
Expand Down
34 changes: 16 additions & 18 deletions gcloud/datastore/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,17 +163,13 @@ def lookup(self, dataset_id, key_pbs,
(:class:`gcloud.datastore.datastore_v1_pb2.Key` and
:class:`gcloud.datastore.datastore_v1_pb2.Entity`) and is used
under the hood for methods like
:func:`gcloud.datastore.dataset.Dataset.get_entity`:
:func:`gcloud.datastore.key.Key.get`:

>>> from gcloud import datastore
>>> from gcloud.datastore.key import Key
>>> connection = datastore.get_connection()
>>> dataset = connection.dataset('dataset-id')
>>> key = Key(dataset=dataset).kind('MyKind').id(1234)

Using the :class:`gcloud.datastore.dataset.Dataset` helper:

>>> dataset.get_entity(key)
>>> key = Key('MyKind', 1234, dataset_id='dataset-id')
>>> key.get()
<Entity object>

Using the ``connection`` class directly:
Expand All @@ -182,7 +178,7 @@ def lookup(self, dataset_id, key_pbs,
<Entity protobuf>

:type dataset_id: string
:param dataset_id: The dataset to look up the keys.
:param dataset_id: The ID of the dataset to look up the keys.

:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
(or a single Key)
Expand Down Expand Up @@ -262,12 +258,12 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False):
uses this method to fetch data:

>>> from gcloud import datastore
>>> from gcloud.datastore.query import Query
>>> connection = datastore.get_connection()
>>> dataset = connection.dataset('dataset-id')
>>> query = dataset.query().kind('MyKind').filter(
... 'property', '=', 'val')
>>> query = Query('MyKind', dataset_id='dataset-id')
>>> query.add_filter('property', '=', 'val')

Using the `fetch`` method...
Using the query's ``fetch_page`` method...

>>> entities, cursor, more_results = query.fetch_page()
>>> entities
Expand Down Expand Up @@ -319,7 +315,7 @@ def begin_transaction(self, dataset_id, serializable=False):
Maps the ``DatastoreService.BeginTransaction`` protobuf RPC.

:type dataset_id: string
:param dataset_id: The dataset over which to execute the transaction.
:param dataset_id: The ID of the dataset to which the transaction applies.
"""

if self.transaction():
Expand All @@ -346,7 +342,7 @@ def commit(self, dataset_id, mutation_pb):
Maps the ``DatastoreService.Commit`` protobuf RPC.

:type dataset_id: string
:param dataset_id: The dataset in which to perform the changes.
:param dataset_id: The ID of the dataset in which to perform the changes.

:type mutation_pb: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`.
:param mutation_pb: The protobuf for the mutations being saved.
Expand Down Expand Up @@ -376,7 +372,8 @@ def rollback(self, dataset_id):
if the connection isn't currently in a transaction.

:type dataset_id: string
:param dataset_id: The dataset to which the transaction belongs.
:param dataset_id: The ID of the dataset to which the transaction
belongs.
"""
if not self.transaction() or not self.transaction().id:
raise ValueError('No transaction to rollback.')
Expand All @@ -393,7 +390,8 @@ def allocate_ids(self, dataset_id, key_pbs):
Maps the ``DatastoreService.AllocateIds`` protobuf RPC.

:type dataset_id: string
:param dataset_id: The dataset to which the transaction belongs.
:param dataset_id: The ID of the dataset to which the transaction
belongs.

:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pbs: The keys for which the backend should allocate IDs.
Expand All @@ -418,7 +416,7 @@ def save_entity(self, dataset_id, key_pb, properties,
not passed in 'properties' no longer be set for the entity.

:type dataset_id: string
:param dataset_id: The dataset in which to save the entity.
:param dataset_id: The ID of the dataset in which to save the entity.

:type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pb: The complete or partial key for the entity.
Expand Down Expand Up @@ -490,7 +488,7 @@ def delete_entities(self, dataset_id, key_pbs):
:func:`gcloud.datastore.entity.Entity.delete` method.

:type dataset_id: string
:param dataset_id: The dataset from which to delete the keys.
:param dataset_id: The ID of the dataset from which to delete the keys.

:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pbs: The keys to delete from the datastore.
Expand Down
5 changes: 2 additions & 3 deletions gcloud/datastore/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,10 +40,9 @@ class Entity(dict):
This means you could take an existing entity and change the key
to duplicate the object.

Use :func:`gcloud.datastore.dataset.Dataset.get_entity`
to retrieve an existing entity.
Use :meth:`gcloud.datastore.key.Key.get` to retrieve an existing entity.

>>> dataset.get_entity(key)
>>> key.get()
<Entity[{'kind': 'EntityKind', id: 1234}] {'property': 'value'}>

You can then set values on the entity just like you would on any
Expand Down
50 changes: 29 additions & 21 deletions gcloud/datastore/key.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,39 +66,25 @@ def __init__(self, *path_args, **kwargs):
keyword argument.
"""
self._flat_path = path_args
self._parent = kwargs.get('parent')
parent = self._parent = kwargs.get('parent')
self._namespace = kwargs.get('namespace')
self._dataset_id = kwargs.get('dataset_id')
dataset_id = kwargs.get('dataset_id')
self._dataset_id = _validate_dataset_id(dataset_id, parent)
# _flat_path, _parent, _namespace and _dataset_id must be set before
# _combine_args() is called.
self._path = self._combine_args()
self._validate_dataset_id()

def _validate_dataset_id(self):
"""Ensures the dataset ID is set.

If unset, attempts to imply the ID from the environment.

:raises: `ValueError` if there is no `dataset_id` and none
can be implied.
"""
if self._dataset_id is None:
if _implicit_environ.DATASET_ID is not None:
self._dataset_id = _implicit_environ.DATASET_ID
else:
raise ValueError('A Key must have a dataset ID set.')

@staticmethod
def _parse_path(path_args):
"""Parses positional arguments into key path with kinds and IDs.

:type path_args: :class:`tuple`
:param path_args: A tuple from positional arguments. Should be
alternating list of kinds (string) and id/name
alternating list of kinds (string) and ID/name
parts (int or string).

:rtype: list of dict
:returns: A list of key parts with kind and id or name set.
:returns: A list of key parts with kind and ID or name set.
:raises: `ValueError` if there are no `path_args`, if one of the
kinds is not a string or if one of the IDs/names is not
a string or an integer.
Expand Down Expand Up @@ -140,7 +126,7 @@ def _combine_args(self):
_namespace and _dataset_id if not already set.

:rtype: list of dict
:returns: A list of key parts with kind and id or name set.
:returns: A list of key parts with kind and ID or name set.
:raises: `ValueError` if the parent key is not complete.
"""
child_path = self._parse_path(self._flat_path)
Expand Down Expand Up @@ -344,7 +330,7 @@ def dataset_id(self):
"""Dataset ID getter.

:rtype: :class:`str`
:returns: The key's dataset.
:returns: The key's dataset ID.
"""
return self._dataset_id

Expand Down Expand Up @@ -383,3 +369,25 @@ def parent(self):

def __repr__(self):
return '<Key%s, dataset=%s>' % (self.path, self.dataset_id)


def _validate_dataset_id(dataset_id, parent):
"""Ensure the dataset ID is set appropriately.

If ``parent`` is passed, skip the test (it will be checked / fixed up
later).

If ``dataset_id`` is unset, attempt to infer the ID from the environment.

:raises: `ValueError` if ``dataset_id`` is None and none can be inferred.
"""
if parent is None:

if dataset_id is None:

if _implicit_environ.DATASET_ID is None:
raise ValueError("A Key must have a dataset ID set.")

dataset_id = _implicit_environ.DATASET_ID

return dataset_id
Loading