Adding lazy loading support for datastore connection. #668

Merged 4 commits on Feb 25, 2015
42 changes: 3 additions & 39 deletions gcloud/datastore/__init__.py
@@ -46,10 +46,11 @@
when race conditions may occur.
"""

from gcloud import credentials
from gcloud.datastore import _implicit_environ
from gcloud.datastore._implicit_environ import SCOPE
from gcloud.datastore._implicit_environ import get_connection
from gcloud.datastore._implicit_environ import get_default_connection
from gcloud.datastore._implicit_environ import get_default_dataset_id
from gcloud.datastore._implicit_environ import set_default_connection
from gcloud.datastore._implicit_environ import set_default_dataset_id
from gcloud.datastore.api import allocate_ids
from gcloud.datastore.api import delete
@@ -63,21 +64,6 @@
from gcloud.datastore.transaction import Transaction


SCOPE = ('https://www.googleapis.com/auth/datastore',
'https://www.googleapis.com/auth/userinfo.email')
"""The scopes required for authenticating as a Cloud Datastore consumer."""


def set_default_connection(connection=None):
"""Set default connection either explicitly or implicitly as fall-back.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: A connection provided to be the default.
"""
connection = connection or get_connection()
_implicit_environ._DEFAULTS.connection = connection


def set_defaults(dataset_id=None, connection=None):
"""Set defaults either explicitly or implicitly as fall-back.

@@ -96,25 +82,3 @@ def set_defaults(dataset_id=None, connection=None):
"""
set_default_dataset_id(dataset_id=dataset_id)
set_default_connection(connection=connection)


def get_connection():
"""Shortcut method to establish a connection to the Cloud Datastore.

Use this if you are going to access several datasets
with the same set of credentials (unlikely):

>>> from gcloud import datastore

>>> connection = datastore.get_connection()
>>> key1 = datastore.Key('Kind', 1234, dataset_id='dataset1')
>>> key2 = datastore.Key('Kind', 1234, dataset_id='dataset2')
>>> entity1 = datastore.get(key1, connection=connection)
>>> entity2 = datastore.get(key2, connection=connection)

:rtype: :class:`gcloud.datastore.connection.Connection`
:returns: A connection defined with the proper credentials.
"""
implicit_credentials = credentials.get_credentials()
scoped_credentials = implicit_credentials.create_scoped(SCOPE)
return Connection(credentials=scoped_credentials)
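
With this change, SCOPE, get_connection, and set_default_connection move out of gcloud/datastore/__init__.py and are simply re-imported from _implicit_environ, so the public import surface of gcloud.datastore stays the same. A quick interactive sketch of that surface (illustrative only, and assuming application default credentials are available in the environment):

>>> from gcloud import datastore
>>> from gcloud.datastore.connection import Connection
>>> datastore.get_default_connection() is None
True
>>> datastore.set_default_connection()  # no argument: falls back to get_connection()
>>> isinstance(datastore.get_default_connection(), Connection)
True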
48 changes: 47 additions & 1 deletion gcloud/datastore/_implicit_environ.py
@@ -28,6 +28,13 @@
except ImportError:
app_identity = None

from gcloud import credentials
from gcloud.datastore.connection import Connection


SCOPE = ('https://www.googleapis.com/auth/datastore',
'https://www.googleapis.com/auth/userinfo.email')
"""The scopes required for authenticating as a Cloud Datastore consumer."""

_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'
_GCD_DATASET_ENV_VAR_NAME = 'DATASTORE_DATASET'
@@ -143,6 +150,38 @@ def get_default_dataset_id():
return _DEFAULTS.dataset_id


def get_connection():
"""Shortcut method to establish a connection to the Cloud Datastore.

Use this if you are going to access several datasets
with the same set of credentials (unlikely):

>>> from gcloud import datastore

>>> connection = datastore.get_connection()
>>> key1 = datastore.Key('Kind', 1234, dataset_id='dataset1')
>>> key2 = datastore.Key('Kind', 1234, dataset_id='dataset2')
>>> entity1 = datastore.get(key1, connection=connection)
>>> entity2 = datastore.get(key2, connection=connection)

:rtype: :class:`gcloud.datastore.connection.Connection`
:returns: A connection defined with the proper credentials.
"""
implicit_credentials = credentials.get_credentials()
scoped_credentials = implicit_credentials.create_scoped(SCOPE)
return Connection(credentials=scoped_credentials)


def set_default_connection(connection=None):
"""Set default connection either explicitly or implicitly as fall-back.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: A connection provided to be the default.
"""
connection = connection or get_connection()
_DEFAULTS.connection = connection


def get_default_connection():
"""Get default connection.

@@ -211,8 +250,15 @@ def dataset_id():
"""Return the implicit default dataset ID."""
return _determine_default_dataset_id()

@_lazy_property_deco
@staticmethod
def connection():
"""Return the implicit default connection."""
return get_connection()

def __init__(self, connection=None, dataset_id=None, implicit=False):
self.connection = connection
if connection is not None or not implicit:
self.connection = connection
if dataset_id is not None or not implicit:
self.dataset_id = dataset_id

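The new connection default leans on _lazy_property_deco, which already lives in _implicit_environ.py but is outside this hunk. A minimal sketch of how such a lazily evaluated class attribute can be built (the names and details below are illustrative assumptions, not necessarily the module's exact implementation):

class _LazyProperty(object):
    """Non-data descriptor that computes a default value on first access."""

    def __init__(self, name, deferred_callable):
        self._name = name
        self._deferred_callable = deferred_callable

    def __get__(self, obj, objtype):
        if obj is None:  # accessed on the class itself, not an instance
            return self
        # Compute the value once and cache it on the instance; because this
        # is a non-data descriptor, the cached attribute wins on later reads.
        setattr(obj, self._name, self._deferred_callable())
        return getattr(obj, self._name)


def _lazy_property_deco(deferred_callable):
    """Turn a (static) method into a lazily computed attribute."""
    if isinstance(deferred_callable, staticmethod):
        deferred_callable = deferred_callable.__func__  # unwrap the function
    return _LazyProperty(deferred_callable.__name__, deferred_callable)

Since _DefaultsContainer.__init__ now assigns self.connection only when a connection is passed explicitly (or when implicit is false), the first read of _DEFAULTS.connection is what actually triggers get_connection(), which is the lazy-loading behavior this pull request adds.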
4 changes: 2 additions & 2 deletions gcloud/datastore/batch.py
@@ -189,8 +189,8 @@ def delete(self, key):
if not _dataset_ids_equal(self._dataset_id, key.dataset_id):
raise ValueError("Key must be from same dataset as batch")

key_pb = key.to_protobuf()
helpers._add_keys_to_request(self.mutation.delete, [key_pb])
key_pb = helpers._prepare_key_for_request(key.to_protobuf())

self.mutation.delete.add().CopyFrom(key_pb)

def begin(self):
"""No-op
41 changes: 38 additions & 3 deletions gcloud/datastore/connection.py
@@ -19,7 +19,6 @@
from gcloud import connection
from gcloud.exceptions import make_exception
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers


_GCD_HOST_ENV_VAR_NAME = 'DATASTORE_HOST'
@@ -183,7 +182,7 @@ def lookup(self, dataset_id, key_pbs,
"""
lookup_request = datastore_pb.LookupRequest()
_set_read_options(lookup_request, eventual, transaction_id)
helpers._add_keys_to_request(lookup_request.key, key_pbs)
_add_keys_to_request(lookup_request.key, key_pbs)

lookup_response = self._rpc(dataset_id, 'lookup', lookup_request,
datastore_pb.LookupResponse)
@@ -363,7 +362,7 @@ def allocate_ids(self, dataset_id, key_pbs):
:returns: An equal number of keys, with IDs filled in by the backend.
"""
request = datastore_pb.AllocateIdsRequest()
helpers._add_keys_to_request(request.key, key_pbs)
_add_keys_to_request(request.key, key_pbs)
# Nothing to do with this response, so just execute the method.
response = self._rpc(dataset_id, 'allocateIds', request,
datastore_pb.AllocateIdsResponse)
@@ -386,3 +385,39 @@ def _set_read_options(request, eventual, transaction_id):
opts.read_consistency = datastore_pb.ReadOptions.EVENTUAL
elif transaction_id:
opts.transaction = transaction_id


def _prepare_key_for_request(key_pb): # pragma: NO COVER copied from helpers
"""Strip the dataset ID from a key protobuf before adding it to a request.

.. note::
This is copied from `helpers` to avoid a cycle:
_implicit_environ -> connection -> helpers -> key -> _implicit_environ

:type key_pb: :class:`gcloud.datastore._datastore_v1_pb2.Key`
:param key_pb: A key to be added to a request.

:rtype: :class:`gcloud.datastore._datastore_v1_pb2.Key`
:returns: A key which will be added to a request. It will be the
original if nothing needs to be changed.
"""
if key_pb.partition_id.HasField('dataset_id'):
new_key_pb = datastore_pb.Key()
new_key_pb.CopyFrom(key_pb)
new_key_pb.partition_id.ClearField('dataset_id')
key_pb = new_key_pb
return key_pb


def _add_keys_to_request(request_field_pb, key_pbs):
"""Add protobuf keys to a request object.

:type request_field_pb: `RepeatedCompositeFieldContainer`
:param request_field_pb: A repeated proto field that contains keys.

:type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
:param key_pbs: The keys to add to a request.
"""
for key_pb in key_pbs:
key_pb = _prepare_key_for_request(key_pb)
request_field_pb.add().CopyFrom(key_pb)
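
To make the duplicated helper concrete, here is a short illustrative session showing what _add_keys_to_request does to a key whose partition carries a dataset ID (the protobuf field names follow the v1 messages used in this diff; treat the exact values as assumptions):

>>> from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
>>> from gcloud.datastore.connection import _add_keys_to_request
>>> key_pb = datastore_pb.Key()
>>> key_pb.partition_id.dataset_id = 's~my-dataset'
>>> elem = key_pb.path_element.add()
>>> elem.kind = 'Kind'
>>> elem.id = 1234
>>> request = datastore_pb.LookupRequest()
>>> _add_keys_to_request(request.key, [key_pb])
>>> request.key[0].partition_id.HasField('dataset_id')  # stripped on the copy
False
>>> key_pb.partition_id.HasField('dataset_id')  # the caller's key is untouched
True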
14 changes: 0 additions & 14 deletions gcloud/datastore/helpers.py
@@ -308,17 +308,3 @@ def _prepare_key_for_request(key_pb):
new_key_pb.partition_id.ClearField('dataset_id')
key_pb = new_key_pb
return key_pb


def _add_keys_to_request(request_field_pb, key_pbs):
"""Add protobuf keys to a request object.

:type request_field_pb: `RepeatedCompositeFieldContainer`
:param request_field_pb: A repeated proto field that contains keys.

:type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
:param key_pbs: The keys to add to a request.
"""
for key_pb in key_pbs:
key_pb = _prepare_key_for_request(key_pb)
request_field_pb.add().CopyFrom(key_pb)
56 changes: 0 additions & 56 deletions gcloud/datastore/test___init__.py
@@ -15,42 +15,6 @@
import unittest2


class Test_set_default_connection(unittest2.TestCase):

def setUp(self):
from gcloud.datastore._testing import _setup_defaults
_setup_defaults(self)

def tearDown(self):
from gcloud.datastore._testing import _tear_down_defaults
_tear_down_defaults(self)

def _callFUT(self, connection=None):
from gcloud.datastore import set_default_connection
return set_default_connection(connection=connection)

def test_set_explicit(self):
from gcloud.datastore import _implicit_environ

self.assertEqual(_implicit_environ.get_default_connection(), None)
fake_cnxn = object()
self._callFUT(connection=fake_cnxn)
self.assertEqual(_implicit_environ.get_default_connection(), fake_cnxn)

def test_set_implicit(self):
from gcloud._testing import _Monkey
from gcloud import datastore
from gcloud.datastore import _implicit_environ

self.assertEqual(_implicit_environ.get_default_connection(), None)

fake_cnxn = object()
with _Monkey(datastore, get_connection=lambda: fake_cnxn):
self._callFUT()

self.assertEqual(_implicit_environ.get_default_connection(), fake_cnxn)


class Test_set_defaults(unittest2.TestCase):

def _callFUT(self, dataset_id=None, connection=None):
@@ -80,23 +44,3 @@ def call_set_connection(connection=None):

self.assertEqual(SET_DATASET_CALLED, [DATASET_ID])
self.assertEqual(SET_CONNECTION_CALLED, [CONNECTION])


class Test_get_connection(unittest2.TestCase):

def _callFUT(self):
from gcloud.datastore import get_connection
return get_connection()

def test_it(self):
from gcloud import credentials
from gcloud.datastore.connection import Connection
from gcloud.test_credentials import _Client
from gcloud._testing import _Monkey

client = _Client()
with _Monkey(credentials, client=client):
found = self._callFUT()
self.assertTrue(isinstance(found, Connection))
self.assertTrue(found._credentials is client._signed)
self.assertTrue(client._get_app_default_called)