From 0f687d56d46e5b14ac78a5f185c38a1bf876bf2a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 2 Oct 2014 13:35:02 -0700 Subject: [PATCH 1/4] Indenting four spaces in all files. --- gcloud/connection.py | 62 +- gcloud/credentials.py | 67 +- gcloud/datastore/__init__.py | 107 ++- gcloud/datastore/connection.py | 614 ++++++++--------- gcloud/datastore/dataset.py | 156 ++--- gcloud/datastore/datastore_v1_pb2.py | 270 ++++---- gcloud/datastore/demo/__init__.py | 4 +- gcloud/datastore/demo/__main__.py | 1 - gcloud/datastore/demo/demo.py | 36 +- gcloud/datastore/entity.py | 4 +- gcloud/datastore/helpers.py | 155 +++-- gcloud/datastore/key.py | 550 +++++++-------- gcloud/datastore/query.py | 675 +++++++++---------- gcloud/datastore/test_connection.py | 1 + gcloud/datastore/test_entity.py | 5 +- gcloud/datastore/transaction.py | 485 +++++++------- gcloud/demo.py | 171 ++--- gcloud/storage/__init__.py | 93 ++- gcloud/storage/acl.py | 454 ++++++------- gcloud/storage/bucket.py | 957 ++++++++++++++------------- gcloud/storage/connection.py | 871 ++++++++++++------------ gcloud/storage/demo/__init__.py | 4 +- gcloud/storage/demo/__main__.py | 1 - gcloud/storage/exceptions.py | 14 +- gcloud/storage/iterator.py | 280 ++++---- gcloud/storage/key.py | 625 ++++++++--------- 26 files changed, 3325 insertions(+), 3337 deletions(-) diff --git a/gcloud/connection.py b/gcloud/connection.py index c8911f3c6384..7ecf47763ce0 100644 --- a/gcloud/connection.py +++ b/gcloud/connection.py @@ -2,42 +2,42 @@ class Connection(object): - """A generic connection to Google Cloud Platform. + """A generic connection to Google Cloud Platform. - Subclasses should understand - only the basic types - in method arguments, - however they should be capable - of returning advanced types. - """ + Subclasses should understand + only the basic types + in method arguments, + however they should be capable + of returning advanced types. + """ - API_BASE_URL = 'https://www.googleapis.com' - """The base of the API call URL.""" + API_BASE_URL = 'https://www.googleapis.com' + """The base of the API call URL.""" - _EMPTY = object() - """A pointer to represent an empty value for default arguments.""" + _EMPTY = object() + """A pointer to represent an empty value for default arguments.""" - def __init__(self, credentials=None): - """ - :type credentials: :class:`gcloud.credentials.Credentials` - :param credentials: The OAuth2 Credentials to use for this connection. - """ + def __init__(self, credentials=None): + """:type credentials: :class:`gcloud.credentials.Credentials` + :param credentials: The OAuth2 Credentials to use for this connection. - self._credentials = credentials + """ - @property - def credentials(self): - return self._credentials + self._credentials = credentials - @property - def http(self): - """A getter for the HTTP transport used in talking to the API. + @property + def credentials(self): + return self._credentials - :rtype: :class:`httplib2.Http` - :returns: A Http object used to transport data. - """ - if not hasattr(self, '_http'): - self._http = httplib2.Http() - if self._credentials: - self._http = self._credentials.authorize(self._http) - return self._http + @property + def http(self): + """A getter for the HTTP transport used in talking to the API. + + :rtype: :class:`httplib2.Http` + :returns: A Http object used to transport data. 
+ """ + if not hasattr(self, '_http'): + self._http = httplib2.Http() + if self._credentials: + self._http = self._credentials.authorize(self._http) + return self._http diff --git a/gcloud/credentials.py b/gcloud/credentials.py index dd8a5bd619b4..e4bcb7d32fe0 100644 --- a/gcloud/credentials.py +++ b/gcloud/credentials.py @@ -4,37 +4,38 @@ class Credentials(object): - """An object used to simplify the OAuth2 credentials library. - - .. note:: - You should not need to use this class directly. - Instead, use the helper methods provided in - :func:`gcloud.datastore.__init__.get_connection` - and - :func:`gcloud.datastore.__init__.get_dataset` - which use this class under the hood. - """ - - @classmethod - def get_for_service_account(cls, client_email, private_key_path, scope=None): - """Gets the credentials for a service account. - - :type client_email: string - :param client_email: The e-mail attached to the service account. - - :type private_key_path: string - :param private_key_path: The path to a private key file (this file was - given to you when you created the service - account). - - :type scope: string or tuple of strings - :param scope: The scope against which to authenticate. - (Different services require different scopes, - check the documentation for which scope is required - for the different levels of access - to any particular API.) + """An object used to simplify the OAuth2 credentials library. + + .. note:: + You should not need to use this class directly. + Instead, use the helper methods provided in + :func:`gcloud.datastore.__init__.get_connection` + and + :func:`gcloud.datastore.__init__.get_dataset` + which use this class under the hood. """ - return client.SignedJwtAssertionCredentials( - service_account_name=client_email, - private_key=open(private_key_path).read(), - scope=scope) + + @classmethod + def get_for_service_account(cls, client_email, private_key_path, + scope=None): + """Gets the credentials for a service account. + + :type client_email: string + :param client_email: The e-mail attached to the service account. + + :type private_key_path: string + :param private_key_path: The path to a private key file (this file was + given to you when you created the service + account). + + :type scope: string or tuple of strings + :param scope: The scope against which to authenticate. + (Different services require different scopes, + check the documentation for which scope is required + for the different levels of access + to any particular API.) + """ + return client.SignedJwtAssertionCredentials( + service_account_name=client_email, + private_key=open(private_key_path).read(), + scope=scope) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 5d663e95c24c..492b56f3180f 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -32,7 +32,6 @@ which represents a lookup or search over the rows in the datastore. """ - __version__ = '0.1.2' SCOPE = ('https://www.googleapis.com/auth/datastore ', @@ -41,65 +40,65 @@ def get_connection(client_email, private_key_path): - """Shortcut method to establish a connection to the Cloud Datastore. + """Shortcut method to establish a connection to the Cloud Datastore. 
- Use this if you are going to access several datasets - with the same set of credentials (unlikely): + Use this if you are going to access several datasets + with the same set of credentials (unlikely): - >>> from gcloud import datastore - >>> connection = datastore.get_connection(email, key_path) - >>> dataset1 = connection.dataset('dataset1') - >>> dataset2 = connection.dataset('dataset2') + >>> from gcloud import datastore + >>> connection = datastore.get_connection(email, key_path) + >>> dataset1 = connection.dataset('dataset1') + >>> dataset2 = connection.dataset('dataset2') - :type client_email: string - :param client_email: The e-mail attached to the service account. + :type client_email: string + :param client_email: The e-mail attached to the service account. - :type private_key_path: string - :param private_key_path: The path to a private key file (this file was - given to you when you created the service - account). + :type private_key_path: string + :param private_key_path: The path to a private key file (this file was + given to you when you created the service + account). - :rtype: :class:`gcloud.datastore.connection.Connection` - :returns: A connection defined with the proper credentials. - """ - from gcloud.credentials import Credentials - from gcloud.datastore.connection import Connection + :rtype: :class:`gcloud.datastore.connection.Connection` + :returns: A connection defined with the proper credentials. + """ + from gcloud.credentials import Credentials + from gcloud.datastore.connection import Connection - credentials = Credentials.get_for_service_account( - client_email, private_key_path, scope=SCOPE) - return Connection(credentials=credentials) + credentials = Credentials.get_for_service_account( + client_email, private_key_path, scope=SCOPE) + return Connection(credentials=credentials) def get_dataset(dataset_id, client_email, private_key_path): - """Establish a connection to a particular dataset in the Cloud Datastore. - - This is a shortcut method for creating a connection and using it - to connect to a dataset. - - You'll generally use this as the first call to working with the API: - - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> # Now you can do things with the dataset. - >>> dataset.query().kind('TestKind').fetch() - [...] - - :type dataset_id: string - :param dataset_id: The id of the dataset you want to use. - This is akin to a database name - and is usually the same as your Cloud Datastore project - name. - - :type client_email: string - :param client_email: The e-mail attached to the service account. - - :type private_key_path: string - :param private_key_path: The path to a private key file (this file was - given to you when you created the service - account). - - :rtype: :class:`gcloud.datastore.dataset.Dataset` - :returns: A dataset with a connection using the provided credentials. - """ - connection = get_connection(client_email, private_key_path) - return connection.dataset(dataset_id) + """Establish a connection to a particular dataset in the Cloud Datastore. + + This is a shortcut method for creating a connection and using it + to connect to a dataset. + + You'll generally use this as the first call to working with the API: + + >>> from gcloud import datastore + >>> dataset = datastore.get_dataset('dataset-id', email, key_path) + >>> # Now you can do things with the dataset. + >>> dataset.query().kind('TestKind').fetch() + [...] 
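
Since ``get_dataset`` below simply wraps ``get_connection`` and calls ``dataset()`` on the result, these two spellings are equivalent (``email`` and ``key_path`` are placeholders, as in the doctests above):

>>> from gcloud import datastore
>>> dataset = datastore.get_dataset('dataset-id', email, key_path)
>>> same = datastore.get_connection(email, key_path).dataset('dataset-id')
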
+ + :type dataset_id: string + :param dataset_id: The id of the dataset you want to use. + This is akin to a database name + and is usually the same as your Cloud Datastore project + name. + + :type client_email: string + :param client_email: The e-mail attached to the service account. + + :type private_key_path: string + :param private_key_path: The path to a private key file (this file was + given to you when you created the service + account). + + :rtype: :class:`gcloud.datastore.dataset.Dataset` + :returns: A dataset with a connection using the provided credentials. + """ + connection = get_connection(client_email, private_key_path) + return connection.dataset(dataset_id) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 5961ed00b29a..fd5602577f97 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -8,360 +8,364 @@ class Connection(connection.Connection): - """A connection to the Google Cloud Datastore via the Protobuf API. + """A connection to the Google Cloud Datastore via the Protobuf API. - This class should understand only the basic types (and protobufs) - in method arguments, however should be capable of returning advanced types. + This class should understand only the basic types (and protobufs) + in method arguments, however should be capable of returning advanced types. - :type credentials: :class:`gcloud.credentials.Credentials` - :param credentials: The OAuth2 Credentials to use for this connection. - """ - - API_VERSION = 'v1beta2' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = ('{api_base}/datastore/{api_version}' - '/datasets/{dataset_id}/{method}') - """A template used to craft the URL pointing toward a particular API call.""" - - def __init__(self, credentials=None): - self._credentials = credentials - self._current_transaction = None - - def _request(self, dataset_id, method, data): - """Make a request over the Http transport to the Cloud Datastore API. - - :type dataset_id: string - :param dataset_id: The ID of the dataset of which to make the request. - - :type method: string - :param method: The API call method name (ie, ``runQuery``, ``lookup``, etc) - - :type data: string - :param data: The data to send with the API call. - Typically this is a serialized Protobuf string. - - :rtype: string - :returns: The string response content from the API call. - - :raises: Exception if the response code is not 200 OK. - """ - headers = { - 'Content-Type': 'application/x-protobuf', - 'Content-Length': str(len(data)), - } - headers, content = self.http.request( - uri=self.build_api_url(dataset_id=dataset_id, method=method), - method='POST', headers=headers, body=data) - - if headers['status'] != '200': - raise Exception('Request failed. Error was: %s' % content) - - return content - - def _rpc(self, dataset_id, method, request_pb, response_pb_cls): - response = self._request(dataset_id=dataset_id, method=method, - data=request_pb.SerializeToString()) - return response_pb_cls.FromString(response) - - @classmethod - def build_api_url(cls, dataset_id, method, base_url=None, api_version=None): - """Construct the URL for a particular API call. - - This method is used internally - to come up with the URL - to use when making RPCs - to the Cloud Datastore API. - - :type dataset_id: string - :param dataset_id: The ID of the dataset to connect to. - This is usually your project name in the cloud console. 
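
For orientation, the round trip that ``_request`` and ``_rpc`` perform reduces to serializing one protobuf, POSTing it, and parsing another; a rough sketch, where ``raw_content`` stands in for the body returned by the HTTP transport:

>>> from gcloud.datastore import datastore_v1_pb2 as datastore_pb
>>> request_pb = datastore_pb.BeginTransactionRequest()
>>> data = request_pb.SerializeToString()
>>> # POSTed with Content-Type: application/x-protobuf to the URL from
>>> # build_api_url(); the response bytes parse back into a message:
>>> response_pb = datastore_pb.BeginTransactionResponse.FromString(raw_content)
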
- - :type method: string - :param method: The API method to call (ie, runQuery, lookup, ...). - - :type base_url: string - :param base_url: The base URL where the API lives. - You shouldn't have to provide this. - - :type api_version: string - :param api_version: The version of the API to connect to. - You shouldn't have to provide this. + :type credentials: :class:`gcloud.credentials.Credentials` + :param credentials: The OAuth2 Credentials to use for this connection. """ - return cls.API_URL_TEMPLATE.format( - api_base=(base_url or cls.API_BASE_URL), - api_version=(api_version or cls.API_VERSION), - dataset_id=dataset_id, method=method) - - def transaction(self, transaction=connection.Connection._EMPTY): - if transaction is self._EMPTY: - return self._current_transaction - else: - self._current_transaction = transaction - return self - - def mutation(self): - if self.transaction(): - return self.transaction().mutation() - else: - return datastore_pb.Mutation() - - def dataset(self, *args, **kwargs): - """Factory method for Dataset objects. - - :param args: All args and kwargs will be passed along to the - :class:`gcloud.datastore.dataset.Dataset` initializer. - - :rtype: :class:`gcloud.datastore.dataset.Dataset` - :returns: A dataset object that will use this connection as its transport. - """ - kwargs['connection'] = self - return Dataset(*args, **kwargs) - - def begin_transaction(self, dataset_id, serializable=False): - """Begin a transaction. - - :type dataset_id: string - :param dataset_id: The dataset over which to execute the transaction. - """ - - if self.transaction(): - raise ValueError('Cannot start a transaction with another already ' - 'in progress.') - request = datastore_pb.BeginTransactionRequest() + API_VERSION = 'v1beta2' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = ('{api_base}/datastore/{api_version}' + '/datasets/{dataset_id}/{method}') + """A template for the URL of a particular API call.""" + + def __init__(self, credentials=None): + self._credentials = credentials + self._current_transaction = None + + def _request(self, dataset_id, method, data): + """Make a request over the Http transport to the Cloud Datastore API. + + :type dataset_id: string + :param dataset_id: The ID of the dataset of which to make the request. + + :type method: string + :param method: The API call method name (ie, ``runQuery``, + ``lookup``, etc) + + :type data: string + :param data: The data to send with the API call. + Typically this is a serialized Protobuf string. + + :rtype: string + :returns: The string response content from the API call. + + :raises: Exception if the response code is not 200 OK. + """ + headers = { + 'Content-Type': 'application/x-protobuf', + 'Content-Length': str(len(data)), + } + headers, content = self.http.request( + uri=self.build_api_url(dataset_id=dataset_id, method=method), + method='POST', headers=headers, body=data) + + if headers['status'] != '200': + raise Exception('Request failed. Error was: %s' % content) + + return content + + def _rpc(self, dataset_id, method, request_pb, response_pb_cls): + response = self._request(dataset_id=dataset_id, method=method, + data=request_pb.SerializeToString()) + return response_pb_cls.FromString(response) + + @classmethod + def build_api_url(cls, dataset_id, method, base_url=None, + api_version=None): + """Construct the URL for a particular API call. + + This method is used internally + to come up with the URL + to use when making RPCs + to the Cloud Datastore API. 
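
Note that ``transaction()`` doubles as getter and setter: the ``_EMPTY`` sentinel (rather than ``None``) distinguishes passing no argument from setting no transaction, and passing a value stores it and returns the connection. ``mutation()`` then routes writes through whatever is current; roughly (``xact`` is a placeholder transaction object):

>>> connection.transaction(xact)  # setter: stores it, returns the connection
>>> connection.transaction()      # getter: returns the stored transaction
>>> connection.mutation()         # xact's mutation, not a fresh Mutation pb
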
+ + :type dataset_id: string + :param dataset_id: The ID of the dataset to connect to. This is + usually your project name in the cloud console. + + :type method: string + :param method: The API method to call (ie, runQuery, lookup, ...). + + :type base_url: string + :param base_url: The base URL where the API lives. + You shouldn't have to provide this. + + :type api_version: string + :param api_version: The version of the API to connect to. + You shouldn't have to provide this. + """ + return cls.API_URL_TEMPLATE.format( + api_base=(base_url or cls.API_BASE_URL), + api_version=(api_version or cls.API_VERSION), + dataset_id=dataset_id, method=method) - if serializable: - request.isolation_level = ( - datastore_pb.BeginTransactionRequest.SERIALIZABLE) - else: - request.isolation_level = datastore_pb.BeginTransactionRequest.SNAPSHOT + def transaction(self, transaction=connection.Connection._EMPTY): + if transaction is self._EMPTY: + return self._current_transaction + else: + self._current_transaction = transaction + return self - response = self._rpc(dataset_id, 'beginTransaction', request, - datastore_pb.BeginTransactionResponse) + def mutation(self): + if self.transaction(): + return self.transaction().mutation() + else: + return datastore_pb.Mutation() - return response.transaction + def dataset(self, *args, **kwargs): + """Factory method for Dataset objects. - def rollback_transaction(self, dataset_id): - """Rollback the connection's existing transaction. + :param args: All args and kwargs will be passed along to the + :class:`gcloud.datastore.dataset.Dataset` initializer. + + :rtype: :class:`gcloud.datastore.dataset.Dataset` + :returns: A dataset object that will use this connection as + its transport. + """ + kwargs['connection'] = self + return Dataset(*args, **kwargs) + + def begin_transaction(self, dataset_id, serializable=False): + """Begin a transaction. + + :type dataset_id: string + :param dataset_id: The dataset over which to execute the transaction. + """ + + if self.transaction(): + raise ValueError('Cannot start a transaction with another already ' + 'in progress.') + + request = datastore_pb.BeginTransactionRequest() + + if serializable: + request.isolation_level = ( + datastore_pb.BeginTransactionRequest.SERIALIZABLE) + else: + request.isolation_level = ( + datastore_pb.BeginTransactionRequest.SNAPSHOT) + + response = self._rpc(dataset_id, 'beginTransaction', request, + datastore_pb.BeginTransactionResponse) + + return response.transaction + + def rollback_transaction(self, dataset_id): + """Rollback the connection's existing transaction. + + Raises a ``ValueError`` + if the connection isn't currently in a transaction. + + :type dataset_id: string + :param dataset_id: The dataset to which the transaction belongs. + """ + if not self.transaction() or not self.transaction().id(): + raise ValueError('No transaction to rollback.') + + request = datastore_pb.RollbackRequest() + request.transaction = self.transaction().id() + # Nothing to do with this response, so just execute the method. + self._rpc(dataset_id, 'rollback', request, + datastore_pb.RollbackResponse) + + def run_query(self, dataset_id, query_pb, namespace=None): + """Run a query on the Cloud Datastore. + + Given a Query protobuf, + sends a ``runQuery`` request to the Cloud Datastore API + and returns a list of entity protobufs matching the query. - Raises a ``ValueError`` - if the connection isn't currently in a transaction. 
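
Filling the defaults shown above into ``API_URL_TEMPLATE`` makes the result concrete:

>>> from gcloud.datastore.connection import Connection
>>> Connection.build_api_url('dataset-id', 'runQuery')
'https://www.googleapis.com/datastore/v1beta2/datasets/dataset-id/runQuery'
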
+ You typically wouldn't use this method directly, + in favor of the :func:`gcloud.datastore.query.Query.fetch` method. - :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. - """ - if not self.transaction() or not self.transaction().id(): - raise ValueError('No transaction to rollback.') + Under the hood, the :class:`gcloud.datastore.query.Query` class + uses this method to fetch data: - request = datastore_pb.RollbackRequest() - request.transaction = self.transaction().id() - # Nothing to do with this response, so just execute the method. - self._rpc(dataset_id, 'rollback', request, - datastore_pb.RollbackResponse) + >>> from gcloud import datastore + >>> connection = datastore.get_connection(email, key_path) + >>> dataset = connection.dataset('dataset-id') + >>> query = dataset.query().kind('MyKind').filter('property =', 'val') - def run_query(self, dataset_id, query_pb, namespace=None): - """Run a query on the Cloud Datastore. + Using the `fetch`` method... - Given a Query protobuf, - sends a ``runQuery`` request to the Cloud Datastore API - and returns a list of entity protobufs matching the query. + >>> query.fetch() + [], cursor, more_results, skipped_results + + Under the hood this is doing... - You typically wouldn't use this method directly, - in favor of the :func:`gcloud.datastore.query.Query.fetch` method. + >>> connection.run_query('dataset-id', query.to_protobuf()) + [] - Under the hood, the :class:`gcloud.datastore.query.Query` class - uses this method to fetch data: + :type dataset_id: string + :param dataset_id: The ID of the dataset over which to run the query. - >>> from gcloud import datastore - >>> connection = datastore.get_connection(email, key_path) - >>> dataset = connection.dataset('dataset-id') - >>> query = dataset.query().kind('MyKind').filter('property =', 'value') + :type query_pb: :class:`gcloud.datastore.datastore_v1_pb2.Query` + :param query_pb: The Protobuf representing the query to run. - Using the `fetch`` method... + :type namespace: string + :param namespace: The namespace over which to run the query. + """ + request = datastore_pb.RunQueryRequest() - >>> query.fetch() - [] + if namespace: + request.partition_id.namespace = namespace - Under the hood this is doing... + request.query.CopyFrom(query_pb) + response = self._rpc(dataset_id, 'runQuery', request, + datastore_pb.RunQueryResponse) + return ([e.entity for e in response.batch.entity_result], + response.batch.end_cursor, + response.batch.more_results, + response.batch.skipped_results, + ) - >>> connection.run_query('dataset-id', query.to_protobuf()) - [], cursor, more_results, skipped_results + def lookup(self, dataset_id, key_pbs): + """Lookup keys from a dataset in the Cloud Datastore. - :type dataset_id: string - :param dataset_id: The ID of the dataset over which to run the query. + This method deals only with protobufs + (:class:`gcloud.datastore.datastore_v1_pb2.Key` + and + :class:`gcloud.datastore.datastore_v1_pb2.Entity`) + and is used under the hood for methods like + :func:`gcloud.datastore.dataset.Dataset.get_entity`: - :type query_pb: :class:`gcloud.datastore.datastore_v1_pb2.Query` - :param query_pb: The Protobuf representing the query to run. 
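
To keep the two shapes straight: ``run_query`` returns the raw four-tuple, while ``Query.fetch`` unwraps it to just the entity list; a sketch continuing the doctest above:

>>> pbs, cursor, more_results, skipped = connection.run_query(
...     'dataset-id', query.to_protobuf())
>>> entities = query.fetch()  # just the matching entities, nothing else
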
+ >>> from gcloud import datastore + >>> from gcloud.datastore.key import Key + >>> connection = datastore.get_connection(email, key_path) + >>> dataset = connection.dataset('dataset-id') + >>> key = Key(dataset=dataset).kind('MyKind').id(1234) - :type namespace: string - :param namespace: The namespace over which to run the query. - """ - request = datastore_pb.RunQueryRequest() - - if namespace: - request.partition_id.namespace = namespace - - request.query.CopyFrom(query_pb) - response = self._rpc(dataset_id, 'runQuery', request, - datastore_pb.RunQueryResponse) - return ([e.entity for e in response.batch.entity_result], - response.batch.end_cursor, - response.batch.more_results, - response.batch.skipped_results, - ) - - def lookup(self, dataset_id, key_pbs): - """Lookup keys from a dataset in the Cloud Datastore. - - This method deals only with protobufs - (:class:`gcloud.datastore.datastore_v1_pb2.Key` - and - :class:`gcloud.datastore.datastore_v1_pb2.Entity`) - and is used under the hood for methods like - :func:`gcloud.datastore.dataset.Dataset.get_entity`: - - >>> from gcloud import datastore - >>> from gcloud.datastore.key import Key - >>> connection = datastore.get_connection(email, key_path) - >>> dataset = connection.dataset('dataset-id') - >>> key = Key(dataset=dataset).kind('MyKind').id(1234) - - Using the :class:`gcloud.datastore.dataset.Dataset` helper: - - >>> dataset.get_entity(key) - - - Using the ``connection`` class directly: - - >>> connection.lookup('dataset-id', key.to_protobuf()) - - - :type dataset_id: string - :param dataset_id: The dataset to look up the keys. - - :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` - (or a single Key) - :param key_pbs: The key (or keys) to retrieve from the datastore. - - :rtype: list of :class:`gcloud.datastore.datastore_v1_pb2.Entity` - (or a single Entity) - :returns: The entities corresponding to the keys provided. - If a single key was provided and no results matched, - this will return None. - If multiple keys were provided and no results matched, - this will return an empty list. - """ - lookup_request = datastore_pb.LookupRequest() + Using the :class:`gcloud.datastore.dataset.Dataset` helper: - single_key = isinstance(key_pbs, datastore_pb.Key) + >>> dataset.get_entity(key) + - if single_key: - key_pbs = [key_pbs] + Using the ``connection`` class directly: - for key_pb in key_pbs: - lookup_request.key.add().CopyFrom(key_pb) + >>> connection.lookup('dataset-id', key.to_protobuf()) + - lookup_response = self._rpc(dataset_id, 'lookup', lookup_request, - datastore_pb.LookupResponse) + :type dataset_id: string + :param dataset_id: The dataset to look up the keys. - results = [result.entity for result in lookup_response.found] + :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` + (or a single Key) + :param key_pbs: The key (or keys) to retrieve from the datastore. - if single_key: - if results: - return results[0] - else: - return None + :rtype: list of :class:`gcloud.datastore.datastore_v1_pb2.Entity` + (or a single Entity) + :returns: The entities corresponding to the keys provided. + If a single key was provided and no results matched, + this will return None. + If multiple keys were provided and no results matched, + this will return an empty list. 
+ """ + lookup_request = datastore_pb.LookupRequest() - return results + single_key = isinstance(key_pbs, datastore_pb.Key) - def commit(self, dataset_id, mutation_pb): - request = datastore_pb.CommitRequest() + if single_key: + key_pbs = [key_pbs] - if self.transaction(): - request.mode = datastore_pb.CommitRequest.TRANSACTIONAL - request.transaction = self.transaction().id() - else: - request.mode = datastore_pb.CommitRequest.NON_TRANSACTIONAL + for key_pb in key_pbs: + lookup_request.key.add().CopyFrom(key_pb) - request.mutation.CopyFrom(mutation_pb) - response = self._rpc(dataset_id, 'commit', request, - datastore_pb.CommitResponse) - return response.mutation_result + lookup_response = self._rpc(dataset_id, 'lookup', lookup_request, + datastore_pb.LookupResponse) - def save_entity(self, dataset_id, key_pb, properties): - """Save an entity to the Cloud Datastore with the provided properties. + results = [result.entity for result in lookup_response.found] - :type dataset_id: string - :param dataset_id: The dataset in which to save the entity. + if single_key: + if results: + return results[0] + else: + return None - :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` - :param key_pb: The complete or partial key for the entity. + return results - :type properties: dict - :param properties: The properties to store on the entity. - """ - mutation = self.mutation() + def commit(self, dataset_id, mutation_pb): + request = datastore_pb.CommitRequest() - # If the Key is complete, we should upsert - # instead of using insert_auto_id. - path = key_pb.path_element[-1] - auto_id = not (path.HasField('id') or path.HasField('name')) + if self.transaction(): + request.mode = datastore_pb.CommitRequest.TRANSACTIONAL + request.transaction = self.transaction().id() + else: + request.mode = datastore_pb.CommitRequest.NON_TRANSACTIONAL - if auto_id: - insert = mutation.insert_auto_id.add() - else: - insert = mutation.upsert.add() + request.mutation.CopyFrom(mutation_pb) + response = self._rpc(dataset_id, 'commit', request, + datastore_pb.CommitResponse) + return response.mutation_result + + def save_entity(self, dataset_id, key_pb, properties): + """Save an entity to the Cloud Datastore with the provided properties. + + :type dataset_id: string + :param dataset_id: The dataset in which to save the entity. + + :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` + :param key_pb: The complete or partial key for the entity. + + :type properties: dict + :param properties: The properties to store on the entity. + """ + mutation = self.mutation() + + # If the Key is complete, we should upsert + # instead of using insert_auto_id. + path = key_pb.path_element[-1] + auto_id = not (path.HasField('id') or path.HasField('name')) - insert.key.CopyFrom(key_pb) + if auto_id: + insert = mutation.insert_auto_id.add() + else: + insert = mutation.upsert.add() - for name, value in properties.iteritems(): - prop = insert.property.add() - # Set the name of the property. - prop.name = name + insert.key.CopyFrom(key_pb) - # Set the appropriate value. - pb_attr, pb_value = helpers.get_protobuf_attribute_and_value(value) - setattr(prop.value, pb_attr, pb_value) + for name, value in properties.iteritems(): + prop = insert.property.add() + # Set the name of the property. + prop.name = name - # If this is in a transaction, we should just return True. The transaction - # will handle assigning any keys as necessary. - if self.transaction(): - return True + # Set the appropriate value. 
+ pb_attr, pb_value = helpers.get_protobuf_attribute_and_value(value) + setattr(prop.value, pb_attr, pb_value) - result = self.commit(dataset_id, mutation) - # If this was an auto-assigned ID, return the new Key. - if auto_id: - return result.insert_auto_id_key[0] + # If this is in a transaction, we should just return True. The + # transaction will handle assigning any keys as necessary. + if self.transaction(): + return True - return True + result = self.commit(dataset_id, mutation) + # If this was an auto-assigned ID, return the new Key. + if auto_id: + return result.insert_auto_id_key[0] - def delete_entities(self, dataset_id, key_pbs): - """Delete keys from a dataset in the Cloud Datastore. + return True - This method deals only with - :class:`gcloud.datastore.datastore_v1_pb2.Key` protobufs - and not with any of the other abstractions. - For example, it's used under the hood in the - :func:`gcloud.datastore.entity.Entity.delete` method. + def delete_entities(self, dataset_id, key_pbs): + """Delete keys from a dataset in the Cloud Datastore. - :type dataset_id: string - :param dataset_id: The dataset from which to delete the keys. + This method deals only with + :class:`gcloud.datastore.datastore_v1_pb2.Key` protobufs + and not with any of the other abstractions. + For example, it's used under the hood in the + :func:`gcloud.datastore.entity.Entity.delete` method. + + :type dataset_id: string + :param dataset_id: The dataset from which to delete the keys. - :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` - (or a single Key) - :param key_pbs: The key (or keys) to delete from the datastore. - """ - mutation = self.mutation() + :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` + (or a single Key) + :param key_pbs: The key (or keys) to delete from the datastore. + """ + mutation = self.mutation() - for key_pb in key_pbs: - delete = mutation.delete.add() - delete.CopyFrom(key_pb) + for key_pb in key_pbs: + delete = mutation.delete.add() + delete.CopyFrom(key_pb) - if self.transaction(): - return True - else: - return self.commit(dataset_id, mutation) + if self.transaction(): + return True + else: + return self.commit(dataset_id, mutation) - def delete_entity(self, dataset_id, key_pb): - return self.delete_entities(dataset_id, [key_pb]) + def delete_entity(self, dataset_id, key_pb): + return self.delete_entities(dataset_id, [key_pb]) diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 556a82a8f08e..a5e4bbb0f637 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -1,100 +1,100 @@ class Dataset(object): - """A dataset in the Cloud Datastore. + """A dataset in the Cloud Datastore. - This class acts as an abstraction of a single dataset - in the Cloud Datastore. + This class acts as an abstraction of a single dataset + in the Cloud Datastore. - A dataset is analogous to a database - in relational database world, - and corresponds to a single project - using the Cloud Datastore. + A dataset is analogous to a database + in relational database world, + and corresponds to a single project + using the Cloud Datastore. - Typically, you would only have one of these per connection - however it didn't seem right to collapse the functionality - of a connection and a dataset together into a single class. + Typically, you would only have one of these per connection + however it didn't seem right to collapse the functionality + of a connection and a dataset together into a single class. 
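
From the caller's side, the write paths above reduce to three outcomes for ``save_entity``, with deletes following the same transactional pattern (``complete_key_pb`` and ``partial_key_pb`` are placeholder key protobufs):

>>> connection.save_entity('dataset-id', complete_key_pb, {'name': 'Fred'})
True
>>> # A partial key outside a transaction returns the allocated key pb;
>>> # inside a transaction both saves and deletes just return True.
>>> new_key_pb = connection.save_entity(
...     'dataset-id', partial_key_pb, {'name': 'Fred'})
>>> connection.delete_entity('dataset-id', complete_key_pb)
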
- Datasets (like :class:`gcloud.datastore.query.Query`) - are immutable. - That is, you cannot change the ID and connection - references. - If you need to modify the connection or ID, - it's recommended to construct a new :class:`Dataset`. + Datasets (like :class:`gcloud.datastore.query.Query`) + are immutable. + That is, you cannot change the ID and connection + references. + If you need to modify the connection or ID, + it's recommended to construct a new :class:`Dataset`. - :type id: string - :param id: The ID of the dataset (your project ID) + :type id: string + :param id: The ID of the dataset (your project ID) - :type connection: :class:`gcloud.datastore.connection.Connection` - :param connection: The connection to use for executing API calls. - """ + :type connection: :class:`gcloud.datastore.connection.Connection` + :param connection: The connection to use for executing API calls. + """ - def __init__(self, id, connection=None): - self._connection = connection - self._id = id + def __init__(self, id, connection=None): + self._connection = connection + self._id = id - def connection(self): - """Get the current connection. + def connection(self): + """Get the current connection. - >>> dataset = Dataset('dataset-id', connection=conn) - >>> dataset.connection() - + >>> dataset = Dataset('dataset-id', connection=conn) + >>> dataset.connection() + - :rtype: :class:`gcloud.datastore.connection.Connection` - :returns: Returns the current connection. - """ + :rtype: :class:`gcloud.datastore.connection.Connection` + :returns: Returns the current connection. + """ - return self._connection + return self._connection - def id(self): - """Get the current dataset ID. + def id(self): + """Get the current dataset ID. - >>> dataset = Dataset('dataset-id', connection=conn) - >>> dataset.id() - 'dataset-id' + >>> dataset = Dataset('dataset-id', connection=conn) + >>> dataset.id() + 'dataset-id' - :rtype: string - :returns: The current dataset ID. - """ + :rtype: string + :returns: The current dataset ID. + """ - return self._id + return self._id - def query(self, *args, **kwargs): - from gcloud.datastore.query import Query - kwargs['dataset'] = self - return Query(*args, **kwargs) + def query(self, *args, **kwargs): + from gcloud.datastore.query import Query + kwargs['dataset'] = self + return Query(*args, **kwargs) - def entity(self, kind): - from gcloud.datastore.entity import Entity - return Entity(dataset=self, kind=kind) + def entity(self, kind): + from gcloud.datastore.entity import Entity + return Entity(dataset=self, kind=kind) - def transaction(self, *args, **kwargs): - from gcloud.datastore.transaction import Transaction - kwargs['dataset'] = self - return Transaction(*args, **kwargs) + def transaction(self, *args, **kwargs): + from gcloud.datastore.transaction import Transaction + kwargs['dataset'] = self + return Transaction(*args, **kwargs) - def get_entity(self, key): - """ - Retrieves an entity from the dataset, along with all of its attributes. + def get_entity(self, key): + """Retrieves entity from the dataset, along with all of its attributes. - :type key: :class:`gcloud.datastore.key.Key` - :param item_name: The name of the item to retrieve. + :type key: :class:`gcloud.datastore.key.Key` + :param item_name: The name of the item to retrieve. - :rtype: :class:`gcloud.datastore.entity.Entity` or ``None`` - :return: The requested entity, or ``None`` if there was no match found. 
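
Pulling the factory methods together, a typical round trip through a ``Dataset`` looks like this (``conn`` and ``key`` as in the surrounding doctests):

>>> dataset = Dataset('dataset-id', connection=conn)
>>> dataset.query().kind('MyKind').fetch()
[...]
>>> entity = dataset.entity('MyKind')
>>> entity = dataset.get_entity(key)  # or None when nothing matches
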
- """ - entities = self.get_entities([key]) - if entities: - return entities[0] - - def get_entities(self, keys): - # This import is here to avoid circular references. - from gcloud.datastore.entity import Entity - - entity_pbs = self.connection().lookup( - dataset_id=self.id(), - key_pbs=[k.to_protobuf() for k in keys] - ) - - entities = [] - for entity_pb in entity_pbs: - entities.append(Entity.from_protobuf(entity_pb, dataset=self)) - return entities + :rtype: :class:`gcloud.datastore.entity.Entity` or ``None`` + :return: The requested entity, or ``None`` if there was no match found. + + """ + entities = self.get_entities([key]) + if entities: + return entities[0] + + def get_entities(self, keys): + # This import is here to avoid circular references. + from gcloud.datastore.entity import Entity + + entity_pbs = self.connection().lookup( + dataset_id=self.id(), + key_pbs=[k.to_protobuf() for k in keys] + ) + + entities = [] + for entity_pb in entity_pbs: + entities.append(Entity.from_protobuf(entity_pb, dataset=self)) + return entities diff --git a/gcloud/datastore/datastore_v1_pb2.py b/gcloud/datastore/datastore_v1_pb2.py index 6c789c922c4e..de7be0fdda59 100644 --- a/gcloud/datastore/datastore_v1_pb2.py +++ b/gcloud/datastore/datastore_v1_pb2.py @@ -7,16 +7,11 @@ from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) - - - DESCRIPTOR = _descriptor.FileDescriptor( name='datastore_v1.proto', package='api.services.datastore', serialized_pb='\n\x12\x64\x61tastore_v1.proto\x12\x16\x61pi.services.datastore\"4\n\x0bPartitionId\x12\x12\n\ndataset_id\x18\x03 \x01(\t\x12\x11\n\tnamespace\x18\x04 \x01(\t\"\xb6\x01\n\x03Key\x12\x39\n\x0cpartition_id\x18\x01 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12=\n\x0cpath_element\x18\x02 \x03(\x0b\x32\'.api.services.datastore.Key.PathElement\x1a\x35\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xf4\x02\n\x05Value\x12\x15\n\rboolean_value\x18\x01 \x01(\x08\x12\x15\n\rinteger_value\x18\x02 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x03 \x01(\x01\x12$\n\x1ctimestamp_microseconds_value\x18\x04 \x01(\x03\x12.\n\tkey_value\x18\x05 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x16\n\x0e\x62lob_key_value\x18\x10 \x01(\t\x12\x14\n\x0cstring_value\x18\x11 \x01(\t\x12\x12\n\nblob_value\x18\x12 \x01(\x0c\x12\x34\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1e.api.services.datastore.Entity\x12\x31\n\nlist_value\x18\x07 \x03(\x0b\x32\x1d.api.services.datastore.Value\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x15\n\x07indexed\x18\x0f \x01(\x08:\x04true\"F\n\x08Property\x12\x0c\n\x04name\x18\x01 \x02(\t\x12,\n\x05value\x18\x04 \x02(\x0b\x32\x1d.api.services.datastore.Value\"f\n\x06\x45ntity\x12(\n\x03key\x18\x01 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x32\n\x08property\x18\x02 \x03(\x0b\x32 .api.services.datastore.Property\"t\n\x0c\x45ntityResult\x12.\n\x06\x65ntity\x18\x01 \x02(\x0b\x32\x1e.api.services.datastore.Entity\"4\n\nResultType\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xec\x02\n\x05Query\x12>\n\nprojection\x18\x02 \x03(\x0b\x32*.api.services.datastore.PropertyExpression\x12\x34\n\x04kind\x18\x03 \x03(\x0b\x32&.api.services.datastore.KindExpression\x12.\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1e.api.services.datastore.Filter\x12\x34\n\x05order\x18\x05 \x03(\x0b\x32%.api.services.datastore.PropertyOrder\x12;\n\x08group_by\x18\x06 
\x03(\x0b\x32).api.services.datastore.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x11\n\x06offset\x18\n \x01(\x05:\x01\x30\x12\r\n\x05limit\x18\x0b \x01(\x05\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x02(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x02(\t\"\xd1\x01\n\x12PropertyExpression\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\\\n\x14\x61ggregation_function\x18\x02 \x01(\x0e\x32>.api.services.datastore.PropertyExpression.AggregationFunction\" \n\x13\x41ggregationFunction\x12\t\n\x05\x46IRST\x10\x01\"\xc7\x01\n\rPropertyOrder\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12M\n\tdirection\x18\x02 \x01(\x0e\x32/.api.services.datastore.PropertyOrder.Direction:\tASCENDING\"*\n\tDirection\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x8c\x01\n\x06\x46ilter\x12\x41\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\'.api.services.datastore.CompositeFilter\x12?\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32&.api.services.datastore.PropertyFilter\"\x9a\x01\n\x0f\x43ompositeFilter\x12\x42\n\x08operator\x18\x01 \x02(\x0e\x32\x30.api.services.datastore.CompositeFilter.Operator\x12.\n\x06\x66ilter\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Filter\"\x13\n\x08Operator\x12\x07\n\x03\x41ND\x10\x01\"\xbb\x02\n\x0ePropertyFilter\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\x41\n\x08operator\x18\x02 \x02(\x0e\x32/.api.services.datastore.PropertyFilter.Operator\x12,\n\x05value\x18\x03 \x02(\x0b\x32\x1d.api.services.datastore.Value\"{\n\x08Operator\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xae\x01\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x02(\t\x12\x1c\n\rallow_literal\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x35\n\x08name_arg\x18\x03 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\x12\x37\n\nnumber_arg\x18\x04 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\"Y\n\x0bGqlQueryArg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.api.services.datastore.Value\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"\xf1\x02\n\x10QueryResultBatch\x12K\n\x12\x65ntity_result_type\x18\x01 \x02(\x0e\x32/.api.services.datastore.EntityResult.ResultType\x12;\n\rentity_result\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12N\n\x0cmore_results\x18\x05 \x02(\x0e\x32\x38.api.services.datastore.QueryResultBatch.MoreResultsType\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\"V\n\x0fMoreResultsType\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\"\x8e\x02\n\x08Mutation\x12.\n\x06upsert\x18\x01 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06update\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06insert\x18\x03 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12\x36\n\x0einsert_auto_id\x18\x04 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12+\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\x1b.api.services.datastore.Key\x12\r\n\x05\x66orce\x18\x06 \x01(\x08\"`\n\x0eMutationResult\x12\x15\n\rindex_updates\x18\x01 \x02(\x05\x12\x37\n\x12insert_auto_id_key\x18\x02 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xb4\x01\n\x0bReadOptions\x12V\n\x10read_consistency\x18\x01 
\x01(\x0e\x32\x33.api.services.datastore.ReadOptions.ReadConsistency:\x07\x44\x45\x46\x41ULT\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\"8\n\x0fReadConsistency\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\"t\n\rLookupRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12(\n\x03key\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xab\x01\n\x0eLookupResponse\x12\x33\n\x05\x66ound\x18\x01 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x35\n\x07missing\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12-\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xea\x01\n\x0fRunQueryRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12\x39\n\x0cpartition_id\x18\x02 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12,\n\x05query\x18\x03 \x01(\x0b\x32\x1d.api.services.datastore.Query\x12\x33\n\tgql_query\x18\x07 \x01(\x0b\x32 .api.services.datastore.GqlQuery\"K\n\x10RunQueryResponse\x12\x37\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32(.api.services.datastore.QueryResultBatch\"\xae\x01\n\x17\x42\x65ginTransactionRequest\x12\x61\n\x0fisolation_level\x18\x01 \x01(\x0e\x32>.api.services.datastore.BeginTransactionRequest.IsolationLevel:\x08SNAPSHOT\"0\n\x0eIsolationLevel\x12\x0c\n\x08SNAPSHOT\x10\x00\x12\x10\n\x0cSERIALIZABLE\x10\x01\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"&\n\x0fRollbackRequest\x12\x13\n\x0btransaction\x18\x01 \x02(\x0c\"\x12\n\x10RollbackResponse\"\xd3\x01\n\rCommitRequest\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\x12\x32\n\x08mutation\x18\x02 \x01(\x0b\x32 .api.services.datastore.Mutation\x12G\n\x04mode\x18\x05 \x01(\x0e\x32*.api.services.datastore.CommitRequest.Mode:\rTRANSACTIONAL\"0\n\x04Mode\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\"Q\n\x0e\x43ommitResponse\x12?\n\x0fmutation_result\x18\x01 \x01(\x0b\x32&.api.services.datastore.MutationResult\">\n\x12\x41llocateIdsRequest\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key\"?\n\x13\x41llocateIdsResponse\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key2\xed\x04\n\x10\x44\x61tastoreService\x12Y\n\x06Lookup\x12%.api.services.datastore.LookupRequest\x1a&.api.services.datastore.LookupResponse\"\x00\x12_\n\x08RunQuery\x12\'.api.services.datastore.RunQueryRequest\x1a(.api.services.datastore.RunQueryResponse\"\x00\x12w\n\x10\x42\x65ginTransaction\x12/.api.services.datastore.BeginTransactionRequest\x1a\x30.api.services.datastore.BeginTransactionResponse\"\x00\x12Y\n\x06\x43ommit\x12%.api.services.datastore.CommitRequest\x1a&.api.services.datastore.CommitResponse\"\x00\x12_\n\x08Rollback\x12\'.api.services.datastore.RollbackRequest\x1a(.api.services.datastore.RollbackResponse\"\x00\x12h\n\x0b\x41llocateIds\x12*.api.services.datastore.AllocateIdsRequest\x1a+.api.services.datastore.AllocateIdsResponse\"\x00\x42#\n!com.google.api.services.datastore') - - _ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor( name='ResultType', full_name='api.services.datastore.EntityResult.ResultType', @@ -226,7 +221,6 @@ serialized_end=4718, ) - _PARTITIONID = _descriptor.Descriptor( name='PartitionId', full_name='api.services.datastore.PartitionId', @@ -261,7 +255,6 @@ serialized_end=98, ) - _KEY_PATHELEMENT = _descriptor.Descriptor( name='PathElement', full_name='api.services.datastore.Key.PathElement', @@ -337,7 +330,6 @@ serialized_end=283, ) - _VALUE = 
_descriptor.Descriptor( name='Value', full_name='api.services.datastore.Value', @@ -442,7 +434,6 @@ serialized_end=658, ) - _PROPERTY = _descriptor.Descriptor( name='Property', full_name='api.services.datastore.Property', @@ -477,7 +468,6 @@ serialized_end=730, ) - _ENTITY = _descriptor.Descriptor( name='Entity', full_name='api.services.datastore.Entity', @@ -512,7 +502,6 @@ serialized_end=834, ) - _ENTITYRESULT = _descriptor.Descriptor( name='EntityResult', full_name='api.services.datastore.EntityResult', @@ -541,7 +530,6 @@ serialized_end=952, ) - _QUERY = _descriptor.Descriptor( name='Query', full_name='api.services.datastore.Query', @@ -625,7 +613,6 @@ serialized_end=1319, ) - _KINDEXPRESSION = _descriptor.Descriptor( name='KindExpression', full_name='api.services.datastore.KindExpression', @@ -653,7 +640,6 @@ serialized_end=1351, ) - _PROPERTYREFERENCE = _descriptor.Descriptor( name='PropertyReference', full_name='api.services.datastore.PropertyReference', @@ -681,7 +667,6 @@ serialized_end=1386, ) - _PROPERTYEXPRESSION = _descriptor.Descriptor( name='PropertyExpression', full_name='api.services.datastore.PropertyExpression', @@ -717,7 +702,6 @@ serialized_end=1598, ) - _PROPERTYORDER = _descriptor.Descriptor( name='PropertyOrder', full_name='api.services.datastore.PropertyOrder', @@ -753,7 +737,6 @@ serialized_end=1800, ) - _FILTER = _descriptor.Descriptor( name='Filter', full_name='api.services.datastore.Filter', @@ -788,7 +771,6 @@ serialized_end=1943, ) - _COMPOSITEFILTER = _descriptor.Descriptor( name='CompositeFilter', full_name='api.services.datastore.CompositeFilter', @@ -824,7 +806,6 @@ serialized_end=2100, ) - _PROPERTYFILTER = _descriptor.Descriptor( name='PropertyFilter', full_name='api.services.datastore.PropertyFilter', @@ -867,7 +848,6 @@ serialized_end=2418, ) - _GQLQUERY = _descriptor.Descriptor( name='GqlQuery', full_name='api.services.datastore.GqlQuery', @@ -916,7 +896,6 @@ serialized_end=2595, ) - _GQLQUERYARG = _descriptor.Descriptor( name='GqlQueryArg', full_name='api.services.datastore.GqlQueryArg', @@ -958,7 +937,6 @@ serialized_end=2686, ) - _QUERYRESULTBATCH = _descriptor.Descriptor( name='QueryResultBatch', full_name='api.services.datastore.QueryResultBatch', @@ -1015,7 +993,6 @@ serialized_end=3058, ) - _MUTATION = _descriptor.Descriptor( name='Mutation', full_name='api.services.datastore.Mutation', @@ -1078,7 +1055,6 @@ serialized_end=3331, ) - _MUTATIONRESULT = _descriptor.Descriptor( name='MutationResult', full_name='api.services.datastore.MutationResult', @@ -1113,7 +1089,6 @@ serialized_end=3429, ) - _READOPTIONS = _descriptor.Descriptor( name='ReadOptions', full_name='api.services.datastore.ReadOptions', @@ -1149,7 +1124,6 @@ serialized_end=3612, ) - _LOOKUPREQUEST = _descriptor.Descriptor( name='LookupRequest', full_name='api.services.datastore.LookupRequest', @@ -1184,7 +1158,6 @@ serialized_end=3730, ) - _LOOKUPRESPONSE = _descriptor.Descriptor( name='LookupResponse', full_name='api.services.datastore.LookupResponse', @@ -1226,7 +1199,6 @@ serialized_end=3904, ) - _RUNQUERYREQUEST = _descriptor.Descriptor( name='RunQueryRequest', full_name='api.services.datastore.RunQueryRequest', @@ -1275,7 +1247,6 @@ serialized_end=4141, ) - _RUNQUERYRESPONSE = _descriptor.Descriptor( name='RunQueryResponse', full_name='api.services.datastore.RunQueryResponse', @@ -1303,7 +1274,6 @@ serialized_end=4218, ) - _BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( name='BeginTransactionRequest', full_name='api.services.datastore.BeginTransactionRequest', @@ -1332,7 
+1302,6 @@ serialized_end=4395, ) - _BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( name='BeginTransactionResponse', full_name='api.services.datastore.BeginTransactionResponse', @@ -1360,7 +1329,6 @@ serialized_end=4444, ) - _ROLLBACKREQUEST = _descriptor.Descriptor( name='RollbackRequest', full_name='api.services.datastore.RollbackRequest', @@ -1388,7 +1356,6 @@ serialized_end=4484, ) - _ROLLBACKRESPONSE = _descriptor.Descriptor( name='RollbackResponse', full_name='api.services.datastore.RollbackResponse', @@ -1409,7 +1376,6 @@ serialized_end=4504, ) - _COMMITREQUEST = _descriptor.Descriptor( name='CommitRequest', full_name='api.services.datastore.CommitRequest', @@ -1452,7 +1418,6 @@ serialized_end=4718, ) - _COMMITRESPONSE = _descriptor.Descriptor( name='CommitResponse', full_name='api.services.datastore.CommitResponse', @@ -1480,7 +1445,6 @@ serialized_end=4801, ) - _ALLOCATEIDSREQUEST = _descriptor.Descriptor( name='AllocateIdsRequest', full_name='api.services.datastore.AllocateIdsRequest', @@ -1508,7 +1472,6 @@ serialized_end=4865, ) - _ALLOCATEIDSRESPONSE = _descriptor.Descriptor( name='AllocateIdsResponse', full_name='api.services.datastore.AllocateIdsResponse', @@ -1536,7 +1499,8 @@ serialized_end=4930, ) -_KEY_PATHELEMENT.containing_type = _KEY; +_KEY_PATHELEMENT.containing_type = _KEY + _KEY.fields_by_name['partition_id'].message_type = _PARTITIONID _KEY.fields_by_name['path_element'].message_type = _KEY_PATHELEMENT _VALUE.fields_by_name['key_value'].message_type = _KEY @@ -1546,7 +1510,8 @@ _ENTITY.fields_by_name['key'].message_type = _KEY _ENTITY.fields_by_name['property'].message_type = _PROPERTY _ENTITYRESULT.fields_by_name['entity'].message_type = _ENTITY -_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT; +_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT + _QUERY.fields_by_name['projection'].message_type = _PROPERTYEXPRESSION _QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION _QUERY.fields_by_name['filter'].message_type = _FILTER @@ -1554,26 +1519,31 @@ _QUERY.fields_by_name['group_by'].message_type = _PROPERTYREFERENCE _PROPERTYEXPRESSION.fields_by_name['property'].message_type = _PROPERTYREFERENCE _PROPERTYEXPRESSION.fields_by_name['aggregation_function'].enum_type = _PROPERTYEXPRESSION_AGGREGATIONFUNCTION -_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION; +_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION + _PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE _PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION -_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER; +_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER + _FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER _FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER _COMPOSITEFILTER.fields_by_name['operator'].enum_type = _COMPOSITEFILTER_OPERATOR _COMPOSITEFILTER.fields_by_name['filter'].message_type = _FILTER -_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER; +_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER + _PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE _PROPERTYFILTER.fields_by_name['operator'].enum_type = _PROPERTYFILTER_OPERATOR _PROPERTYFILTER.fields_by_name['value'].message_type = _VALUE -_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER; +_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER + _GQLQUERY.fields_by_name['name_arg'].message_type = 
_GQLQUERYARG _GQLQUERY.fields_by_name['number_arg'].message_type = _GQLQUERYARG _GQLQUERYARG.fields_by_name['value'].message_type = _VALUE _QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE _QUERYRESULTBATCH.fields_by_name['entity_result'].message_type = _ENTITYRESULT _QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE -_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH; +_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH + _MUTATION.fields_by_name['upsert'].message_type = _ENTITY _MUTATION.fields_by_name['update'].message_type = _ENTITY _MUTATION.fields_by_name['insert'].message_type = _ENTITY @@ -1581,7 +1551,8 @@ _MUTATION.fields_by_name['delete'].message_type = _KEY _MUTATIONRESULT.fields_by_name['insert_auto_id_key'].message_type = _KEY _READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY -_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS; +_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS + _LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS _LOOKUPREQUEST.fields_by_name['key'].message_type = _KEY _LOOKUPRESPONSE.fields_by_name['found'].message_type = _ENTITYRESULT @@ -1593,10 +1564,12 @@ _RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = _GQLQUERY _RUNQUERYRESPONSE.fields_by_name['batch'].message_type = _QUERYRESULTBATCH _BEGINTRANSACTIONREQUEST.fields_by_name['isolation_level'].enum_type = _BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL -_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST; +_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST + _COMMITREQUEST.fields_by_name['mutation'].message_type = _MUTATION _COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE -_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST; +_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST + _COMMITRESPONSE.fields_by_name['mutation_result'].message_type = _MUTATIONRESULT _ALLOCATEIDSREQUEST.fields_by_name['key'].message_type = _KEY _ALLOCATEIDSRESPONSE.fields_by_name['key'].message_type = _KEY @@ -1633,204 +1606,203 @@ DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE + class PartitionId(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PARTITIONID + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PARTITIONID - # @@protoc_insertion_point(class_scope:api.services.datastore.PartitionId) + # @@protoc_insertion_point(class_scope:api.services.datastore.PartitionId) class Key(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - - class PathElement(_message.Message): __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _KEY_PATHELEMENT - # @@protoc_insertion_point(class_scope:api.services.datastore.Key.PathElement) - DESCRIPTOR = _KEY + class PathElement(_message.Message): + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _KEY_PATHELEMENT + + # @@protoc_insertion_point(class_scope:api.services.datastore.Key.PathElement) + DESCRIPTOR = _KEY - # @@protoc_insertion_point(class_scope:api.services.datastore.Key) + # @@protoc_insertion_point(class_scope:api.services.datastore.Key) class Value(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _VALUE + 
__metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _VALUE - # @@protoc_insertion_point(class_scope:api.services.datastore.Value) + # @@protoc_insertion_point(class_scope:api.services.datastore.Value) class Property(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTY - # @@protoc_insertion_point(class_scope:api.services.datastore.Property) + # @@protoc_insertion_point(class_scope:api.services.datastore.Property) class Entity(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ENTITY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ENTITY - # @@protoc_insertion_point(class_scope:api.services.datastore.Entity) + # @@protoc_insertion_point(class_scope:api.services.datastore.Entity) class EntityResult(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ENTITYRESULT + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ENTITYRESULT - # @@protoc_insertion_point(class_scope:api.services.datastore.EntityResult) + # @@protoc_insertion_point(class_scope:api.services.datastore.EntityResult) class Query(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _QUERY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _QUERY - # @@protoc_insertion_point(class_scope:api.services.datastore.Query) + # @@protoc_insertion_point(class_scope:api.services.datastore.Query) class KindExpression(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _KINDEXPRESSION + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _KINDEXPRESSION - # @@protoc_insertion_point(class_scope:api.services.datastore.KindExpression) + # @@protoc_insertion_point(class_scope:api.services.datastore.KindExpression) class PropertyReference(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYREFERENCE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYREFERENCE - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyReference) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyReference) class PropertyExpression(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYEXPRESSION + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYEXPRESSION - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyExpression) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyExpression) class PropertyOrder(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYORDER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYORDER - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyOrder) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyOrder) class Filter(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _FILTER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _FILTER - # @@protoc_insertion_point(class_scope:api.services.datastore.Filter) + # @@protoc_insertion_point(class_scope:api.services.datastore.Filter) class CompositeFilter(_message.Message): - 
__metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _COMPOSITEFILTER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _COMPOSITEFILTER - # @@protoc_insertion_point(class_scope:api.services.datastore.CompositeFilter) + # @@protoc_insertion_point(class_scope:api.services.datastore.CompositeFilter) class PropertyFilter(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYFILTER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYFILTER - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyFilter) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyFilter) class GqlQuery(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _GQLQUERY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _GQLQUERY - # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQuery) + # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQuery) class GqlQueryArg(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _GQLQUERYARG + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _GQLQUERYARG - # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQueryArg) + # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQueryArg) class QueryResultBatch(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _QUERYRESULTBATCH + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _QUERYRESULTBATCH - # @@protoc_insertion_point(class_scope:api.services.datastore.QueryResultBatch) + # @@protoc_insertion_point(class_scope:api.services.datastore.QueryResultBatch) class Mutation(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _MUTATION + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _MUTATION - # @@protoc_insertion_point(class_scope:api.services.datastore.Mutation) + # @@protoc_insertion_point(class_scope:api.services.datastore.Mutation) class MutationResult(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _MUTATIONRESULT + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _MUTATIONRESULT - # @@protoc_insertion_point(class_scope:api.services.datastore.MutationResult) + # @@protoc_insertion_point(class_scope:api.services.datastore.MutationResult) class ReadOptions(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _READOPTIONS + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _READOPTIONS - # @@protoc_insertion_point(class_scope:api.services.datastore.ReadOptions) + # @@protoc_insertion_point(class_scope:api.services.datastore.ReadOptions) class LookupRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _LOOKUPREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _LOOKUPREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.LookupRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.LookupRequest) class LookupResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _LOOKUPRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _LOOKUPRESPONSE - # 
@@protoc_insertion_point(class_scope:api.services.datastore.LookupResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.LookupResponse) class RunQueryRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _RUNQUERYREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _RUNQUERYREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryRequest) class RunQueryResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _RUNQUERYRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _RUNQUERYRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryResponse) class BeginTransactionRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _BEGINTRANSACTIONREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _BEGINTRANSACTIONREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionRequest) class BeginTransactionResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _BEGINTRANSACTIONRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _BEGINTRANSACTIONRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionResponse) class RollbackRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ROLLBACKREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ROLLBACKREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackRequest) class RollbackResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ROLLBACKRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ROLLBACKRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackResponse) class CommitRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _COMMITREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _COMMITREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.CommitRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.CommitRequest) class CommitResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _COMMITRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _COMMITRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.CommitResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.CommitResponse) class AllocateIdsRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ALLOCATEIDSREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ALLOCATEIDSREQUEST - # 
@@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsRequest) class AllocateIdsResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ALLOCATEIDSRESPONSE - - # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsResponse) - + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ALLOCATEIDSRESPONSE + # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsResponse) DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n!com.google.api.services.datastore') diff --git a/gcloud/datastore/demo/__init__.py b/gcloud/datastore/demo/__init__.py index 9de811485b0c..c688a2196cb4 100644 --- a/gcloud/datastore/demo/__init__.py +++ b/gcloud/datastore/demo/__init__.py @@ -1,10 +1,8 @@ import os from gcloud import datastore - __all__ = ['get_dataset', 'CLIENT_EMAIL', 'DATASET_ID', 'PRIVATE_KEY_PATH'] - CLIENT_EMAIL = ('754762820716-gimou6egs2hq1rli7el2t621a1b04t9i' '@developer.gserviceaccount.com') DATASET_ID = 'gcloud-datastore-demo' @@ -12,4 +10,4 @@ def get_dataset(): # pragma NO COVER - return datastore.get_dataset(DATASET_ID, CLIENT_EMAIL, PRIVATE_KEY_PATH) + return datastore.get_dataset(DATASET_ID, CLIENT_EMAIL, PRIVATE_KEY_PATH) diff --git a/gcloud/datastore/demo/__main__.py b/gcloud/datastore/demo/__main__.py index 69b07aee5281..bcf0de5668c8 100644 --- a/gcloud/datastore/demo/__main__.py +++ b/gcloud/datastore/demo/__main__.py @@ -1,5 +1,4 @@ from gcloud import demo from gcloud import datastore - demo.DemoRunner.from_module(datastore).run() diff --git a/gcloud/datastore/demo/demo.py b/gcloud/datastore/demo/demo.py index 1818f8547b4e..c8bf94beb2f5 100644 --- a/gcloud/datastore/demo/demo.py +++ b/gcloud/datastore/demo/demo.py @@ -40,29 +40,29 @@ # You can also work inside a transaction. # (Check the official docs for explanations of what's happening here.) with dataset.transaction(): - print 'Creating and savng an entity...' - thing = dataset.entity('Thing') - thing.key(thing.key().name('foo')) - thing['age'] = 10 - thing.save() + print 'Creating and savng an entity...' + thing = dataset.entity('Thing') + thing.key(thing.key().name('foo')) + thing['age'] = 10 + thing.save() - print 'Creating and saving another entity...' - thing2 = dataset.entity('Thing') - thing2.key(thing2.key().name('bar')) - thing2['age'] = 15 - thing2.save() + print 'Creating and saving another entity...' + thing2 = dataset.entity('Thing') + thing2.key(thing2.key().name('bar')) + thing2['age'] = 15 + thing2.save() - print 'Committing the transaction...' + print 'Committing the transaction...' # Now that the transaction is commited, let's delete the entities. print thing.delete(), thing2.delete() # To rollback a transaction, just call .rollback() with dataset.transaction() as t: - thing = dataset.entity('Thing') - thing.key(thing.key().name('another')) - thing.save() - t.rollback() + thing = dataset.entity('Thing') + thing.key(thing.key().name('another')) + thing.save() + t.rollback() # Let's check if the entity was actually created: created = dataset.get_entities([thing.key()]) @@ -71,9 +71,9 @@ # Remember, a key won't be complete until the transaction is commited. # That is, while inside the transaction block, thing.key() will be incomplete. 
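A note on what "incomplete" means there: in terms of the Key class reindented later in this patch, a key is partial exactly when the last element of its path has neither an 'id' nor a 'name'. A minimal doctest-style sketch (made-up kind and id)::

    >>> Key.from_path('Thing', 1).is_partial()
    False
    >>> Key(path=[{'kind': 'Thing'}]).is_partial()
    True

The demo code below shows the same behavior end-to-end against a live transaction.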
with dataset.transaction(): - thing = dataset.entity('Thing') - thing.save() - print thing.key() # This will be partial + thing = dataset.entity('Thing') + thing.save() + print thing.key() # This will be partial print thing.key() # This will be complete diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index b2bbba1d3f0b..06913fb3223b 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -20,8 +20,7 @@ class Entity(dict): # pylint: disable=too-many-public-methods - """ - :type dataset: :class:`gcloud.datastore.dataset.Dataset` + """:type dataset: :class:`gcloud.datastore.dataset.Dataset` :param dataset: The dataset in which this entity belongs. :type kind: string @@ -60,6 +59,7 @@ class Entity(dict): # pylint: disable=too-many-public-methods >>> dict(entity) {'age': 20, 'name': 'JJ'} + """ def __init__(self, dataset=None, kind=None): diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index fc814dcfd323..5f3965213729 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -7,99 +7,98 @@ from gcloud.datastore.key import Key - INT64 = Int64ValueChecker().CheckValue def get_protobuf_attribute_and_value(val): - """Given a value, return the protobuf attribute name and proper value. - - The Protobuf API uses different attribute names - based on value types rather than inferring the type. - This method simply determines the proper attribute name - based on the type of the value provided - and returns the attribute name - as well as a properly formatted value. - - Certain value types need to be coerced into a different type (such as a - `datetime.datetime` into an integer timestamp, or a - `gcloud.datastore.key.Key` into a Protobuf representation. - This method handles that for you. - - For example: - - >>> get_protobuf_attribute_and_value(1234) - ('integer_value', 1234) - >>> get_protobuf_attribute_and_value('my_string') - ('string_value', 'my_string') - - :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`, - bool, float, integer, string - :param val: The value to be scrutinized. - - :returns: A tuple of the attribute name and proper value type. - """ - - if isinstance(val, datetime): - name = 'timestamp_microseconds' - # If the datetime is naive (no timezone), consider that it was - # intended to be UTC and replace the tzinfo to that effect. - if not val.tzinfo: - val = val.replace(tzinfo=pytz.utc) - # Regardless of what timezone is on the value, convert it to UTC. - val = val.astimezone(pytz.utc) - # Convert the datetime to a microsecond timestamp. - value = long(calendar.timegm(val.timetuple()) * 1e6) + val.microsecond - elif isinstance(val, Key): - name, value = 'key', val.to_protobuf() - elif isinstance(val, bool): - name, value = 'boolean', val - elif isinstance(val, float): - name, value = 'double', val - elif isinstance(val, (int, long)): - name, value = 'integer', INT64(val) - elif isinstance(val, basestring): - name, value = 'string', val - - return name + '_value', value + """Given a value, return the protobuf attribute name and proper value. + + The Protobuf API uses different attribute names + based on value types rather than inferring the type. + This method simply determines the proper attribute name + based on the type of the value provided + and returns the attribute name + as well as a properly formatted value. 
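For the `datetime.datetime` case handled in the body below, the "properly formatted value" is a microsecond-precision UNIX timestamp. A self-contained sketch of that arithmetic, mirroring the datetime branch with a made-up date::

    import calendar
    from datetime import datetime

    import pytz

    stamp = datetime(2014, 10, 2, 13, 35, 2, 250000, tzinfo=pytz.utc)
    # Whole seconds since the epoch, scaled to microseconds, plus the
    # sub-second remainder -- the same expression the branch below uses.
    micros = long(calendar.timegm(stamp.timetuple()) * 1e6) + stamp.microsecond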
+
+    Certain value types need to be coerced into a different type (such as a
+    `datetime.datetime` into an integer timestamp, or a
+    `gcloud.datastore.key.Key` into a Protobuf representation.
+    This method handles that for you.
+
+    For example:
+
+    >>> get_protobuf_attribute_and_value(1234)
+    ('integer_value', 1234)
+    >>> get_protobuf_attribute_and_value('my_string')
+    ('string_value', 'my_string')
+
+    :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`,
+               bool, float, integer, string
+    :param val: The value to be scrutinized.
+
+    :returns: A tuple of the attribute name and proper value type.
+    """
+
+    if isinstance(val, datetime):
+        name = 'timestamp_microseconds'
+        # If the datetime is naive (no timezone), consider that it was
+        # intended to be UTC and replace the tzinfo to that effect.
+        if not val.tzinfo:
+            val = val.replace(tzinfo=pytz.utc)
+        # Regardless of what timezone is on the value, convert it to UTC.
+        val = val.astimezone(pytz.utc)
+        # Convert the datetime to a microsecond timestamp.
+        value = long(calendar.timegm(val.timetuple()) * 1e6) + val.microsecond
+    elif isinstance(val, Key):
+        name, value = 'key', val.to_protobuf()
+    elif isinstance(val, bool):
+        name, value = 'boolean', val
+    elif isinstance(val, float):
+        name, value = 'double', val
+    elif isinstance(val, (int, long)):
+        name, value = 'integer', INT64(val)
+    elif isinstance(val, basestring):
+        name, value = 'string', val
+
+    return name + '_value', value


 def get_value_from_protobuf(pb):
-  """Given a protobuf for a Property, get the correct value.
+    """Given a protobuf for a Property, get the correct value.

-  The Cloud Datastore Protobuf API returns a Property Protobuf
-  which has one value set and the rest blank.
-  This method retrieves the the one value provided.
+    The Cloud Datastore Protobuf API returns a Property Protobuf
+    which has one value set and the rest blank.
+    This method retrieves the one value provided.

-  Some work is done to coerce the return value into a more useful type
-  (particularly in the case of a timestamp value, or a key value).
+    Some work is done to coerce the return value into a more useful type
+    (particularly in the case of a timestamp value, or a key value).

-  :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Property`
-  :param pb: The Property Protobuf.
+    :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Property`
+    :param pb: The Property Protobuf.

-  :returns: The value provided by the Protobuf.
+    :returns: The value provided by the Protobuf.
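The timestamp branch of this function inverts that encoding. A hedged round-trip sketch; the constant is an example value, not one taken from the patch::

    from datetime import datetime, timedelta

    import pytz

    micros = 1412256902250000  # microseconds since the epoch
    naive = datetime.utcfromtimestamp(0) + timedelta(microseconds=micros)
    aware = naive.replace(tzinfo=pytz.utc)  # the function returns UTC values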
+ """ - if pb.value.HasField('timestamp_microseconds_value'): - microseconds = pb.value.timestamp_microseconds_value - naive = (datetime.utcfromtimestamp(0) + - timedelta(microseconds=microseconds)) - return naive.replace(tzinfo=pytz.utc) + if pb.value.HasField('timestamp_microseconds_value'): + microseconds = pb.value.timestamp_microseconds_value + naive = (datetime.utcfromtimestamp(0) + + timedelta(microseconds=microseconds)) + return naive.replace(tzinfo=pytz.utc) - elif pb.value.HasField('key_value'): - return Key.from_protobuf(pb.value.key_value) + elif pb.value.HasField('key_value'): + return Key.from_protobuf(pb.value.key_value) - elif pb.value.HasField('boolean_value'): - return pb.value.boolean_value + elif pb.value.HasField('boolean_value'): + return pb.value.boolean_value - elif pb.value.HasField('double_value'): - return pb.value.double_value + elif pb.value.HasField('double_value'): + return pb.value.double_value - elif pb.value.HasField('integer_value'): - return pb.value.integer_value + elif pb.value.HasField('integer_value'): + return pb.value.integer_value - elif pb.value.HasField('string_value'): - return pb.value.string_value + elif pb.value.HasField('string_value'): + return pb.value.string_value - else: - return None + else: + return None diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 40cd81de0513..dce179c22cca 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -6,281 +6,281 @@ class Key(object): - """ - An immutable representation of a datastore Key. + """An immutable representation of a datastore Key. - .. automethod:: __init__ - """ - - def __init__(self, dataset=None, namespace=None, path=None): - """Constructor / initializer for a key. - - :type dataset: :class:`gcloud.datastore.dataset.Dataset` - :param dataset: A dataset instance for the key. - - :type namespace: :class:`str` - :param namespace: A namespace identifier for the key. - - :type path: sequence of dicts - :param path: Each dict must have keys 'kind' (a string) and optionally - 'name' (a string) or 'id' (an integer). - """ - self._dataset = dataset - self._namespace = namespace - self._path = path or [{'kind': ''}] - - def _clone(self): - """Duplicates the Key. - - We make a shallow copy of the :class:`gcloud.datastore.dataset.Dataset` - because it holds a reference an authenticated connection, - which we don't want to lose. - - :rtype: :class:`gcloud.datastore.key.Key` - :returns: a new `Key` instance - """ - clone = copy.deepcopy(self) - clone._dataset = self._dataset # Make a shallow copy of the Dataset. - return clone - - @classmethod - def from_protobuf(cls, pb, dataset=None): - """Factory method for creating a key based on a protobuf. - - The protobuf should be one returned from the Cloud Datastore Protobuf API. - - :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` - :param pb: The Protobuf representing the key. - - :type dataset: :class:`gcloud.datastore.dataset.Dataset` - :param dataset: A dataset instance. If not passed, defaults to an - instance whose ID is derived from pb. 
- - :rtype: :class:`gcloud.datastore.key.Key` - :returns: a new `Key` instance - """ - path = [] - for element in pb.path_element: - element_dict = {'kind': element.kind} - - if element.HasField('id'): - element_dict['id'] = element.id - - elif element.HasField('name'): - element_dict['name'] = element.name - - path.append(element_dict) - - if not dataset: - dataset = Dataset(id=pb.partition_id.dataset_id) - namespace = pb.partition_id.namespace - else: - namespace = None - - return cls(dataset, namespace, path) - - def to_protobuf(self): - """Return a protobuf corresponding to the key. - - :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key` - :returns: The Protobuf representing the key. - """ - key = datastore_pb.Key() - - # Technically a dataset is required to do anything with the key, - # but we shouldn't throw a cryptic error if one isn't provided - # in the initializer. - if self.dataset(): - # Apparently 's~' is a prefix for High-Replication and is necessary here. - # Another valid preflix is 'e~' indicating EU datacenters. - dataset_id = self.dataset().id() - if dataset_id: - if dataset_id[:2] not in ['s~', 'e~']: - dataset_id = 's~' + dataset_id - - key.partition_id.dataset_id = dataset_id - - if self._namespace: - key.partition_id.namespace = self._namespace - - for item in self.path(): - element = key.path_element.add() - if 'kind' in item: - element.kind = item['kind'] - if 'id' in item: - element.id = item['id'] - if 'name' in item: - element.name = item['name'] - - return key - - @classmethod - def from_path(cls, *args, **kwargs): - """Factory method for creating a key based on a path. - - :type args: :class:`tuple` - :param args: sequence of even length, where the first of each pair is a - string representing the 'kind' of the path element, and the - second of the pair is either a string (for the path - element's name) or an integer (for its id). - - :type kwargs: :class:`dict` - :param kwargs: Other named parameters which can be passed to - :func:`Key.__init__`. - - :rtype: :class:`gcloud.datastore.key.Key` - :returns: a new :class:`Key` instance - """ - if len(args) % 2: - raise ValueError('Must pass an even number of args.') - - path = [] - items = iter(args) - - for kind, id_or_name in izip(items, items): - entry = {'kind': kind} - if isinstance(id_or_name, basestring): - entry['name'] = id_or_name - else: - entry['id'] = id_or_name - path.append(entry) - - kwargs['path'] = path - return cls(**kwargs) - - def is_partial(self): - """Boolean test: is the key fully mapped onto a backend entity? - - :rtype: :class:`bool` - :returns: True if the last element of the key's path does not have an 'id' - or a 'name'. - """ - return (self.id_or_name() is None) - - def dataset(self, dataset=None): - """Setter / getter. - - :type dataset: :class:`gcloud.datastore.dataset.Dataset` - :param dataset: A dataset instance for the key. - - :rtype: :class:`Key` (for setter); or - :class:`gcloud.datastore.dataset.Dataset` (for getter) - :returns: a new key, cloned from self., with the given dataset (setter); - or self's dataset (getter). - """ - if dataset: - clone = self._clone() - clone._dataset = dataset - return clone - else: - return self._dataset - - def namespace(self, namespace=None): - """Setter / getter. - - :type namespace: :class:`str` - :param namespace: A namespace identifier for the key. - - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given namespace (setter); - or self's namespace (getter). 
- """ - if namespace: - clone = self._clone() - clone._namespace = namespace - return clone - else: - return self._namespace - - def path(self, path=None): - """Setter / getter. - - :type path: sequence of dicts - :param path: Each dict must have keys 'kind' (a string) and optionally - 'name' (a string) or 'id' (an integer). - - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given path (setter); - or self's path (getter). - """ - if path: - clone = self._clone() - clone._path = path - return clone - else: - return self._path - - def kind(self, kind=None): - """Setter / getter. Based on the last element of path. - - :type kind: :class:`str` - :param kind: The new kind for the key. - - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given kind (setter); - or self's kind (getter). - """ - if kind: - clone = self._clone() - clone._path[-1]['kind'] = kind - return clone - elif self.path(): - return self._path[-1]['kind'] - - def id(self, id=None): - """Setter / getter. Based on the last element of path. - - :type kind: :class:`str` - :param kind: The new kind for the key. - - :rtype: :class:`Key` (for setter); or :class:`int` (for getter) - :returns: a new key, cloned from self., with the given id (setter); - or self's id (getter). - """ - if id: - clone = self._clone() - clone._path[-1]['id'] = id - return clone - elif self.path(): - return self._path[-1].get('id') - - def name(self, name=None): - """Setter / getter. Based on the last element of path. - - :type kind: :class:`str` - :param kind: The new name for the key. - - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given name (setter); - or self's name (getter). - """ - if name: - clone = self._clone() - clone._path[-1]['name'] = name - return clone - elif self.path(): - return self._path[-1].get('name') - - def id_or_name(self): - """Getter. Based on the last element of path. - - :rtype: :class:`int` (if 'id' is set); or :class:`str` (the 'name') - :returns: True if the last element of the key's path has either an 'id' - or a 'name'. - """ - return self.id() or self.name() - - def parent(self): # pragma NO COVER - """Getter: return a new key for the next highest element in path. - - :rtype: :class:`gcloud.datastore.key.Key` - :returns: a new `Key` instance, whose path consists of all but the last - element of self's path. If self has only one path element, - return None. + .. automethod:: __init__ """ - if len(self._path) <= 1: - return None - return self.path(self.path()[:-1]) - def __repr__(self): # pragma NO COVER - return '' % self.path() + def __init__(self, dataset=None, namespace=None, path=None): + """Constructor / initializer for a key. + + :type dataset: :class:`gcloud.datastore.dataset.Dataset` + :param dataset: A dataset instance for the key. + + :type namespace: :class:`str` + :param namespace: A namespace identifier for the key. + + :type path: sequence of dicts + :param path: Each dict must have keys 'kind' (a string) and optionally + 'name' (a string) or 'id' (an integer). + """ + self._dataset = dataset + self._namespace = namespace + self._path = path or [{'kind': ''}] + + def _clone(self): + """Duplicates the Key. + + We make a shallow copy of the :class:`gcloud.datastore.dataset.Dataset` + because it holds a reference an authenticated connection, + which we don't want to lose. 
+ + :rtype: :class:`gcloud.datastore.key.Key` + :returns: a new `Key` instance + """ + clone = copy.deepcopy(self) + clone._dataset = self._dataset # Make a shallow copy of the Dataset. + return clone + + @classmethod + def from_protobuf(cls, pb, dataset=None): + """Factory method for creating a key based on a protobuf. + + The protobuf should be one returned from the Cloud Datastore + Protobuf API. + + :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` + :param pb: The Protobuf representing the key. + + :type dataset: :class:`gcloud.datastore.dataset.Dataset` + :param dataset: A dataset instance. If not passed, defaults to an + instance whose ID is derived from pb. + + :rtype: :class:`gcloud.datastore.key.Key` + :returns: a new `Key` instance + """ + path = [] + for element in pb.path_element: + element_dict = {'kind': element.kind} + + if element.HasField('id'): + element_dict['id'] = element.id + + elif element.HasField('name'): + element_dict['name'] = element.name + + path.append(element_dict) + + if not dataset: + dataset = Dataset(id=pb.partition_id.dataset_id) + namespace = pb.partition_id.namespace + else: + namespace = None + + return cls(dataset, namespace, path) + + def to_protobuf(self): + """Return a protobuf corresponding to the key. + + :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key` + :returns: The Protobuf representing the key. + """ + key = datastore_pb.Key() + + # Technically a dataset is required to do anything with the key, + # but we shouldn't throw a cryptic error if one isn't provided + # in the initializer. + if self.dataset(): + # Apparently 's~' is a prefix for High-Replication and is necessary + # here. Another valid preflix is 'e~' indicating EU datacenters. + dataset_id = self.dataset().id() + if dataset_id: + if dataset_id[:2] not in ['s~', 'e~']: + dataset_id = 's~' + dataset_id + + key.partition_id.dataset_id = dataset_id + + if self._namespace: + key.partition_id.namespace = self._namespace + + for item in self.path(): + element = key.path_element.add() + if 'kind' in item: + element.kind = item['kind'] + if 'id' in item: + element.id = item['id'] + if 'name' in item: + element.name = item['name'] + + return key + + @classmethod + def from_path(cls, *args, **kwargs): + """Factory method for creating a key based on a path. + + :type args: :class:`tuple` + :param args: sequence of even length, where the first of each pair is a + string representing the 'kind' of the path element, and + the second of the pair is either a string (for the path + element's name) or an integer (for its id). + + :type kwargs: :class:`dict` + :param kwargs: Other named parameters which can be passed to + :func:`Key.__init__`. + + :rtype: :class:`gcloud.datastore.key.Key` + :returns: a new :class:`Key` instance + """ + if len(args) % 2: + raise ValueError('Must pass an even number of args.') + + path = [] + items = iter(args) + + for kind, id_or_name in izip(items, items): + entry = {'kind': kind} + if isinstance(id_or_name, basestring): + entry['name'] = id_or_name + else: + entry['id'] = id_or_name + path.append(entry) + + kwargs['path'] = path + return cls(**kwargs) + + def is_partial(self): + """Boolean test: is the key fully mapped onto a backend entity? + + :rtype: :class:`bool` + :returns: True if the last element of the key's path does not have + an 'id' or a 'name'. + """ + return (self.id_or_name() is None) + + def dataset(self, dataset=None): + """Dataset setter / getter. 
+ + :type dataset: :class:`gcloud.datastore.dataset.Dataset` + :param dataset: A dataset instance for the key. + + :rtype: :class:`Key` (for setter); or + :class:`gcloud.datastore.dataset.Dataset` (for getter) + :returns: a new key, cloned from self., with the given dataset + (setter); or self's dataset (getter). + """ + if dataset: + clone = self._clone() + clone._dataset = dataset + return clone + else: + return self._dataset + + def namespace(self, namespace=None): + """Namespace setter / getter. + + :type namespace: :class:`str` + :param namespace: A namespace identifier for the key. + + :rtype: :class:`Key` (for setter); or :class:`str` (for getter) + :returns: a new key, cloned from self., with the given namespace + (setter); or self's namespace (getter). + """ + if namespace: + clone = self._clone() + clone._namespace = namespace + return clone + else: + return self._namespace + + def path(self, path=None): + """Path setter / getter. + + :type path: sequence of dicts + :param path: Each dict must have keys 'kind' (a string) and optionally + 'name' (a string) or 'id' (an integer). + + :rtype: :class:`Key` (for setter); or :class:`str` (for getter) + :returns: a new key, cloned from self., with the given path (setter); + or self's path (getter). + """ + if path: + clone = self._clone() + clone._path = path + return clone + else: + return self._path + + def kind(self, kind=None): + """Kind setter / getter. Based on the last element of path. + + :type kind: :class:`str` + :param kind: The new kind for the key. + + :rtype: :class:`Key` (for setter); or :class:`str` (for getter) + :returns: a new key, cloned from self., with the given kind (setter); + or self's kind (getter). + """ + if kind: + clone = self._clone() + clone._path[-1]['kind'] = kind + return clone + elif self.path(): + return self._path[-1]['kind'] + + def id(self, id=None): + """ID setter / getter. Based on the last element of path. + + :type kind: :class:`str` + :param kind: The new kind for the key. + + :rtype: :class:`Key` (for setter); or :class:`int` (for getter) + :returns: a new key, cloned from self., with the given id (setter); + or self's id (getter). + """ + if id: + clone = self._clone() + clone._path[-1]['id'] = id + return clone + elif self.path(): + return self._path[-1].get('id') + + def name(self, name=None): + """Name setter / getter. Based on the last element of path. + + :type kind: :class:`str` + :param kind: The new name for the key. + + :rtype: :class:`Key` (for setter); or :class:`str` (for getter) + :returns: a new key, cloned from self., with the given name (setter); + or self's name (getter). + """ + if name: + clone = self._clone() + clone._path[-1]['name'] = name + return clone + elif self.path(): + return self._path[-1].get('name') + + def id_or_name(self): + """Getter. Based on the last element of path. + + :rtype: :class:`int` (if 'id' is set); or :class:`str` (the 'name') + :returns: True if the last element of the key's path has either an 'id' + or a 'name'. + """ + return self.id() or self.name() + + def parent(self): # pragma NO COVER + """Getter: return a new key for the next highest element in path. + + :rtype: :class:`gcloud.datastore.key.Key` + :returns: a new `Key` instance, whose path consists of all but the last + element of self's path. If self has only one path element, + return None. 
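Composed, the path-based accessors behave as in this doctest-style sketch (made-up kinds, name, and id)::

    >>> key = Key.from_path('Parent', 'alice', 'Child', 42)
    >>> key.kind()
    'Child'
    >>> key.id()
    42
    >>> key.parent().name()
    'alice'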
+ """ + if len(self._path) <= 1: + return None + return self.path(self.path()[:-1]) + + def __repr__(self): # pragma NO COVER + return '' % self.path() diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 6f1a49472996..42369684ace1 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -8,376 +8,379 @@ class Query(object): - """A Query against the Cloud Datastore. - - This class serves as an abstraction for creating - a query over data stored in the Cloud Datastore. - - Each :class:`Query` object is immutable, - and a clone is returned whenever - any part of the query is modified:: - - >>> query = Query('MyKind') - >>> limited_query = query.limit(10) - >>> query.limit() == 10 - False - >>> limited_query.limit() == 10 - True - - You typically won't construct a :class:`Query` - by initializing it like ``Query('MyKind', dataset=...)`` - but instead use the helper - :func:`gcloud.datastore.dataset.Dataset.query` method - which generates a query that can be executed - without any additional work:: - - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> query = dataset.query('MyKind') - - :type kind: string - :param kind: The kind to query. - - :type dataset: :class:`gcloud.datastore.dataset.Dataset` - :param dataset: The dataset to query. - """ - - OPERATORS = { - '<': datastore_pb.PropertyFilter.LESS_THAN, - '<=': datastore_pb.PropertyFilter.LESS_THAN_OR_EQUAL, - '>': datastore_pb.PropertyFilter.GREATER_THAN, - '>=': datastore_pb.PropertyFilter.GREATER_THAN_OR_EQUAL, - '=': datastore_pb.PropertyFilter.EQUAL, - } - """Mapping of operator strings and their protobuf equivalents.""" - - def __init__(self, kind=None, dataset=None): - self._dataset = dataset - self._pb = datastore_pb.Query() - self._cursor = None - - if kind: - self._pb.kind.add().name = kind - - def _clone(self): - clone = copy.deepcopy(self) - clone._dataset = self._dataset # Shallow copy the dataset. - return clone - - def to_protobuf(self): - """Convert :class:`Query` instance to :class:`.datastore_v1_pb2.Query`. - - :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Query` - :returns: A Query protobuf that can be sent to the protobuf API. - """ - return self._pb - - def filter(self, expression, value): - """Filter the query based on an expression and a value. - - This will return a clone of the current :class:`Query` - filtered by the expression and value provided. - - Expressions take the form of:: - - .filter(' ', ) - - where property is a property stored on the entity in the datastore - and operator is one of ``OPERATORS`` - (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``):: - - >>> query = Query('Person') - >>> filtered_query = query.filter('name =', 'James') - >>> filtered_query = query.filter('age >', 50) - - Because each call to ``.filter()`` returns a cloned ``Query`` object - we are able to string these together:: - - >>> query = Query('Person').filter('name =', 'James').filter('age >', 50) - - :type expression: string - :param expression: An expression of a property and an operator (ie, ``=``). - - :type value: integer, string, boolean, float, None, datetime - :param value: The value to filter on. - - :rtype: :class:`Query` - :returns: A Query filtered by the expression and value provided. - """ - clone = self._clone() - - # Take an expression like 'property >=', and parse it into useful pieces. 
- property_name, operator = None, None - expression = expression.strip() - - for operator_string in self.OPERATORS: - if expression.endswith(operator_string): - operator = self.OPERATORS[operator_string] - property_name = expression[0:-len(operator_string)].strip() - - if not operator or not property_name: - raise ValueError('Invalid expression: "%s"' % expression) - - # Build a composite filter AND'd together. - composite_filter = clone._pb.filter.composite_filter - composite_filter.operator = datastore_pb.CompositeFilter.AND - - # Add the specific filter - property_filter = composite_filter.filter.add().property_filter - property_filter.property.name = property_name - property_filter.operator = operator - - # Set the value to filter on based on the type. - attr_name, pb_value = helpers.get_protobuf_attribute_and_value(value) - setattr(property_filter.value, attr_name, pb_value) - return clone - - def ancestor(self, ancestor): - """Filter the query based on an ancestor. + """A Query against the Cloud Datastore. - This will return a clone of the current :class:`Query` - filtered by the ancestor provided. + This class serves as an abstraction for creating + a query over data stored in the Cloud Datastore. - For example:: + Each :class:`Query` object is immutable, + and a clone is returned whenever + any part of the query is modified:: - >>> parent_key = Key.from_path('Person', '1') - >>> query = dataset.query('Person') - >>> filtered_query = query.ancestor(parent_key) + >>> query = Query('MyKind') + >>> limited_query = query.limit(10) + >>> query.limit() == 10 + False + >>> limited_query.limit() == 10 + True - If you don't have a :class:`gcloud.datastore.key.Key` but just - know the path, you can provide that as well:: + You typically won't construct a :class:`Query` + by initializing it like ``Query('MyKind', dataset=...)`` + but instead use the helper + :func:`gcloud.datastore.dataset.Dataset.query` method + which generates a query that can be executed + without any additional work:: - >>> query = dataset.query('Person') - >>> filtered_query = query.ancestor(['Person', '1']) - - Each call to ``.ancestor()`` returns a cloned :class:`Query`, - however a query may only have one ancestor at a time. + >>> from gcloud import datastore + >>> dataset = datastore.get_dataset('dataset-id', email, key_path) + >>> query = dataset.query('MyKind') - :type ancestor: :class:`gcloud.datastore.key.Key` or list - :param ancestor: Either a Key or a path of the form - ``['Kind', 'id or name', 'Kind', 'id or name', ...]``. + :type kind: string + :param kind: The kind to query. - :rtype: :class:`Query` - :returns: A Query filtered by the ancestor provided. + :type dataset: :class:`gcloud.datastore.dataset.Dataset` + :param dataset: The dataset to query. """ - clone = self._clone() + OPERATORS = { + '<': datastore_pb.PropertyFilter.LESS_THAN, + '<=': datastore_pb.PropertyFilter.LESS_THAN_OR_EQUAL, + '>': datastore_pb.PropertyFilter.GREATER_THAN, + '>=': datastore_pb.PropertyFilter.GREATER_THAN_OR_EQUAL, + '=': datastore_pb.PropertyFilter.EQUAL, + } + """Mapping of operator strings and their protobuf equivalents.""" - # If an ancestor filter already exists, remove it. 
- for i, filter in enumerate(clone._pb.filter.composite_filter.filter): - property_filter = filter.property_filter - if property_filter.operator == datastore_pb.PropertyFilter.HAS_ANCESTOR: - del clone._pb.filter.composite_filter.filter[i] + def __init__(self, kind=None, dataset=None): + self._dataset = dataset + self._pb = datastore_pb.Query() + self._cursor = None - # If we just deleted the last item, make sure to clear out the filter - # property all together. - if not clone._pb.filter.composite_filter.filter: - clone._pb.ClearField('filter') + if kind: + self._pb.kind.add().name = kind - # If the ancestor is None, just return (we already removed the filter). - if not ancestor: - return clone + def _clone(self): + clone = copy.deepcopy(self) + clone._dataset = self._dataset # Shallow copy the dataset. + return clone - # If a list was provided, turn it into a Key. - if isinstance(ancestor, list): - ancestor = Key.from_path(*ancestor) + def to_protobuf(self): + """Convert :class:`Query` instance to :class:`.datastore_v1_pb2.Query`. - # If we don't have a Key value by now, something is wrong. - if not isinstance(ancestor, Key): - raise TypeError('Expected list or Key, got %s.' % type(ancestor)) + :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Query` + :returns: A Query protobuf that can be sent to the protobuf API. + """ + return self._pb - # Get the composite filter and add a new property filter. - composite_filter = clone._pb.filter.composite_filter - composite_filter.operator = datastore_pb.CompositeFilter.AND + def filter(self, expression, value): + """Filter the query based on an expression and a value. - # Filter on __key__ HAS_ANCESTOR == ancestor. - ancestor_filter = composite_filter.filter.add().property_filter - ancestor_filter.property.name = '__key__' - ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR - ancestor_filter.value.key_value.CopyFrom(ancestor.to_protobuf()) + This will return a clone of the current :class:`Query` + filtered by the expression and value provided. - return clone + Expressions take the form of:: - def kind(self, *kinds): - """Get or set the Kind of the Query. + .filter(' ', ) - .. note:: - This is an **additive** operation. - That is, if the Query is set for kinds A and B, - and you call ``.kind('C')``, - it will query for kinds A, B, *and*, C. + where property is a property stored on the entity in the datastore + and operator is one of ``OPERATORS`` + (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``):: - :type kinds: string - :param kinds: The entity kinds for which to query. - - :rtype: string or :class:`Query` - :returns: If no arguments, returns the kind. - If a kind is provided, returns a clone of the :class:`Query` - with those kinds set. - """ - if kinds: - clone = self._clone() - for kind in kinds: - clone._pb.kind.add().name = kind - return clone - else: - return self._pb.kind - - def limit(self, limit=None): - """Get or set the limit of the Query. - - This is the maximum number of rows (Entities) to return for this Query. - - This is a hybrid getter / setter, used as:: - - >>> query = Query('Person') - >>> query = query.limit(100) # Set the limit to 100 rows. - >>> query.limit() # Get the limit for this query. - 100 - - :rtype: integer, None, or :class:`Query` - :returns: If no arguments, returns the current limit. - If a limit is provided, returns a clone of the :class:`Query` - with that limit set. 
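Because each setter clones, setting a limit never mutates the receiving query. A doctest-style sketch; that an unset protobuf limit reads back as ``0`` is an assumption about the generated class::

    >>> query = Query('Person')
    >>> query.limit(100).limit()
    100
    >>> query.limit()  # the original is untouched
    0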
- """ - if limit: - clone = self._clone() - clone._pb.limit = limit - return clone - else: - return self._pb.limit + >>> query = Query('Person') + >>> filtered_query = query.filter('name =', 'James') + >>> filtered_query = query.filter('age >', 50) - def dataset(self, dataset=None): - """Get or set the :class:`gcloud.datastore.dataset.Dataset` for this Query. + Because each call to ``.filter()`` returns a cloned ``Query`` object + we are able to string these together:: - This is the dataset against which the Query will be run. + >>> query = Query('Person').filter( + ... 'name =', 'James').filter('age >', 50) - This is a hybrid getter / setter, used as:: + :type expression: string + :param expression: An expression of a property and an + operator (ie, ``=``). - >>> query = Query('Person') - >>> query = query.dataset(my_dataset) # Set the dataset. - >>> query.dataset() # Get the current dataset. - + :type value: integer, string, boolean, float, None, datetime + :param value: The value to filter on. - :rtype: :class:`gcloud.datastore.dataset.Dataset`, None, or :class:`Query` - :returns: If no arguments, returns the current dataset. - If a dataset is provided, returns a clone of the :class:`Query` - with that dataset set. - """ - if dataset: - clone = self._clone() - clone._dataset = dataset - return clone - else: - return self._dataset + :rtype: :class:`Query` + :returns: A Query filtered by the expression and value provided. + """ + clone = self._clone() - def fetch(self, limit=None): - """Executes the Query and returns all matching entities. + # Take an expression like 'property >=', and parse it into + # useful pieces. + property_name, operator = None, None + expression = expression.strip() - This makes an API call to the Cloud Datastore, - sends the Query as a protobuf, - parses the responses to Entity protobufs, - and then converts them to :class:`gcloud.datastore.entity.Entity` objects. + for operator_string in self.OPERATORS: + if expression.endswith(operator_string): + operator = self.OPERATORS[operator_string] + property_name = expression[0:-len(operator_string)].strip() - For example:: + if not operator or not property_name: + raise ValueError('Invalid expression: "%s"' % expression) - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> query = dataset.query('Person').filter('name =', 'Sally') - >>> query.fetch() - [, , ...] - >>> query.fetch(1) - [] - >>> query.limit() - None - - :type limit: integer - :param limit: An optional limit to apply temporarily to this query. - That is, the Query itself won't be altered, - but the limit will be applied to the query - before it is executed. - - :rtype: list of :class:`gcloud.datastore.entity.Entity`'s - :returns: The list of entities matching this query's criteria. - """ - clone = self + # Build a composite filter AND'd together. + composite_filter = clone._pb.filter.composite_filter + composite_filter.operator = datastore_pb.CompositeFilter.AND + + # Add the specific filter + property_filter = composite_filter.filter.add().property_filter + property_filter.property.name = property_name + property_filter.operator = operator - if limit: - clone = self.limit(limit) + # Set the value to filter on based on the type. 
+ attr_name, pb_value = helpers.get_protobuf_attribute_and_value(value) + setattr(property_filter.value, attr_name, pb_value) + return clone - (entity_pbs, - end_cursor, - more_results, - skipped_results) = self.dataset().connection().run_query( - query_pb=clone.to_protobuf(), dataset_id=self.dataset().id()) + def ancestor(self, ancestor): + """Filter the query based on an ancestor. - self._cursor = end_cursor - return [Entity.from_protobuf(entity, dataset=self.dataset()) - for entity in entity_pbs] + This will return a clone of the current :class:`Query` + filtered by the ancestor provided. - def cursor(self): - """Returns cursor ID + For example:: + + >>> parent_key = Key.from_path('Person', '1') + >>> query = dataset.query('Person') + >>> filtered_query = query.ancestor(parent_key) + + If you don't have a :class:`gcloud.datastore.key.Key` but just + know the path, you can provide that as well:: + + >>> query = dataset.query('Person') + >>> filtered_query = query.ancestor(['Person', '1']) + + Each call to ``.ancestor()`` returns a cloned :class:`Query`, + however a query may only have one ancestor at a time. - .. Caution:: Invoking this method on a query that has not yet been - executed will raise a RuntimeError. - - :rtype: string - :returns: base64-encoded cursor ID string denoting the last position - consumed in the query's result set. - """ - if not self._cursor: - raise RuntimeError('No cursor') - return base64.b64encode(self._cursor) - - def with_cursor(self, start_cursor, end_cursor=None): - """Specifies the starting / ending positions in a query's result set. - - :type start_cursor: bytes - :param start_cursor: Base64-encoded cursor string specifying where to - start reading query results. - - :type end_cursor: bytes - :param end_cursor: Base64-encoded cursor string specifying where to stop - reading query results. - - :rtype: :class:`Query` - :returns: If neither cursor is passed, returns self; else, returns a - clone of the :class:`Query`, with cursors updated. - - """ - clone = self - if start_cursor or end_cursor: - clone = self._clone() - if start_cursor: - clone._pb.start_cursor = base64.b64decode(start_cursor) - if end_cursor: - clone._pb.end_cursor = base64.b64decode(end_cursor) - return clone - - def order(self, *properties): - """Adds a sort order to the query. - - Sort fields will be applied in the order specified. - - :type properties: sequence of strings - :param properties: Each value is a string giving the name of the property - on which to sort, optionally preceded by a hyphen - (-) to specify descending order. - Omitting the hyphen implies ascending order. - - :rtype: :class:`Query` - :returns: A new Query instance, ordered as specified. - """ - clone = self._clone() + :type ancestor: :class:`gcloud.datastore.key.Key` or list + :param ancestor: Either a Key or a path of the form + ``['Kind', 'id or name', 'Kind', 'id or name', ...]``. - for p in properties: - property_order = clone._pb.order.add() + :rtype: :class:`Query` + :returns: A Query filtered by the ancestor provided. + """ - if p.startswith('-'): - property_order.property.name = p[1:] - property_order.direction = property_order.DESCENDING - else: - property_order.property.name = p - property_order.direction = property_order.ASCENDING + clone = self._clone() - return clone + # If an ancestor filter already exists, remove it. 
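As the removal step below implies, an ancestor is replace-or-clear rather than additive: a later call overwrites the earlier one, and passing ``None`` deletes the filter outright. A sketch with a made-up dataset::

    >>> query = dataset.query('Person').ancestor(['Person', '1'])
    >>> query = query.ancestor(['Person', '2'])  # replaces the first ancestor
    >>> query = query.ancestor(None)             # clears the filter entirely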
+ for i, filter in enumerate(clone._pb.filter.composite_filter.filter): + property_filter = filter.property_filter + if (property_filter.operator == + datastore_pb.PropertyFilter.HAS_ANCESTOR): + del clone._pb.filter.composite_filter.filter[i] + + # If we just deleted the last item, make sure to clear out the + # filter property all together. + if not clone._pb.filter.composite_filter.filter: + clone._pb.ClearField('filter') + + # If the ancestor is None, just return (we already removed the filter). + if not ancestor: + return clone + + # If a list was provided, turn it into a Key. + if isinstance(ancestor, list): + ancestor = Key.from_path(*ancestor) + + # If we don't have a Key value by now, something is wrong. + if not isinstance(ancestor, Key): + raise TypeError('Expected list or Key, got %s.' % type(ancestor)) + + # Get the composite filter and add a new property filter. + composite_filter = clone._pb.filter.composite_filter + composite_filter.operator = datastore_pb.CompositeFilter.AND + + # Filter on __key__ HAS_ANCESTOR == ancestor. + ancestor_filter = composite_filter.filter.add().property_filter + ancestor_filter.property.name = '__key__' + ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR + ancestor_filter.value.key_value.CopyFrom(ancestor.to_protobuf()) + + return clone + + def kind(self, *kinds): + """Get or set the Kind of the Query. + + .. note:: + This is an **additive** operation. + That is, if the Query is set for kinds A and B, + and you call ``.kind('C')``, + it will query for kinds A, B, *and*, C. + + :type kinds: string + :param kinds: The entity kinds for which to query. + + :rtype: string or :class:`Query` + :returns: If no arguments, returns the kind. + If a kind is provided, returns a clone of the :class:`Query` + with those kinds set. + """ + if kinds: + clone = self._clone() + for kind in kinds: + clone._pb.kind.add().name = kind + return clone + else: + return self._pb.kind + + def limit(self, limit=None): + """Get or set the limit of the Query. + + This is the maximum number of rows (Entities) to return for this Query. + + This is a hybrid getter / setter, used as:: + + >>> query = Query('Person') + >>> query = query.limit(100) # Set the limit to 100 rows. + >>> query.limit() # Get the limit for this query. + 100 + + :rtype: integer, None, or :class:`Query` + :returns: If no arguments, returns the current limit. + If a limit is provided, returns a clone of the :class:`Query` + with that limit set. + """ + if limit: + clone = self._clone() + clone._pb.limit = limit + return clone + else: + return self._pb.limit + + def dataset(self, dataset=None): + """Get or set the :class:`.datastore.dataset.Dataset` for this Query. + + This is the dataset against which the Query will be run. + + This is a hybrid getter / setter, used as:: + + >>> query = Query('Person') + >>> query = query.dataset(my_dataset) # Set the dataset. + >>> query.dataset() # Get the current dataset. + + + :rtype: :class:`gcloud.datastore.dataset.Dataset`, None, + or :class:`Query` + :returns: If no arguments, returns the current dataset. + If a dataset is provided, returns a clone of the + :class:`Query` with that dataset set. + """ + if dataset: + clone = self._clone() + clone._dataset = dataset + return clone + else: + return self._dataset + + def fetch(self, limit=None): + """Executes the Query and returns all matching entities. 
+
+        This makes an API call to the Cloud Datastore, sends the Query as a
+        protobuf, parses the responses to Entity protobufs, and then converts
+        them to :class:`gcloud.datastore.entity.Entity` objects.
+
+        For example::
+
+          >>> from gcloud import datastore
+          >>> dataset = datastore.get_dataset('dataset-id', email, key_path)
+          >>> query = dataset.query('Person').filter('name =', 'Sally')
+          >>> query.fetch()
+          [<Entity object>, <Entity object>, ...]
+          >>> query.fetch(1)
+          [<Entity object>]
+          >>> query.limit()
+          None
+
+        :type limit: integer
+        :param limit: An optional limit to apply temporarily to this query.
+                      That is, the Query itself won't be altered,
+                      but the limit will be applied to the query
+                      before it is executed.
+
+        :rtype: list of :class:`gcloud.datastore.entity.Entity` objects
+        :returns: The list of entities matching this query's criteria.
+        """
+        clone = self
+
+        if limit:
+            clone = self.limit(limit)
+
+        (entity_pbs,
+         end_cursor,
+         more_results,
+         skipped_results) = self.dataset().connection().run_query(
+             query_pb=clone.to_protobuf(), dataset_id=self.dataset().id())
+
+        self._cursor = end_cursor
+        return [Entity.from_protobuf(entity, dataset=self.dataset())
+                for entity in entity_pbs]
+
+    def cursor(self):
+        """Returns the cursor ID.
+
+        .. Caution:: Invoking this method on a query that has not yet been
+          executed will raise a RuntimeError.
+
+        :rtype: string
+        :returns: base64-encoded cursor ID string denoting the last position
+                  consumed in the query's result set.
+        """
+        if not self._cursor:
+            raise RuntimeError('No cursor')
+        return base64.b64encode(self._cursor)
+
+    def with_cursor(self, start_cursor, end_cursor=None):
+        """Specifies the starting / ending positions in a query's result set.
+
+        :type start_cursor: bytes
+        :param start_cursor: Base64-encoded cursor string specifying where to
+                             start reading query results.
+
+        :type end_cursor: bytes
+        :param end_cursor: Base64-encoded cursor string specifying where to
+                           stop reading query results.
+
+        :rtype: :class:`Query`
+        :returns: If neither cursor is passed, returns self; else, returns a
+                  clone of the :class:`Query`, with cursors updated.
+        """
+        clone = self
+        if start_cursor or end_cursor:
+            clone = self._clone()
+        if start_cursor:
+            clone._pb.start_cursor = base64.b64decode(start_cursor)
+        if end_cursor:
+            clone._pb.end_cursor = base64.b64decode(end_cursor)
+        return clone
+
+    def order(self, *properties):
+        """Adds a sort order to the query.
+
+        Sort fields will be applied in the order specified.
+
+        :type properties: sequence of strings
+        :param properties: Each value is a string giving the name of the
+                           property on which to sort, optionally preceded by a
+                           hyphen (-) to specify descending order.
+                           Omitting the hyphen implies ascending order.
+
+        :rtype: :class:`Query`
+        :returns: A new Query instance, ordered as specified.
+        """
+        clone = self._clone()
+
+        for p in properties:
+            property_order = clone._pb.order.add()
+
+            if p.startswith('-'):
+                property_order.property.name = p[1:]
+                property_order.direction = property_order.DESCENDING
+            else:
+                property_order.property.name = p
+                property_order.direction = property_order.ASCENDING
+
+        return clone
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index b243e52cb2a4..a3fdaa900d3a 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -862,6 +862,7 @@ def request(self, **kw):
 
 
 class _Monkey(object):
+    # context-manager for replacing module names in the scope of a test.
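+    # A hedged usage sketch (the names below are illustrative, not from
+    # this test suite):
+    #
+    #     with _Monkey(some_module, some_attr=stub):
+    #         ...  # code under test sees ``stub``
+    #
+    # On exit, the original attribute values are restored.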
     def __init__(self, module, **kw):
         self.module = module
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index 21d45f6540c1..720143e59f4f 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -1,6 +1,5 @@
 import unittest2
 
-
 _MARKER = object()
 _DATASET_ID = 'DATASET'
 _KIND = 'KIND'
@@ -149,7 +148,7 @@ def test_save_w_transaction_w_partial_key(self):
         self.assertEqual(entity['foo'], 'Foo')
         self.assertEqual(connection._saved,
                          (_DATASET_ID, 'KEY', {'foo': 'Foo'}))
-        self.assertEqual(transaction._added, (entity,))
+        self.assertEqual(transaction._added, (entity, ))
         self.assertEqual(key._path, None)
 
     def test_save_w_returned_key(self):
@@ -242,4 +241,4 @@ def __nonzero__(self):
         __bool__ = __nonzero__
 
     def add_auto_id_entity(self, entity):
-        self._added += (entity,)
+        self._added += (entity, )
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index c218c544ee0e..7eda4ecc8255 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -3,251 +3,252 @@
 
 
 class Transaction(object):
-  """An abstraction representing datastore Transactions.
-
-  Transactions can be used
-  to build up a bulk mutation
-  as well as provide isolation.
-
-  For example,
-  the following snippet of code
-  will put the two ``save`` operations
-  (either ``insert_auto_id`` or ``upsert``)
-  into the same mutation, and execute those within a transaction::
-
-    >>> from gcloud import datastore
-    >>> dataset = datastore.get_dataset('dataset-id', email, key_path)
-    >>> with dataset.transaction(bulk_mutation=True):  # The default.
-    ...   entity1.save()
-    ...   entity2.save()
-
-  By default, the transaction is rolled back if the transaction block
-  exits with an error::
-
-    >>> from gcloud import datastore
-    >>> dataset = datastore.get_dataset('dataset-id', email, key_path)
-    >>> with dataset.transaction() as t:
-    ...   do_some_work()
-    ...   raise Exception()  # rolls back
-
-  If the transaction block exits without an exception,
-  it will commit by default.
-
-  .. warning::
-    Inside a transaction,
-    automatically assigned IDs for entities
-    will not be available at save time!
-    That means,
-    if you try::
-
+    """An abstraction representing datastore Transactions.
+
+    Transactions can be used
+    to build up a bulk mutation
+    as well as provide isolation.
+
+    For example,
+    the following snippet of code
+    will put the two ``save`` operations
+    (either ``insert_auto_id`` or ``upsert``)
+    into the same mutation, and execute those within a transaction::
+
+      >>> from gcloud import datastore
+      >>> dataset = datastore.get_dataset('dataset-id', email, key_path)
+      >>> with dataset.transaction(bulk_mutation=True):  # The default.
+      ...     entity1.save()
+      ...     entity2.save()
+
+    By default, the transaction is rolled back if the transaction block
+    exits with an error::
+
+      >>> from gcloud import datastore
+      >>> dataset = datastore.get_dataset('dataset-id', email, key_path)
+      >>> with dataset.transaction() as t:
+      ...     do_some_work()
+      ...     raise Exception()  # rolls back
+
+    If the transaction block exits without an exception,
+    it will commit by default.
+
+    .. warning::
+      Inside a transaction,
+      automatically assigned IDs for entities
+      will not be available at save time!
+      That means,
+      if you try::
+
+        >>> with dataset.transaction():
+        ...     entity = dataset.entity('Thing').save()
+
+      ``entity`` won't have a complete Key
+      until the transaction is committed.
+ + Once you exit the transaction (or call ``commit()``), + the automatically generated ID will be assigned + to the entity:: + + >>> with dataset.transaction(): + ... entity = dataset.entity('Thing') + ... entity.save() + ... assert entity.key().is_partial() # There is no ID on this key. + >>> assert not entity.key().is_partial() # There *is* an ID. + + .. warning:: + If you're using the automatically generated ID functionality, + it's important that you only use + :func:`gcloud.datastore.entity.Entity.save` + rather than using + :func:`gcloud.datastore.connection.Connection.save_entity` directly. + + If you mix the two, + the results will have extra IDs generated + and it could jumble things up. + + If you don't want to use the context manager + you can initialize a transaction manually:: + + >>> transaction = dataset.transaction() + >>> transaction.begin() + + >>> entity = dataset.entity('Thing') + >>> entity.save() + + >>> if error: + ... transaction.rollback() + ... else: + ... transaction.commit() + + For now, + this library will enforce a rule of + one transaction per connection. + That is, + If you want to work with two transactions at the same time + (for whatever reason), + that must happen over two separate + :class:`gcloud.datastore.connection.Connection` s. + + For example, this is perfectly valid:: + + >>> from gcloud import datastore + >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset.transaction(): - ... entity = dataset.entity('Thing').save() - - ``entity`` won't have a complete Key - until the transaction is committed. + ... dataset.entity('Thing').save() - Once you exit the transaction (or call ``commit()``), - the automatically generated ID will be assigned - to the entity:: + However, this **wouldn't** be acceptable:: + >>> from gcloud import datastore + >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset.transaction(): - ... entity = dataset.entity('Thing') - ... entity.save() - ... assert entity.key().is_partial() # There is no ID on this key. - >>> assert not entity.key().is_partial() # There *is* an ID on this key. - - .. warning:: - If you're using the automatically generated ID functionality, - it's important that you only use - :func:`gcloud.datastore.entity.Entity.save` - rather than using - :func:`gcloud.datastore.connection.Connection.save_entity` directly. - - If you mix the two, - the results will have extra IDs generated - and it could jumble things up. - - If you don't want to use the context manager - you can initialize a transaction manually:: - - >>> transaction = dataset.transaction() - >>> transaction.begin() - - >>> entity = dataset.entity('Thing') - >>> entity.save() - - >>> if error: - ... transaction.rollback() - ... else: - ... transaction.commit() - - For now, - this library will enforce a rule of - one transaction per connection. - That is, - If you want to work with two transactions at the same time - (for whatever reason), - that must happen over two separate - :class:`gcloud.datastore.connection.Connection` s. - - For example, this is perfectly valid:: - - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> with dataset.transaction(): - ... dataset.entity('Thing').save() - - However, this **wouldn't** be acceptable:: - - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> with dataset.transaction(): - ... dataset.entity('Thing').save() - ... 
with dataset.transaction(): - ... dataset.entity('Thing').save() - - Technically, it looks like the Protobuf API supports this type of pattern, - however it makes the code particularly messy. - If you really need to nest transactions, try:: - - >>> from gcloud import datastore - >>> dataset1 = datastore.get_dataset('dataset-id', email, key_path) - >>> dataset2 = datastore.get_dataset('dataset-id', email, key_path) - >>> with dataset1.transaction(): - ... dataset1.entity('Thing').save() - ... with dataset2.transaction(): - ... dataset2.entity('Thing').save() - - :type dataset: :class:`gcloud.datastore.dataset.Dataset` - :param dataset: The dataset to which this :class:`Transaction` belongs. - """ - - def __init__(self, dataset): - self._dataset = dataset - self._id = None - self._mutation = datastore_pb.Mutation() - self._auto_id_entities = [] - - def connection(self): - """Getter for the current connection over which the transaction will run. - - :rtype: :class:`gcloud.datastore.connection.Connection` - :returns: The connection over which the transaction will run. - """ - - return self.dataset().connection() - - def dataset(self): - """Getter for the current dataset. - - :rtype: :class:`gcloud.datastore.dataset.Dataset` - :returns: The dataset to which the transaction belongs. - """ - - return self._dataset - - def id(self): - """Getter for the transaction ID. - - :rtype: string - :returns: The ID of the current transaction. + ... dataset.entity('Thing').save() + ... with dataset.transaction(): + ... dataset.entity('Thing').save() + + Technically, it looks like the Protobuf API supports this type of pattern, + however it makes the code particularly messy. + If you really need to nest transactions, try:: + + >>> from gcloud import datastore + >>> dataset1 = datastore.get_dataset('dataset-id', email, key_path) + >>> dataset2 = datastore.get_dataset('dataset-id', email, key_path) + >>> with dataset1.transaction(): + ... dataset1.entity('Thing').save() + ... with dataset2.transaction(): + ... dataset2.entity('Thing').save() + + :type dataset: :class:`gcloud.datastore.dataset.Dataset` + :param dataset: The dataset to which this :class:`Transaction` belongs. """ - return self._id - - def mutation(self): - """Getter for the current mutation. - - Every transaction is committed - with a single Mutation - representing the 'work' to be done as part of the transaction. - Inside a transaction, - calling ``save()`` on an entity - builds up the mutation. - This getter returns the Mutation protobuf - that has been built-up so far. - - :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Mutation` - :returns: The Mutation protobuf to be sent in the commit request. - """ - return self._mutation - - def add_auto_id_entity(self, entity): - """Adds an entity to the list of entities to update with IDs. - - When an entity has a partial key, - calling ``save()`` adds an insert_auto_id entry in the mutation. - In order to make sure we update the Entity - once the transaction is committed, - we need to keep track of which entities to update - (and the order is important). - - When you call ``save()`` on an entity inside a transaction, - if the entity has a partial key, - it adds itself to the list of entities to be updated - once the transaction is committed - by calling this method. - """ - self._auto_id_entities.append(entity) - - def begin(self): - """Begins a transaction. 
- - This method is called automatically when entering a with statement, - however it can be called explicitly - if you don't want to use a context manager. - """ - self._id = self.connection().begin_transaction(self.dataset().id()) - self.connection().transaction(self) - - def rollback(self): - """Rolls back the current transaction. - - This method has necessary side-effects: - - - Sets the current connection's transaction reference to None. - - Sets the current transaction's ID to None. - """ - self.connection().rollback_transaction(self.dataset().id()) - self.connection().transaction(None) - self._id = None - - def commit(self): - """Commits the transaction. - - This is called automatically upon exiting a with statement, - however it can be called explicitly - if you don't want to use a context manager. - - This method has necessary side-effects: - - - Sets the current connection's transaction reference to None. - - Sets the current transaction's ID to None. - - Updates paths for any keys that needed an automatically generated ID. - """ - # It's possible that they called commit() already, in which case - # we shouldn't do any committing of our own. - if self.connection().transaction(): - result = self.connection().commit(self.dataset().id(), self.mutation()) - - # For any of the auto-id entities, make sure we update their keys. - for i, entity in enumerate(self._auto_id_entities): - key_pb = result.insert_auto_id_key[i] - key = Key.from_protobuf(key_pb) - entity.key(entity.key().path(key.path())) - - # Tell the connection that the transaction is over. - self.connection().transaction(None) - - # Clear our own ID in case this gets accidentally reused. - self._id = None - - def __enter__(self): - self.begin() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - else: - self.rollback() + def __init__(self, dataset): + self._dataset = dataset + self._id = None + self._mutation = datastore_pb.Mutation() + self._auto_id_entities = [] + + def connection(self): + """Getter for current connection over which the transaction will run. + + :rtype: :class:`gcloud.datastore.connection.Connection` + :returns: The connection over which the transaction will run. + """ + + return self.dataset().connection() + + def dataset(self): + """Getter for the current dataset. + + :rtype: :class:`gcloud.datastore.dataset.Dataset` + :returns: The dataset to which the transaction belongs. + """ + + return self._dataset + + def id(self): + """Getter for the transaction ID. + + :rtype: string + :returns: The ID of the current transaction. + """ + + return self._id + + def mutation(self): + """Getter for the current mutation. + + Every transaction is committed + with a single Mutation + representing the 'work' to be done as part of the transaction. + Inside a transaction, + calling ``save()`` on an entity + builds up the mutation. + This getter returns the Mutation protobuf + that has been built-up so far. + + :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Mutation` + :returns: The Mutation protobuf to be sent in the commit request. + """ + return self._mutation + + def add_auto_id_entity(self, entity): + """Adds an entity to the list of entities to update with IDs. + + When an entity has a partial key, + calling ``save()`` adds an insert_auto_id entry in the mutation. + In order to make sure we update the Entity + once the transaction is committed, + we need to keep track of which entities to update + (and the order is important). 
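+
+        (For example: if entities ``a`` and then ``b`` are saved with
+        partial keys, ``commit()`` matches ``a`` with the first returned
+        auto-ID key and ``b`` with the second.)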
+ + When you call ``save()`` on an entity inside a transaction, + if the entity has a partial key, + it adds itself to the list of entities to be updated + once the transaction is committed + by calling this method. + """ + self._auto_id_entities.append(entity) + + def begin(self): + """Begins a transaction. + + This method is called automatically when entering a with statement, + however it can be called explicitly + if you don't want to use a context manager. + """ + self._id = self.connection().begin_transaction(self.dataset().id()) + self.connection().transaction(self) + + def rollback(self): + """Rolls back the current transaction. + + This method has necessary side-effects: + + - Sets the current connection's transaction reference to None. + - Sets the current transaction's ID to None. + """ + self.connection().rollback_transaction(self.dataset().id()) + self.connection().transaction(None) + self._id = None + + def commit(self): + """Commits the transaction. + + This is called automatically upon exiting a with statement, + however it can be called explicitly + if you don't want to use a context manager. + + This method has necessary side-effects: + + - Sets the current connection's transaction reference to None. + - Sets the current transaction's ID to None. + - Updates paths for any keys that needed an automatically generated ID. + """ + # It's possible that they called commit() already, in which case + # we shouldn't do any committing of our own. + if self.connection().transaction(): + result = self.connection().commit(self.dataset().id(), + self.mutation()) + + # For any of the auto-id entities, make sure we update their keys. + for i, entity in enumerate(self._auto_id_entities): + key_pb = result.insert_auto_id_key[i] + key = Key.from_protobuf(key_pb) + entity.key(entity.key().path(key.path())) + + # Tell the connection that the transaction is over. + self.connection().transaction(None) + + # Clear our own ID in case this gets accidentally reused. 
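+        # (A stale ID left here could otherwise be sent with a later,
+        # unrelated commit if this object were reused.)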
+ self._id = None + + def __enter__(self): + self.begin() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + else: + self.rollback() diff --git a/gcloud/demo.py b/gcloud/demo.py index 40fe3b6869cc..28aa4108c69b 100644 --- a/gcloud/demo.py +++ b/gcloud/demo.py @@ -6,103 +6,104 @@ class DemoRunner(object): - """An interactive runner of demo scripts.""" + """An interactive runner of demo scripts.""" - KEYPRESS_DELAY = 0.05 - GLOBALS, LOCALS = globals(), locals() - CODE, COMMENT = 'code', 'comment' + KEYPRESS_DELAY = 0.05 + GLOBALS, LOCALS = globals(), locals() + CODE, COMMENT = 'code', 'comment' - def __init__(self, fp): - self.lines = [line.rstrip() for line in fp.readlines()] + def __init__(self, fp): + self.lines = [line.rstrip() for line in fp.readlines()] - @classmethod - def from_module(cls, module): - path = os.path.join(os.path.dirname(module.__file__), - 'demo', 'demo.py') + @classmethod + def from_module(cls, module): + path = os.path.join(os.path.dirname(module.__file__), + 'demo', 'demo.py') - return cls(open(path, 'r')) + return cls(open(path, 'r')) - def run(self): - line_groups = itertools.groupby(self.lines, self.get_line_type) + def run(self): + line_groups = itertools.groupby(self.lines, self.get_line_type) - for group_type, lines in line_groups: - if group_type == self.COMMENT: - self.write(lines) + for group_type, lines in line_groups: + if group_type == self.COMMENT: + self.write(lines) - elif group_type == self.CODE: - self.code(lines) + elif group_type == self.CODE: + self.code(lines) - interact('(Hit CTRL-D to exit...)', local=self.LOCALS) + interact('(Hit CTRL-D to exit...)', local=self.LOCALS) - def wait(self): - raw_input() + def wait(self): + raw_input() - @classmethod - def get_line_type(cls, line): - if line.startswith('#'): - return cls.COMMENT - else: - return cls.CODE + @classmethod + def get_line_type(cls, line): + if line.startswith('#'): + return cls.COMMENT + else: + return cls.CODE - @staticmethod - def get_indent_level(line): - if not line.strip(): - return None - return len(line) - len(line.lstrip()) + @staticmethod + def get_indent_level(line): + if not line.strip(): + return None + return len(line) - len(line.lstrip()) - def write(self, lines): - print - print '\n'.join(lines), - self.wait() + def write(self, lines): + print + print '\n'.join(lines), + self.wait() - def code(self, lines): - code_lines = [] - - for line in lines: - indent = self.get_indent_level(line) - - # If we've completed a block, - # run whatever code was built up in code_lines. - if indent == 0: - self._execute_lines(code_lines) + def code(self, lines): code_lines = [] - # Print the prefix for the line depending on the indentation level. - if indent == 0: - print '>>> ', - elif indent > 0: - print '\n... ', - elif indent is None: - continue - - # Break the line into the code section and the comment section. - if '#' in line: - code, comment = line.split('#', 2) - else: - code, comment = line, None - - # 'Type' out the comment section. - for char in code.rstrip(): - time.sleep(self.KEYPRESS_DELAY) - sys.stdout.write(char) - sys.stdout.flush() - - # Print the comment section (not typed out). - if comment: - sys.stdout.write(' # %s' % comment.strip()) - - # Add the current line to the list of lines to be run in this block. - code_lines.append(line) - - # If we had any code built up that wasn't part of a completed block - # (ie, the lines ended with an indented line), - # run that code. 
- if code_lines: - self._execute_lines(code_lines) - - def _execute_lines(self, lines): - if lines: - self.wait() - - # Yes, this is crazy unsafe... but it's demo code. - exec('\n'.join(lines), self.GLOBALS, self.LOCALS) + for line in lines: + indent = self.get_indent_level(line) + + # If we've completed a block, + # run whatever code was built up in code_lines. + if indent == 0: + self._execute_lines(code_lines) + code_lines = [] + + # Print the prefix for the line depending on the indentation level. + if indent == 0: + print '>>> ', + elif indent > 0: + print '\n... ', + elif indent is None: + continue + + # Break the line into the code section and the comment section. + if '#' in line: + code, comment = line.split('#', 2) + else: + code, comment = line, None + + # 'Type' out the comment section. + for char in code.rstrip(): + time.sleep(self.KEYPRESS_DELAY) + sys.stdout.write(char) + sys.stdout.flush() + + # Print the comment section (not typed out). + if comment: + sys.stdout.write(' # %s' % comment.strip()) + + # Add the current line to the list of lines to be run + # in this block. + code_lines.append(line) + + # If we had any code built up that wasn't part of a completed block + # (ie, the lines ended with an indented line), + # run that code. + if code_lines: + self._execute_lines(code_lines) + + def _execute_lines(self, lines): + if lines: + self.wait() + + # Yes, this is crazy unsafe... but it's demo code. + exec('\n'.join(lines), self.GLOBALS, self.LOCALS) diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index de71d5ee9580..405ee2393e55 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -29,7 +29,6 @@ (akin to a file path on a remote machine). """ - __version__ = '0.1' SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control', @@ -38,68 +37,68 @@ def get_connection(project, client_email, private_key_path): - """Shortcut method to establish a connection to Cloud Storage. + """Shortcut method to establish a connection to Cloud Storage. - Use this if you are going to access several buckets - with the same set of credentials: + Use this if you are going to access several buckets + with the same set of credentials: - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket1 = connection.get_bucket('bucket1') - >>> bucket2 = connection.get_bucket('bucket2') + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, key_path) + >>> bucket1 = connection.get_bucket('bucket1') + >>> bucket2 = connection.get_bucket('bucket2') - :type project: string - :param project: The name of the project to connect to. + :type project: string + :param project: The name of the project to connect to. - :type client_email: string - :param client_email: The e-mail attached to the service account. + :type client_email: string + :param client_email: The e-mail attached to the service account. - :type private_key_path: string - :param private_key_path: The path to a private key file (this file was - given to you when you created the service - account). + :type private_key_path: string + :param private_key_path: The path to a private key file (this file was + given to you when you created the service + account). - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: A connection defined with the proper credentials. - """ + :rtype: :class:`gcloud.storage.connection.Connection` + :returns: A connection defined with the proper credentials. 
+    """
 
-  from gcloud.credentials import Credentials
-  from gcloud.storage.connection import Connection
+    from gcloud.credentials import Credentials
+    from gcloud.storage.connection import Connection
 
-  credentials = Credentials.get_for_service_account(
-      client_email, private_key_path, scope=SCOPE)
-  return Connection(project=project, credentials=credentials)
+    credentials = Credentials.get_for_service_account(
+        client_email, private_key_path, scope=SCOPE)
+    return Connection(project=project, credentials=credentials)
 
 
 def get_bucket(bucket_name, project, client_email, private_key_path):
-  """Shortcut method to establish a connection to a particular bucket.
-
-  You'll generally use this as the first call to working with the API:
-
-  >>> from gcloud import storage
-  >>> bucket = storage.get_bucket(bucket_name, project, email, key_path)
-  >>> # Now you can do things with the bucket.
-  >>> bucket.exists('/path/to/file.txt')
-  False
-
-  :type bucket_name: string
-  :param bucket_name: The id of the bucket you want to use.
-                      This is akin to a disk name on a file system.
-
-  :type project: string
-  :param project: The name of the project to connect to.
-
-  :type client_email: string
-  :param client_email: The e-mail attached to the service account.
-
-  :type private_key_path: string
-  :param private_key_path: The path to a private key file (this file was
-                           given to you when you created the service
-                           account).
-
-  :rtype: :class:`gcloud.storage.bucket.Bucket`
-  :returns: A bucket with a connection using the provided credentials.
-  """
-
-  connection = get_connection(project, client_email, private_key_path)
-  return connection.get_bucket(bucket_name)
+    """Shortcut method to establish a connection to a particular bucket.
+
+    You'll generally use this as the first call to working with the API:
+
+    >>> from gcloud import storage
+    >>> bucket = storage.get_bucket(bucket_name, project, email, key_path)
+    >>> # Now you can do things with the bucket.
+    >>> bucket.exists('/path/to/file.txt')
+    False
+
+    :type bucket_name: string
+    :param bucket_name: The id of the bucket you want to use.
+                        This is akin to a disk name on a file system.
+
+    :type project: string
+    :param project: The name of the project to connect to.
+
+    :type client_email: string
+    :param client_email: The e-mail attached to the service account.
+
+    :type private_key_path: string
+    :param private_key_path: The path to a private key file (this file was
+                             given to you when you created the service
+                             account).
+
+    :rtype: :class:`gcloud.storage.bucket.Bucket`
+    :returns: A bucket with a connection using the provided credentials.
+    """
+
+    connection = get_connection(project, client_email, private_key_path)
+    return connection.get_bucket(bucket_name)
diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py
index 9b4f38370efc..b0546e212bc9 100644
--- a/gcloud/storage/acl.py
+++ b/gcloud/storage/acl.py
@@ -1,5 +1,4 @@
-"""
-This module makes it simple to interact
+"""This module makes it simple to interact
 with the access control lists that Cloud Storage provides.
 :class:`gcloud.storage.bucket.Bucket` has a getting method
@@ -74,332 +73,335 @@
 
 
 class ACL(object):
-  """Container class representing a list of access controls."""
+    """Container class representing a list of access controls."""
 
-  class Role(object):
-    """Enum style class for role-type constants."""
+    class Role(object):
+        """Enum style class for role-type constants."""
 
-    Reader = 'READER'
-    Writer = 'WRITER'
-    Owner = 'OWNER'
+        Reader = 'READER'
+        Writer = 'WRITER'
+        Owner = 'OWNER'
 
-  class Entity(object):
-    """Class representing a set of roles for an entity.
-
-    This is a helper class that you likely won't ever construct
-    outside of using the factory methods on the :class:`ACL` object.
-    """
+    class Entity(object):
+        """Class representing a set of roles for an entity.
+
+        This is a helper class that you likely won't ever construct
+        outside of using the factory methods on the :class:`ACL` object.
+        """
 
-    def __init__(self, entity_type, identifier=None):
-      """
-      :type entity_type: string
-      :param entity_type: The type of entity (ie, 'group' or 'user').
-
-      :type identifier: string
-      :param identifier: The ID or e-mail of the entity.
-                         For the special entity types (like 'allUsers') this
-                         is optional.
-      """
-
-      self.identifier = identifier
-      self.roles = set([])
-      self.type = entity_type
+        def __init__(self, entity_type, identifier=None):
+            """Entity constructor.
+
+            :type entity_type: string
+            :param entity_type: The type of entity (ie, 'group' or 'user').
+
+            :type identifier: string
+            :param identifier: The ID or e-mail of the entity. For the special
+                               entity types (like 'allUsers') this is optional.
+            """
+
+            self.identifier = identifier
+            self.roles = set([])
+            self.type = entity_type
 
-    def __str__(self):
-      if not self.identifier:
-        return str(self.type)
-      else:
-        return '{self.type}-{self.identifier}'.format(self=self)
+        def __str__(self):
+            if not self.identifier:
+                return str(self.type)
+            else:
+                return '{self.type}-{self.identifier}'.format(self=self)
 
-    def __repr__(self):  # pragma NO COVER
-      return '<ACL Entity: {self} ({roles})>'.format(
-          self=self, roles=', '.join(self.roles))
+        def __repr__(self):  # pragma NO COVER
+            return '<ACL Entity: {self} ({roles})>'.format(
+                self=self, roles=', '.join(self.roles))
 
-    def get_roles(self):
-      """Get the list of roles permitted by this entity.
-
-      :rtype: list of strings
-      :returns: The list of roles associated with this entity.
-      """
-
-      return self.roles
+        def get_roles(self):
+            """Get the list of roles permitted by this entity.
+
+            :rtype: list of strings
+            :returns: The list of roles associated with this entity.
+            """
+
+            return self.roles
 
-    def grant(self, role):
-      """Add a role to the entity.
-
-      :type role: string
-      :param role: The role to add to the entity.
-
-      :rtype: :class:`ACL.Entity`
-      :returns: The entity class.
-      """
-
-      self.roles.add(role)
-      return self
+        def grant(self, role):
+            """Add a role to the entity.
+
+            :type role: string
+            :param role: The role to add to the entity.
+
+            :rtype: :class:`ACL.Entity`
+            :returns: The entity class.
+            """
+
+            self.roles.add(role)
+            return self
 
-    def revoke(self, role):
-      """Remove a role from the entity.
-
-      :type role: string
-      :param role: The role to remove from the entity.
-
-      :rtype: :class:`ACL.Entity`
-      :returns: The entity class.
-      """
+        def revoke(self, role):
+            """Remove a role from the entity.
+
+            :type role: string
+            :param role: The role to remove from the entity.
+
+            :rtype: :class:`ACL.Entity`
+            :returns: The entity class.
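+
+            (Note that ``grant()`` and ``revoke()`` both return the entity
+            itself, so calls may be chained.)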
+ """ - if role in self.roles: - self.roles.remove(role) - return self + if role in self.roles: + self.roles.remove(role) + return self - def grant_read(self): - """Grant read access to the current entity.""" + def grant_read(self): + """Grant read access to the current entity.""" - return self.grant(ACL.Role.Reader) + return self.grant(ACL.Role.Reader) - def grant_write(self): - """Grant write access to the current entity.""" + def grant_write(self): + """Grant write access to the current entity.""" - return self.grant(ACL.Role.Writer) + return self.grant(ACL.Role.Writer) - def grant_owner(self): - """Grant owner access to the current entity.""" + def grant_owner(self): + """Grant owner access to the current entity.""" - return self.grant(ACL.Role.Owner) + return self.grant(ACL.Role.Owner) - def revoke_read(self): - """Revoke read access from the current entity.""" + def revoke_read(self): + """Revoke read access from the current entity.""" - return self.revoke(ACL.Role.Reader) + return self.revoke(ACL.Role.Reader) - def revoke_write(self): - """Revoke write access from the current entity.""" + def revoke_write(self): + """Revoke write access from the current entity.""" - return self.revoke(ACL.Role.Writer) + return self.revoke(ACL.Role.Writer) - def revoke_owner(self): - """Revoke owner access from the current entity.""" + def revoke_owner(self): + """Revoke owner access from the current entity.""" - return self.revoke(ACL.Role.Owner) + return self.revoke(ACL.Role.Owner) - def __init__(self): - self.entities = {} + def __init__(self): + self.entities = {} - def __iter__(self): - for entity in self.entities.itervalues(): - for role in entity.get_roles(): - if role: - yield {'entity': str(entity), 'role': role} + def __iter__(self): + for entity in self.entities.itervalues(): + for role in entity.get_roles(): + if role: + yield {'entity': str(entity), 'role': role} - def entity_from_dict(self, entity_dict): - """Build an ACL.Entity object from a dictionary of data. + def entity_from_dict(self, entity_dict): + """Build an ACL.Entity object from a dictionary of data. - An entity is a mutable object - that represents a list of roles - belonging to either a user or group - or the special types - for all users - and all authenticated users. + An entity is a mutable object + that represents a list of roles + belonging to either a user or group + or the special types + for all users + and all authenticated users. - :type entity_dict: dict - :param entity_dict: Dictionary full of data from an ACL lookup. + :type entity_dict: dict + :param entity_dict: Dictionary full of data from an ACL lookup. - :rtype: :class:`ACL.Entity` - :returns: An Entity constructed from the dictionary. - """ + :rtype: :class:`ACL.Entity` + :returns: An Entity constructed from the dictionary. 
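+
+        For example (an illustrative sketch; the e-mail is made up)::
+
+          >>> acl.entity_from_dict({'entity': 'user-bob@example.com',
+          ...                       'role': 'OWNER'})
+          <ACL Entity: user-bob@example.com (OWNER)>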
+        """
 
-    entity = entity_dict['entity']
-    role = entity_dict['role']
-
-    if entity == 'allUsers':
-      entity = self.all()
-
-    elif entity == 'allAuthenticatedUsers':
-      entity = self.all_authenticated()
-
-    elif '-' in entity:
-      entity_type, identifier = entity.split('-', 1)
-      entity = self.entity(entity_type=entity_type, identifier=identifier)
-
-    if not isinstance(entity, ACL.Entity):
-      raise ValueError('Invalid dictionary: %s' % entity_dict)
-
-    return entity.grant(role)
+        entity = entity_dict['entity']
+        role = entity_dict['role']
+
+        if entity == 'allUsers':
+            entity = self.all()
+
+        elif entity == 'allAuthenticatedUsers':
+            entity = self.all_authenticated()
+
+        elif '-' in entity:
+            entity_type, identifier = entity.split('-', 1)
+            entity = self.entity(entity_type=entity_type,
+                                 identifier=identifier)
+
+        if not isinstance(entity, ACL.Entity):
+            raise ValueError('Invalid dictionary: %s' % entity_dict)
+
+        return entity.grant(role)
 
-  def has_entity(self, entity):
-    """Returns whether or not this ACL has any entries for an entity.
-
-    :type entity: :class:`ACL.Entity`
-    :param entity: The entity to check for existence in this ACL.
-
-    :rtype: bool
-    :returns: True if the entity exists in the ACL.
-    """
-
-    return str(entity) in self.entities
+    def has_entity(self, entity):
+        """Returns whether or not this ACL has any entries for an entity.
+
+        :type entity: :class:`ACL.Entity`
+        :param entity: The entity to check for existence in this ACL.
+
+        :rtype: bool
+        :returns: True if the entity exists in the ACL.
+        """
+
+        return str(entity) in self.entities
 
-  def get_entity(self, entity, default=None):
-    """Gets an entity object from the ACL.
-
-    :type entity: :class:`ACL.Entity` or string
-    :param entity: The entity to look up in the ACL.
-
-    :type default: anything
-    :param default: This value will be returned if the entity doesn't exist.
-
-    :rtype: :class:`ACL.Entity`
-    :returns: The corresponding entity or the value provided to ``default``.
-    """
-
-    return self.entities.get(str(entity), default)
+    def get_entity(self, entity, default=None):
+        """Gets an entity object from the ACL.
+
+        :type entity: :class:`ACL.Entity` or string
+        :param entity: The entity to look up in the ACL.
+
+        :type default: anything
+        :param default: This value will be returned if the entity
+                        doesn't exist.
+
+        :rtype: :class:`ACL.Entity`
+        :returns: The corresponding entity or the value provided
+                  to ``default``.
+        """
+
+        return self.entities.get(str(entity), default)
 
-  def add_entity(self, entity):
-    """Add an entity to the ACL.
-
-    :type entity: :class:`ACL.Entity`
-    :param entity: The entity to add to this ACL.
-    """
-
-    self.entities[str(entity)] = entity
+    def add_entity(self, entity):
+        """Add an entity to the ACL.
+
+        :type entity: :class:`ACL.Entity`
+        :param entity: The entity to add to this ACL.
+        """
+
+        self.entities[str(entity)] = entity
 
-  def entity(self, entity_type, identifier=None):
-    """Factory method for creating an Entity.
-
-    If an entity with the same type and identifier already exists,
-    this will return a reference to that entity.
-    If not, it will create a new one and add it to the list
-    of known entities for this ACL.
-
-    :type entity_type: string
-    :param entity_type: The type of entity to create
-                        (ie, ``user``, ``group``, etc)
-
-    :type identifier: string
-    :param identifier: The ID of the entity (if applicable).
-                       This can be either an ID or an e-mail address.
-
-    :rtype: :class:`ACL.Entity`
-    :returns: A new Entity or a reference to an existing identical entity.
-    """
-
-    entity = ACL.Entity(entity_type=entity_type, identifier=identifier)
-    if self.has_entity(entity):
-      entity = self.get_entity(entity)
-    else:
-      self.add_entity(entity)
-    return entity
+    def entity(self, entity_type, identifier=None):
+        """Factory method for creating an Entity.
+
+        If an entity with the same type and identifier already exists,
+        this will return a reference to that entity.
+        If not, it will create a new one and add it to the list
+        of known entities for this ACL.
+
+        :type entity_type: string
+        :param entity_type: The type of entity to create
+                            (ie, ``user``, ``group``, etc)
+
+        :type identifier: string
+        :param identifier: The ID of the entity (if applicable).
+                           This can be either an ID or an e-mail address.
+
+        :rtype: :class:`ACL.Entity`
+        :returns: A new Entity or a reference to an existing identical entity.
+        """
+
+        entity = ACL.Entity(entity_type=entity_type, identifier=identifier)
+        if self.has_entity(entity):
+            entity = self.get_entity(entity)
+        else:
+            self.add_entity(entity)
+        return entity
 
-  def user(self, identifier):
-    """Factory method for a user Entity.
-
-    :type identifier: string
-    :param identifier: An id or e-mail for this particular user.
-
-    :rtype: :class:`ACL.Entity`
-    :returns: An Entity corresponding to this user.
-    """
-
-    return self.entity('user', identifier=identifier)
+    def user(self, identifier):
+        """Factory method for a user Entity.
+
+        :type identifier: string
+        :param identifier: An id or e-mail for this particular user.
+
+        :rtype: :class:`ACL.Entity`
+        :returns: An Entity corresponding to this user.
+        """
+
+        return self.entity('user', identifier=identifier)
 
-  def group(self, identifier):
-    """Factory method for a group Entity.
-
-    :type identifier: string
-    :param identifier: An id or e-mail for this particular group.
-
-    :rtype: :class:`ACL.Entity`
-    :returns: An Entity corresponding to this group.
-    """
-
-    return self.entity('group', identifier=identifier)
+    def group(self, identifier):
+        """Factory method for a group Entity.
+
+        :type identifier: string
+        :param identifier: An id or e-mail for this particular group.
+
+        :rtype: :class:`ACL.Entity`
+        :returns: An Entity corresponding to this group.
+        """
+
+        return self.entity('group', identifier=identifier)
 
-  def domain(self, domain):
-    """Factory method for a domain Entity.
-
-    :type domain: string
-    :param domain: The domain for this entity.
-
-    :rtype: :class:`ACL.Entity`
-    :returns: An entity corresponding to this domain.
-    """
-
-    return self.entity('domain', identifier=domain)
+    def domain(self, domain):
+        """Factory method for a domain Entity.
+
+        :type domain: string
+        :param domain: The domain for this entity.
+
+        :rtype: :class:`ACL.Entity`
+        :returns: An entity corresponding to this domain.
+        """
+
+        return self.entity('domain', identifier=domain)
 
-  def all(self):
-    """Factory method for an Entity representing all users.
-
-    :rtype: :class:`ACL.Entity`
-    :returns: An entity representing all users.
-    """
-
-    return self.entity('allUsers')
+    def all(self):
+        """Factory method for an Entity representing all users.
+
+        :rtype: :class:`ACL.Entity`
+        :returns: An entity representing all users.
+        """
+
+        return self.entity('allUsers')
 
-  def all_authenticated(self):
-    """Factory method for an Entity representing all authenticated users.
+    def all_authenticated(self):
+        """Factory method for an Entity representing all authenticated users.
 
-    :rtype: :class:`ACL.Entity`
-    :returns: An entity representing all authenticated users.
- """ + :rtype: :class:`ACL.Entity` + :returns: An entity representing all authenticated users. + """ - return self.entity('allAuthenticatedUsers') + return self.entity('allAuthenticatedUsers') - def get_entities(self): - """Get a list of all Entity objects. + def get_entities(self): + """Get a list of all Entity objects. - :rtype: list of :class:`ACL.Entity` objects - :returns: A list of all Entity objects. - """ + :rtype: list of :class:`ACL.Entity` objects + :returns: A list of all Entity objects. + """ - return self.entities.values() + return self.entities.values() - def save(self): # pragma NO COVER - """A method to be overridden by subclasses. + def save(self): # pragma NO COVER + """A method to be overridden by subclasses. - :raises: NotImplementedError - """ + :raises: NotImplementedError + """ - raise NotImplementedError + raise NotImplementedError class BucketACL(ACL): - """An ACL specifically for a bucket.""" + """An ACL specifically for a bucket.""" - def __init__(self, bucket): - """ - :type bucket: :class:`gcloud.storage.bucket.Bucket` - :param bucket: The bucket to which this ACL relates. - """ + def __init__(self, bucket): + """ + :type bucket: :class:`gcloud.storage.bucket.Bucket` + :param bucket: The bucket to which this ACL relates. + """ - super(BucketACL, self).__init__() - self.bucket = bucket + super(BucketACL, self).__init__() + self.bucket = bucket - def save(self): - """Save this ACL for the current bucket.""" + def save(self): + """Save this ACL for the current bucket.""" - return self.bucket.save_acl(acl=self) + return self.bucket.save_acl(acl=self) class DefaultObjectACL(BucketACL): - """A class representing the default object ACL for a bucket.""" + """A class representing the default object ACL for a bucket.""" - def save(self): - """Save this ACL as the default object ACL for the current bucket.""" + def save(self): + """Save this ACL as the default object ACL for the current bucket.""" - return self.bucket.save_default_object_acl(acl=self) + return self.bucket.save_default_object_acl(acl=self) class ObjectACL(ACL): - """An ACL specifically for a key.""" + """An ACL specifically for a key.""" - def __init__(self, key): - """ - :type key: :class:`gcloud.storage.key.Key` - :param key: The key that this ACL corresponds to. - """ + def __init__(self, key): + """ + :type key: :class:`gcloud.storage.key.Key` + :param key: The key that this ACL corresponds to. + """ - super(ObjectACL, self).__init__() - self.key = key + super(ObjectACL, self).__init__() + self.key = key - def save(self): - """Save this ACL for the current key.""" + def save(self): + """Save this ACL for the current key.""" - return self.key.save_acl(acl=self) + return self.key.save_acl(acl=self) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index a6e98256744b..ea64670a05d4 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -8,599 +8,602 @@ class Bucket(object): - """A class representing a Bucket on Cloud Storage. + """A class representing a Bucket on Cloud Storage. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: The connection to use when sending requests. + :type connection: :class:`gcloud.storage.connection.Connection` + :param connection: The connection to use when sending requests. - :type name: string - :param name: The name of the bucket. 
- """ - - def __init__(self, connection=None, name=None, metadata=None): - self.connection = connection - self.name = name - self.metadata = metadata - - # ACL rules are lazily retrieved. - self.acl = None - self.default_object_acl = None - - @classmethod - def from_dict(cls, bucket_dict, connection=None): - """Construct a new bucket from a dictionary of data from Cloud Storage. - - :type bucket_dict: dict - :param bucket_dict: The dictionary of data to construct a bucket from. - - :rtype: :class:`Bucket` - :returns: A bucket constructed from the data provided. - """ - - return cls(connection=connection, name=bucket_dict['name'], - metadata=bucket_dict) - - def __repr__(self): # pragma NO COVER - return '' % self.name - - def __iter__(self): - return iter(KeyIterator(bucket=self)) - - def __contains__(self, key): - return self.get_key(key) is not None - - @property - def path(self): - """The URL path to this bucket.""" - - if not self.name: - raise ValueError('Cannot determine path without bucket name.') - - return '/b/' + self.name - - def get_key(self, key): - """Get a key object by name. - - This will return None if the key doesn't exist:: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('my-bucket') - >>> print bucket.get_key('/path/to/key.txt') - - >>> print bucket.get_key('/does-not-exist.txt') - None - - :type key: string or :class:`gcloud.storage.key.Key` - :param key: The name of the key to retrieve. - - :rtype: :class:`gcloud.storage.key.Key` or None - :returns: The key object if it exists, otherwise None. - """ - - # Coerce this to a key object (either from a Key or a string). - key = self.new_key(key) - - try: - response = self.connection.api_request(method='GET', path=key.path) - return Key.from_dict(response, bucket=self) - except exceptions.NotFoundError: - return None - - def get_all_keys(self): - """List all the keys in this bucket. - - This will **not** retrieve all the data for all the keys, - it will only retrieve metadata about the keys. - - This is equivalent to:: - - keys = [key for key in bucket] - - :rtype: list of :class:`gcloud.storage.key.Key` - :returns: A list of all the Key objects in this bucket. + :type name: string + :param name: The name of the bucket. """ - return list(self) - - def new_key(self, key): - """Given a path name (or Key), return a :class:`.storage.key.Key` object. + def __init__(self, connection=None, name=None, metadata=None): + self.connection = connection + self.name = name + self.metadata = metadata - This is really useful when you're not sure - if you have a Key object or a string path name. - Given either of those types, - this returns the corresponding Key object. - - :type key: string or :class:`gcloud.storage.key.Key` - :param key: A path name or actual key object. - - :rtype: :class:`gcloud.storage.key.Key` - :returns: A Key object with the path provided. - """ + # ACL rules are lazily retrieved. + self.acl = None + self.default_object_acl = None - if isinstance(key, Key): - return key + @classmethod + def from_dict(cls, bucket_dict, connection=None): + """Construct a new bucket from a dictionary of data from Cloud Storage. - # Support Python 2 and 3. - try: - string_type = basestring - except NameError: # pragma NO COVER PY3k - string_type = str + :type bucket_dict: dict + :param bucket_dict: The dictionary of data to construct a bucket from. 
- if isinstance(key, string_type): - return Key(bucket=self, name=key) + :rtype: :class:`Bucket` + :returns: A bucket constructed from the data provided. + """ - raise TypeError('Invalid key: %s' % key) + return cls(connection=connection, name=bucket_dict['name'], + metadata=bucket_dict) - def delete(self, force=False): - """Delete this bucket. + def __repr__(self): # pragma NO COVER + return '' % self.name - The bucket **must** be empty in order to delete it. - If the bucket doesn't exist, - this will raise a :class:`gcloud.storage.exceptions.NotFoundError`. - If the bucket is not empty, - this will raise an Exception. + def __iter__(self): + return iter(KeyIterator(bucket=self)) - If you want to delete a non-empty bucket you can pass - in a force parameter set to true. - This will iterate through the bucket's keys and delete the related objects, - before deleting the bucket. + def __contains__(self, key): + return self.get_key(key) is not None - :type force: bool - :param full: If True, empties the bucket's objects then deletes it. + @property + def path(self): + """The URL path to this bucket.""" - :raises: :class:`gcloud.storage.exceptions.NotFoundError` - """ - return self.connection.delete_bucket(self.name, force=force) + if not self.name: + raise ValueError('Cannot determine path without bucket name.') - def delete_key(self, key): - """Deletes a key from the current bucket. + return '/b/' + self.name - If the key isn't found, - this will throw a :class:`gcloud.storage.exceptions.NotFoundError`. + def get_key(self, key): + """Get a key object by name. - For example:: + This will return None if the key doesn't exist:: - >>> from gcloud import storage - >>> from gcloud.storage import exceptions - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('my-bucket') - >>> print bucket.get_all_keys() - [] - >>> bucket.delete_key('my-file.txt') - >>> try: - ... bucket.delete_key('doesnt-exist') - ... except exceptions.NotFoundError: - ... pass + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, key_path) + >>> bucket = connection.get_bucket('my-bucket') + >>> print bucket.get_key('/path/to/key.txt') + + >>> print bucket.get_key('/does-not-exist.txt') + None + :type key: string or :class:`gcloud.storage.key.Key` + :param key: The name of the key to retrieve. - :type key: string or :class:`gcloud.storage.key.Key` - :param key: A key name or Key object to delete. + :rtype: :class:`gcloud.storage.key.Key` or None + :returns: The key object if it exists, otherwise None. + """ - :rtype: :class:`gcloud.storage.key.Key` - :returns: The key that was just deleted. - :raises: :class:`gcloud.storage.exceptions.NotFoundError` - """ + # Coerce this to a key object (either from a Key or a string). + key = self.new_key(key) - key = self.new_key(key) - self.connection.api_request(method='DELETE', path=key.path) - return key + try: + response = self.connection.api_request(method='GET', path=key.path) + return Key.from_dict(response, bucket=self) + except exceptions.NotFoundError: + return None - def delete_keys(self, keys): - # NOTE: boto returns a MultiDeleteResult instance. - for key in keys: - self.delete_key(key) + def get_all_keys(self): + """List all the keys in this bucket. - def copy_key(self): # pragma NO COVER - raise NotImplementedError + This will **not** retrieve all the data for all the keys, + it will only retrieve metadata about the keys. 
- def upload_file(self, filename, key=None): - """Shortcut method to upload a file into this bucket. + This is equivalent to:: - Use this method to quickly put a local file in Cloud Storage. + keys = [key for key in bucket] - For example:: + :rtype: list of :class:`gcloud.storage.key.Key` + :returns: A list of all the Key objects in this bucket. + """ - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('my-bucket') - >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt') - >>> print bucket.get_all_keys() - [] + return list(self) - If you don't provide a key value, - we will try to upload the file using the local filename - as the key - (**not** the complete path):: + def new_key(self, key): + """Given path name (or Key), return a :class:`.storage.key.Key` object. - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('my-bucket') - >>> bucket.upload_file('~/my-file.txt') - >>> print bucket.get_all_keys() - [] + This is really useful when you're not sure + if you have a Key object or a string path name. + Given either of those types, + this returns the corresponding Key object. - :type filename: string - :param filename: Local path to the file you want to upload. + :type key: string or :class:`gcloud.storage.key.Key` + :param key: A path name or actual key object. - :type key: string or :class:`gcloud.storage.key.Key` - :param key: The key (either an object or a remote path) - of where to put the file. + :rtype: :class:`gcloud.storage.key.Key` + :returns: A Key object with the path provided. + """ - If this is blank, - we will try to upload the file - to the root of the bucket - with the same name as on your local file system. - """ - if key is None: - key = os.path.basename(filename) - key = self.new_key(key) - return key.set_contents_from_filename(filename) - - def upload_file_object(self, fh, key=None): - # TODO: What do we do about overwriting data? - """Shortcut method to upload a file into this bucket. - - Use this method to quickly put a local file in Cloud Storage. - - For example:: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('my-bucket') - >>> bucket.upload_file(open('~/my-file.txt'), 'remote-text-file.txt') - >>> print bucket.get_all_keys() - [] - - If you don't provide a key value, - we will try to upload the file using the local filename - as the key - (**not** the complete path):: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('my-bucket') - >>> bucket.upload_file(open('~/my-file.txt')) - >>> print bucket.get_all_keys() - [] - - :type fh: file - :param fh: A file handle open for reading. - - :type key: string or :class:`gcloud.storage.key.Key` - :param key: The key (either an object or a remote path) - of where to put the file. - - If this is blank, - we will try to upload the file - to the root of the bucket - with the same name as on your local file system. - """ - if key: - key = self.new_key(key) - else: - key = self.new_key(os.path.basename(fh.name)) - return key.set_contents_from_file(fh) + if isinstance(key, Key): + return key - def has_metadata(self, field=None): - """Check if metadata is available locally. + # Support Python 2 and 3. 
+        try:
+            string_type = basestring
+        except NameError:  # pragma NO COVER PY3k
+            string_type = str
+
+        if isinstance(key, string_type):
+            return Key(bucket=self, name=key)
+
+        raise TypeError('Invalid key: %s' % key)
+
+    def delete(self, force=False):
+        """Delete this bucket.
+
+        The bucket **must** be empty in order to delete it.
+        If the bucket doesn't exist,
+        this will raise a :class:`gcloud.storage.exceptions.NotFoundError`.
+        If the bucket is not empty,
+        this will raise an Exception.
+
+        If you want to delete a non-empty bucket you can pass
+        in a force parameter set to true.
+        This will iterate through the bucket's keys and delete the
+        related objects before deleting the bucket.
+
+        :type force: bool
+        :param force: If True, empties the bucket's objects then deletes it.
+
+        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
+        """
+        return self.connection.delete_bucket(self.name, force=force)
+
+    def delete_key(self, key):
+        """Deletes a key from the current bucket.
+
+        If the key isn't found,
+        this will throw a :class:`gcloud.storage.exceptions.NotFoundError`.
+
+        For example::
+
+          >>> from gcloud import storage
+          >>> from gcloud.storage import exceptions
+          >>> connection = storage.get_connection(project, email, key_path)
+          >>> bucket = connection.get_bucket('my-bucket')
+          >>> print bucket.get_all_keys()
+          [<Key: my-bucket, my-file.txt>]
+          >>> bucket.delete_key('my-file.txt')
+          >>> try:
+          ...   bucket.delete_key('doesnt-exist')
+          ... except exceptions.NotFoundError:
+          ...   pass
+
+        :type key: string or :class:`gcloud.storage.key.Key`
+        :param key: A key name or Key object to delete.
+
+        :rtype: :class:`gcloud.storage.key.Key`
+        :returns: The key that was just deleted.
+        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
+        """
+
+        key = self.new_key(key)
+        self.connection.api_request(method='DELETE', path=key.path)
+        return key
+
+    def delete_keys(self, keys):
+        # NOTE: boto returns a MultiDeleteResult instance.
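+        # Each key is deleted serially via delete_key(), so a missing key
+        # raises NotFoundError and aborts the remainder of the loop.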
+        for key in keys:
+            self.delete_key(key)
+
+    def copy_key(self):  # pragma NO COVER
+        raise NotImplementedError
+
+    def upload_file(self, filename, key=None):
+        """Shortcut method to upload a file into this bucket.
+
+        Use this method to quickly put a local file in Cloud Storage.
+
+        For example::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> bucket = connection.get_bucket('my-bucket')
+            >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt')
+            >>> print bucket.get_all_keys()
+            [<Key: my-bucket, remote-text-file.txt>]
+
+        If you don't provide a key value,
+        we will try to upload the file using the local filename
+        as the key
+        (**not** the complete path)::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> bucket = connection.get_bucket('my-bucket')
+            >>> bucket.upload_file('~/my-file.txt')
+            >>> print bucket.get_all_keys()
+            [<Key: my-bucket, my-file.txt>]
+
+        :type filename: string
+        :param filename: Local path to the file you want to upload.
+
+        :type key: string or :class:`gcloud.storage.key.Key`
+        :param key: The key (either an object or a remote path)
+                    indicating where to put the file.
+
+                    If this is blank,
+                    we will try to upload the file
+                    to the root of the bucket
+                    with the same name as on your local file system.
+        """
+        if key is None:
+            key = os.path.basename(filename)
+        key = self.new_key(key)
+        return key.set_contents_from_filename(filename)
+
+    def upload_file_object(self, fh, key=None):
+        # TODO: What do we do about overwriting data?
+        """Shortcut method to upload a file object into this bucket.
+
+        Use this method to quickly put a local file in Cloud Storage.
+
+        For example::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> bucket = connection.get_bucket('my-bucket')
+            >>> bucket.upload_file_object(open('~/my-file.txt'), 'remote-text-file.txt')
+            >>> print bucket.get_all_keys()
+            [<Key: my-bucket, remote-text-file.txt>]
+
+        If you don't provide a key value,
+        we will try to upload the file using the local filename
+        as the key
+        (**not** the complete path)::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> bucket = connection.get_bucket('my-bucket')
+            >>> bucket.upload_file_object(open('~/my-file.txt'))
+            >>> print bucket.get_all_keys()
+            [<Key: my-bucket, my-file.txt>]
+
+        :type fh: file
+        :param fh: A file handle open for reading.
+
+        :type key: string or :class:`gcloud.storage.key.Key`
+        :param key: The key (either an object or a remote path)
+                    indicating where to put the file.
+
+                    If this is blank,
+                    we will try to upload the file
+                    to the root of the bucket
+                    with the same name as on your local file system.
+        """
+        if key:
+            key = self.new_key(key)
+        else:
+            key = self.new_key(os.path.basename(fh.name))
+        return key.set_contents_from_file(fh)
+
+    def has_metadata(self, field=None):
+        """Check if metadata is available locally.
+
+        :type field: string
+        :param field: (optional) the particular field to check for.
+
+        :rtype: bool
+        :returns: Whether metadata is available locally.
+        """
+
+        if not self.metadata:
+            return False
+        elif field and field not in self.metadata:
+            return False
+        else:
+            return True
+
+    def reload_metadata(self, full=False):
+        """Reload metadata from Cloud Storage.
+
+        :type full: bool
+        :param full: If True, loads all data (including ACL data).
+
+        :rtype: :class:`Bucket`
+        :returns: The bucket you just reloaded data for.
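+
+        (A ``full`` projection is what exposes the ``acl`` and
+        ``defaultObjectAcl`` fields in the metadata; see
+        :func:`Bucket.get_metadata`.)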
+        """
+
+        projection = 'full' if full else 'noAcl'
+        query_params = {'projection': projection}
+        self.metadata = self.connection.api_request(
+            method='GET', path=self.path, query_params=query_params)
+        return self
+
+    def get_metadata(self, field=None, default=None):
+        """Get all metadata or a specific field.
+
+        If you request a field that isn't available,
+        and that field can be retrieved by refreshing data
+        from Cloud Storage,
+        this method will reload the data using
+        :func:`Bucket.reload_metadata`.
+
+        :type field: string
+        :param field: (optional) A particular field to retrieve from metadata.
+
+        :type default: anything
+        :param default: The value to return if the field provided wasn't found.
+
+        :rtype: dict or anything
+        :returns: All metadata or the value of the specific field.
+        """
 
-    if field:
-      return self.metadata.get(field, default)
-    else:
-      return self.metadata
+        if not self.has_metadata(field=field):
+            full = (field and field in ('acl', 'defaultObjectAcl'))
+            self.reload_metadata(full=full)
 
-  def patch_metadata(self, metadata):
-    """Update particular fields of this bucket's metadata.
+        if field:
+            return self.metadata.get(field, default)
+        else:
+            return self.metadata
 
-    This method will only update the fields provided
-    and will not touch the other fields.
+    def patch_metadata(self, metadata):
+        """Update particular fields of this bucket's metadata.
+
+        This method will only update the fields provided
+        and will not touch the other fields.
 
-    It will also reload the metadata locally
-    based on the servers response.
+        It will also reload the metadata locally
+        based on the server's response.
 
-    :type metadata: dict
-    :param metadata: The dictionary of values to update.
+        :type metadata: dict
+        :param metadata: The dictionary of values to update.
 
-    :rtype: :class:`Bucket`
-    :returns: The current bucket.
-    """
+        :rtype: :class:`Bucket`
+        :returns: The current bucket.
+        """
 
-    self.metadata = self.connection.api_request(
-        method='PATCH', path=self.path, data=metadata,
-        query_params={'projection': 'full'})
-    return self
+        self.metadata = self.connection.api_request(
+            method='PATCH', path=self.path, data=metadata,
+            query_params={'projection': 'full'})
+        return self
 
-  def configure_website(self, main_page_suffix=None, not_found_page=None):
-    """Configure website-related metadata.
+    def configure_website(self, main_page_suffix=None, not_found_page=None):
+        """Configure website-related metadata.
 
-    .. note::
-      This (apparently) only works
-      if your bucket name is a domain name
-      (and to do that, you need to get approved somehow...).
+        .. note::
+          This (apparently) only works
+          if your bucket name is a domain name
+          (and to do that, you need to get approved somehow...).
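+          (Specifically, Google has to verify that you own the domain,
+          for example via Google Webmaster Tools, before a
+          domain-named bucket can be created.)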
- Check out the official documentation here: - https://developers.google.com/storage/docs/website-configuration + Check out the official documentation here: + https://developers.google.com/storage/docs/website-configuration - If you want this bucket to host a website, - just provide the name of an index page - and a page to use when a key isn't found:: + If you want this bucket to host a website, + just provide the name of an index page + and a page to use when a key isn't found:: - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, private_key_path) - >>> bucket = connection.get_bucket(bucket_name) - >>> bucket.configure_website('index.html', '404.html') + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, + private_key_path) + >>> bucket = connection.get_bucket(bucket_name) + >>> bucket.configure_website('index.html', '404.html') - You probably should also make the whole bucket public:: + You probably should also make the whole bucket public:: - >>> bucket.make_public(recursive=True, future=True) + >>> bucket.make_public(recursive=True, future=True) - This says: - "Make the bucket public, - and all the stuff already in the bucket, - and anything else I add to the bucket. - Just make it all public." + This says: + "Make the bucket public, + and all the stuff already in the bucket, + and anything else I add to the bucket. + Just make it all public." - :type main_page_suffix: string - :param main_page_suffix: The page to use as the main page of a directory. - Typically something like index.html. + :type main_page_suffix: string + :param main_page_suffix: The page to use as the main page + of a directory. + Typically something like index.html. - :type not_found_page: string - :param not_found_page: The file to use when a page isn't found. - """ + :type not_found_page: string + :param not_found_page: The file to use when a page isn't found. + """ - data = {'website': {'mainPageSuffix': main_page_suffix, - 'notFoundPage': not_found_page, - } - } - return self.patch_metadata(data) + data = {'website': {'mainPageSuffix': main_page_suffix, + 'notFoundPage': not_found_page, + } + } + return self.patch_metadata(data) - def disable_website(self): - """Disable the website configuration for this bucket. + def disable_website(self): + """Disable the website configuration for this bucket. - This is really just a shortcut for - setting the website-related attributes to ``None``. - """ + This is really just a shortcut for + setting the website-related attributes to ``None``. + """ - return self.configure_website(None, None) + return self.configure_website(None, None) - def reload_acl(self): - """Reload the ACL data from Cloud Storage. + def reload_acl(self): + """Reload the ACL data from Cloud Storage. - :rtype: :class:`Bucket` - :returns: The current bucket. - """ + :rtype: :class:`Bucket` + :returns: The current bucket. + """ - self.acl = BucketACL(bucket=self) + self.acl = BucketACL(bucket=self) - for entry in self.get_metadata('acl', []): - entity = self.acl.entity_from_dict(entry) - self.acl.add_entity(entity) + for entry in self.get_metadata('acl', []): + entity = self.acl.entity_from_dict(entry) + self.acl.add_entity(entity) - return self + return self - def get_acl(self): - """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object. + def get_acl(self): + """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object. 
- :rtype: :class:`gcloud.storage.acl.BucketACL` - :returns: An ACL object for the current bucket. - """ + :rtype: :class:`gcloud.storage.acl.BucketACL` + :returns: An ACL object for the current bucket. + """ - if not self.acl: - self.reload_acl() - return self.acl + if not self.acl: + self.reload_acl() + return self.acl - def save_acl(self, acl=None): - """Save the ACL data for this bucket. + def save_acl(self, acl=None): + """Save the ACL data for this bucket. - If called without arguments, - this will save the ACL currently stored on the Bucket object. - For example, - this will save - the ACL stored in ``some_other_acl``:: + If called without arguments, + this will save the ACL currently stored on the Bucket object. + For example, + this will save + the ACL stored in ``some_other_acl``:: - >>> bucket.acl = some_other_acl - >>> bucket.save_acl() + >>> bucket.acl = some_other_acl + >>> bucket.save_acl() - You can also provide a specific ACL to save - instead of the one currently set - on the Bucket object:: + You can also provide a specific ACL to save + instead of the one currently set + on the Bucket object:: - >>> bucket.save_acl(acl=my_other_acl) + >>> bucket.save_acl(acl=my_other_acl) - You can use this to set access controls - to be consistent from one bucket to another:: + You can use this to set access controls + to be consistent from one bucket to another:: - >>> bucket1 = connection.get_bucket(bucket1_name) - >>> bucket2 = connection.get_bucket(bucket2_name) - >>> bucket2.save_acl(bucket1.get_acl()) + >>> bucket1 = connection.get_bucket(bucket1_name) + >>> bucket2 = connection.get_bucket(bucket2_name) + >>> bucket2.save_acl(bucket1.get_acl()) - If you want to **clear** the ACL for the bucket, - you must save an empty list (``[]``) - rather than using ``None`` - (which is interpreted as wanting to save the current ACL):: + If you want to **clear** the ACL for the bucket, + you must save an empty list (``[]``) + rather than using ``None`` + (which is interpreted as wanting to save the current ACL):: - >>> bucket.save_acl(None) # Saves the current ACL (self.acl). - >>> bucket.save_acl([]) # Clears the current ACL. + >>> bucket.save_acl(None) # Saves the current ACL (self.acl). + >>> bucket.save_acl([]) # Clears the current ACL. - :type acl: :class:`gcloud.storage.acl.ACL` - :param acl: The ACL object to save. - If left blank, this will save the ACL - set locally on the bucket. - """ + :type acl: :class:`gcloud.storage.acl.ACL` + :param acl: The ACL object to save. + If left blank, this will save the ACL + set locally on the bucket. + """ - # We do things in this weird way because [] and None - # both evaluate to False, but mean very different things. - if acl is None: - acl = self.acl + # We do things in this weird way because [] and None + # both evaluate to False, but mean very different things. + if acl is None: + acl = self.acl - if acl is None: - return self + if acl is None: + return self - self.patch_metadata({'acl': list(acl)}) - self.reload_acl() - return self + self.patch_metadata({'acl': list(acl)}) + self.reload_acl() + return self - def clear_acl(self): - """Remove all ACL rules from the bucket. + def clear_acl(self): + """Remove all ACL rules from the bucket. - Note that this won't actually remove *ALL* the rules, - but it will remove all the non-default rules. - In short, - you'll still have access - to a bucket that you created - even after you clear ACL rules - with this method. 
+ Note that this won't actually remove *ALL* the rules, + but it will remove all the non-default rules. + In short, + you'll still have access + to a bucket that you created + even after you clear ACL rules + with this method. - For example, - imagine that you granted access to this bucket - to a bunch of coworkers:: + For example, + imagine that you granted access to this bucket + to a bunch of coworkers:: - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, private_key_path) - >>> bucket = connection.get_bucket(bucket_name) - >>> acl = bucket.get_acl() - >>> acl.user('coworker1@example.org').grant_read() - >>> acl.user('coworker2@example.org').grant_read() - >>> acl.save() + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, + private_key_path) + >>> bucket = connection.get_bucket(bucket_name) + >>> acl = bucket.get_acl() + >>> acl.user('coworker1@example.org').grant_read() + >>> acl.user('coworker2@example.org').grant_read() + >>> acl.save() - Now they work in another part of the company - and you want to 'start fresh' on who has access:: + Now they work in another part of the company + and you want to 'start fresh' on who has access:: - >>> acl.clear_acl() + >>> acl.clear_acl() - At this point all the custom rules you created have been removed. - """ + At this point all the custom rules you created have been removed. + """ - return self.save_acl(acl=[]) + return self.save_acl(acl=[]) - def reload_default_object_acl(self): - """Reload the Default Object ACL rules for this bucket. + def reload_default_object_acl(self): + """Reload the Default Object ACL rules for this bucket. - :rtype: :class:`Bucket` - :returns: The current bucket. - """ + :rtype: :class:`Bucket` + :returns: The current bucket. + """ - self.default_object_acl = DefaultObjectACL(bucket=self) + self.default_object_acl = DefaultObjectACL(bucket=self) - for entry in self.get_metadata('defaultObjectAcl', []): - entity = self.default_object_acl.entity_from_dict(entry) - self.default_object_acl.add_entity(entity) + for entry in self.get_metadata('defaultObjectAcl', []): + entity = self.default_object_acl.entity_from_dict(entry) + self.default_object_acl.add_entity(entity) - return self + return self - def get_default_object_acl(self): - """Get the current Default Object ACL rules. + def get_default_object_acl(self): + """Get the current Default Object ACL rules. - If the appropriate metadata isn't available locally, - this method will reload it from Cloud Storage. + If the appropriate metadata isn't available locally, + this method will reload it from Cloud Storage. - :rtype: :class:`gcloud.storage.acl.DefaultObjectACL` - :returns: A DefaultObjectACL object for this bucket. - """ + :rtype: :class:`gcloud.storage.acl.DefaultObjectACL` + :returns: A DefaultObjectACL object for this bucket. + """ - if not self.default_object_acl: - self.reload_default_object_acl() - return self.default_object_acl + if not self.default_object_acl: + self.reload_default_object_acl() + return self.default_object_acl - def save_default_object_acl(self, acl=None): - """Save the Default Object ACL rules for this bucket. + def save_default_object_acl(self, acl=None): + """Save the Default Object ACL rules for this bucket. - :type acl: :class:`gcloud.storage.acl.DefaultObjectACL` - :param acl: The DefaultObjectACL object to save. - If not provided, this will look at - the ``default_object_acl`` property - and save that. 
- """ + :type acl: :class:`gcloud.storage.acl.DefaultObjectACL` + :param acl: The DefaultObjectACL object to save. + If not provided, this will look at + the ``default_object_acl`` property + and save that. + """ - if acl is None: - acl = self.default_object_acl + if acl is None: + acl = self.default_object_acl - if acl is None: - return self + if acl is None: + return self - self.patch_metadata({'defaultObjectAcl': list(acl)}) - self.reload_default_object_acl() - return self + self.patch_metadata({'defaultObjectAcl': list(acl)}) + self.reload_default_object_acl() + return self - def clear_default_object_acl(self): - """Remove the Default Object ACL from this bucket.""" + def clear_default_object_acl(self): + """Remove the Default Object ACL from this bucket.""" - return self.save_default_object_acl(acl=[]) + return self.save_default_object_acl(acl=[]) - def make_public(self, recursive=False, future=False): - """Make a bucket public. + def make_public(self, recursive=False, future=False): + """Make a bucket public. - :type recursive: bool - :param recursive: If True, this will make all keys inside the bucket - public as well. + :type recursive: bool + :param recursive: If True, this will make all keys inside the bucket + public as well. - :type future: bool - :param future: If True, this will make all objects created in the future - public as well. - """ + :type future: bool + :param future: If True, this will make all objects created in the + future public as well. + """ - self.get_acl().all().grant_read() - self.save_acl() + self.get_acl().all().grant_read() + self.save_acl() - if future: - self.get_default_object_acl().all().grant_read() - self.save_default_object_acl() + if future: + self.get_default_object_acl().all().grant_read() + self.save_default_object_acl() - if recursive: - for key in self: - key.get_acl().all().grant_read() - key.save_acl() + if recursive: + for key in self: + key.get_acl().all().grant_read() + key.save_acl() diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py index ad0ca17b1020..7a426205b9d8 100644 --- a/gcloud/storage/connection.py +++ b/gcloud/storage/connection.py @@ -18,489 +18,496 @@ class Connection(connection.Connection): - """A connection to Google Cloud Storage via the JSON REST API. + """A connection to Google Cloud Storage via the JSON REST API. - This class should understand only the basic types (and protobufs) - in method arguments, however should be capable of returning advanced types. + This class should understand only the basic types (and protobufs) + in method arguments, however should be capable of returning advanced types. - See :class:`gcloud.connection.Connection` for a full list of parameters. - :class:`Connection` differs only in needing a project name - (which you specify when creating a project in the Cloud Console). + See :class:`gcloud.connection.Connection` for a full list of parameters. + :class:`Connection` differs only in needing a project name + (which you specify when creating a project in the Cloud Console). 
- A typical use of this is to operate on - :class:`gcloud.storage.bucket.Bucket` objects:: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.create_bucket('my-bucket-name') - - You can then delete this bucket:: - - >>> bucket.delete() - >>> # or - >>> connection.delete_bucket(bucket) - - If you want to access an existing bucket:: - - >>> bucket = connection.get_bucket('my-bucket-name') - - A :class:`Connection` is actually iterable - and will return the :class:`gcloud.storage.bucket.Bucket` objects - inside the project:: - - >>> for bucket in connection: - >>> print bucket - - - In that same way, you can check for whether a bucket exists - inside the project using Python's ``in`` operator:: - - >>> print 'my-bucket-name' in connection - True - """ - - API_VERSION = 'v1beta2' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = '{api_base_url}/storage/{api_version}{path}' - """A template used to craft the URL pointing toward a particular API call.""" - - API_ACCESS_ENDPOINT = 'https://storage.googleapis.com' - - def __init__(self, project, *args, **kwargs): - """ - :type project: string - :param project: The project name to connect to. - """ - - super(Connection, self).__init__(*args, **kwargs) - - self.project = project - - def __iter__(self): - return iter(BucketIterator(connection=self)) - - def __contains__(self, bucket_name): - return self.lookup(bucket_name) is not None - - def build_api_url(self, path, query_params=None, api_base_url=None, - api_version=None): - """Construct an API url given a few components, some optional. - - Typically, you shouldn't need to use this method. - - :type path: string - :param path: The path to the resource (ie, ``'/b/bucket-name'``). - - :type query_params: dict - :param query_params: A dictionary of keys and values to insert into - the query string of the URL. - - :type api_base_url: string - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - - :type api_version: string - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - - :rtype: string - :returns: The URL assembled from the pieces provided. - """ - - url = self.API_URL_TEMPLATE.format( - api_base_url=(api_base_url or self.API_BASE_URL), - api_version=(api_version or self.API_VERSION), - path=path) - - query_params = query_params or {} - query_params.update({'project': self.project}) - url += '?' + urllib.urlencode(query_params) - - return url - - def make_request(self, method, url, data=None, content_type=None, - headers=None): - """A low level method to send a request to the API. - - Typically, you shouldn't need to use this method. - - :type method: string - :param method: The HTTP method to use in the request. - - :type url: string - :param url: The URL to send the request to. - - :type data: string - :param data: The data to send as the body of the request. - - :type content_type: string - :param content_type: The proper MIME type of the data provided. - - :type headers: dict - :param headers: A dictionary of HTTP headers to send with the request. - - :rtype: tuple of ``response`` (a dictionary of sorts) - and ``content`` (a string). - :returns: The HTTP response object and the content of the response. 
- """ - - headers = headers or {} - headers['Accept-Encoding'] = 'gzip' - - if data: - content_length = len(str(data)) - else: - content_length = 0 - - headers['Content-Length'] = content_length - - if content_type: - headers['Content-Type'] = content_type - - return self.http.request(uri=url, method=method, headers=headers, - body=data) - - def api_request(self, method, path, query_params=None, - data=None, content_type=None, - api_base_url=None, api_version=None, - expect_json=True): - """Make a request over the HTTP transport to the Cloud Storage API. - - You shouldn't need to use this method, - but if you plan to interact with the API using these primitives, - this is the correct one to use... - - :type method: string - :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). - Required. - - :type path: string - :param path: The path to the resource (ie, ``'/b/bucket-name'``). - Required. - - :type query_params: dict - :param query_params: A dictionary of keys and values to insert into - the query string of the URL. Default is empty dict. - - :type data: string - :param data: The data to send as the body of the request. Default is the - empty string. - - :type content_type: string - :param content_type: The proper MIME type of the data provided. Default - is None. - - :type api_base_url: string - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - Default is the standard API base URL. - - :type api_version: string - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - Default is the latest API version supported by - gcloud-python. - - :type expect_json: bool - :param expect_json: If True, this method will try to parse the response - as JSON and raise an exception if that cannot be done. - Default is True. - - :raises: Exception if the response code is not 200 OK. - """ - - url = self.build_api_url(path=path, query_params=query_params, - api_base_url=api_base_url, - api_version=api_version) - - # Making the executive decision that any dictionary - # data will be sent properly as JSON. - if data and isinstance(data, dict): - data = json.dumps(data) - content_type = 'application/json' - - response, content = self.make_request( - method=method, url=url, data=data, content_type=content_type) - - if response.status == 404: - raise exceptions.NotFoundError(response, content) - elif not 200 <= response.status < 300: - raise exceptions.ConnectionError(response, content) - - if content and expect_json: - content_type = response.get('content-type', '') - if not content_type.startswith('application/json'): - raise TypeError('Expected JSON, got %s' % content_type) - return json.loads(content) - - return content - - def get_all_buckets(self): - """Get all buckets in the project. - - This will not populate the list of keys available - in each bucket. - - You can also iterate over the connection object, - so these two operations are identical:: + A typical use of this is to operate on + :class:`gcloud.storage.bucket.Bucket` objects:: >>> from gcloud import storage >>> connection = storage.get_connection(project, email, key_path) - >>> for bucket in connection.get_all_buckets(): - >>> print bucket - >>> # ... is the same as ... - >>> for bucket in connection: - >>> print bucket + >>> bucket = connection.create_bucket('my-bucket-name') - :rtype: list of :class:`gcloud.storage.bucket.Bucket` objects. - :returns: All buckets belonging to this project. 
- """ + You can then delete this bucket:: - return list(self) + >>> bucket.delete() + >>> # or + >>> connection.delete_bucket(bucket) - def get_bucket(self, bucket_name): - """Get a bucket by name. + If you want to access an existing bucket:: - If the bucket isn't found, - this will raise a :class:`gcloud.storage.exceptions.NotFoundError`. - If you would rather get a bucket by name, - and return ``None`` if the bucket isn't found - (like ``{}.get('...')``) - then use :func:`Connection.lookup`. + >>> bucket = connection.get_bucket('my-bucket-name') - For example:: + A :class:`Connection` is actually iterable + and will return the :class:`gcloud.storage.bucket.Bucket` objects + inside the project:: - >>> from gcloud import storage - >>> from gcloud.storage import exceptions - >>> connection = storage.get_connection(project, email, key_path) - >>> try: - >>> bucket = connection.get_bucket('my-bucket') - >>> except exceptions.NotFoundError: - >>> print 'Sorry, that bucket does not exist!' + >>> for bucket in connection: + >>> print bucket + - :type bucket_name: string - :param bucket_name: The name of the bucket to get. + In that same way, you can check for whether a bucket exists + inside the project using Python's ``in`` operator:: - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The bucket matching the name provided. - :raises: :class:`gcloud.storage.exceptions.NotFoundError` + >>> print 'my-bucket-name' in connection + True """ - bucket = self.new_bucket(bucket_name) - response = self.api_request(method='GET', path=bucket.path) - return Bucket.from_dict(response, connection=self) - def lookup(self, bucket_name): - """Get a bucket by name, returning None if not found. + API_VERSION = 'v1beta2' + """The version of the API, used in building the API call's URL.""" - You can use this if you would rather - checking for a None value - than catching an exception:: + API_URL_TEMPLATE = '{api_base_url}/storage/{api_version}{path}' + """A template for the URL of a particular API call.""" - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket('doesnt-exist') - >>> print bucket - None - >>> bucket = connection.get_bucket('my-bucket') - >>> print bucket - - - :type bucket_name: string - :param bucket_name: The name of the bucket to get. - - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The bucket matching the name provided or None if not found. - """ + API_ACCESS_ENDPOINT = 'https://storage.googleapis.com' - try: - return self.get_bucket(bucket_name) - except exceptions.NotFoundError: - return None + def __init__(self, project, *args, **kwargs): + """:type project: string + :param project: The project name to connect to. - def create_bucket(self, bucket): - """Create a new bucket. + """ - For example:: + super(Connection, self).__init__(*args, **kwargs) - >>> from gcloud import storage - >>> connection = storage.get_connection(project, client, key_path) - >>> bucket = connection.create_bucket('my-bucket') - >>> print bucket - + self.project = project - :type bucket: string or :class:`gcloud.storage.bucket.Bucket` - :param bucket: The bucket name (or bucket object) to create. + def __iter__(self): + return iter(BucketIterator(connection=self)) - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The newly created bucket. 
- """ + def __contains__(self, bucket_name): + return self.lookup(bucket_name) is not None - bucket = self.new_bucket(bucket) - response = self.api_request(method='POST', path='/b', - data={'name': bucket.name}) - return Bucket.from_dict(response, connection=self) + def build_api_url(self, path, query_params=None, api_base_url=None, + api_version=None): + """Construct an API url given a few components, some optional. - def delete_bucket(self, bucket, force=False): - """Delete a bucket. + Typically, you shouldn't need to use this method. - You can use this method to delete a bucket by name, - or to delete a bucket object:: + :type path: string + :param path: The path to the resource (ie, ``'/b/bucket-name'``). - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> connection.delete_bucket('my-bucket') - True + :type query_params: dict + :param query_params: A dictionary of keys and values to insert into + the query string of the URL. - You can also delete pass in the bucket object:: + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. - >>> bucket = connection.get_bucket('other-bucket') - >>> connection.delete_bucket(bucket) - True + :type api_version: string + :param api_version: The version of the API to call. + Typically you shouldn't provide this and instead + use the default for the library. - If the bucket doesn't exist, - this will raise a :class:`gcloud.storage.exceptions.NotFoundError`:: + :rtype: string + :returns: The URL assembled from the pieces provided. + """ - >>> from gcloud.storage import exceptions - >>> try: - >>> connection.delete_bucket('my-bucket') - >>> except exceptions.NotFoundError: - >>> print 'That bucket does not exist!' + url = self.API_URL_TEMPLATE.format( + api_base_url=(api_base_url or self.API_BASE_URL), + api_version=(api_version or self.API_VERSION), + path=path) - :type bucket: string or :class:`gcloud.storage.bucket.Bucket` - :param bucket: The bucket name (or bucket object) to create. + query_params = query_params or {} + query_params.update({'project': self.project}) + url += '?' + urllib.urlencode(query_params) - :type force: bool - :param full: If True, empties the bucket's objects then deletes it. + return url - :rtype: bool - :returns: True if the bucket was deleted. - :raises: :class:`gcloud.storage.exceptions.NotFoundError` - """ + def make_request(self, method, url, data=None, content_type=None, + headers=None): + """A low level method to send a request to the API. - bucket = self.new_bucket(bucket) + Typically, you shouldn't need to use this method. - # This force delete operation is slow. - if force: - for key in bucket: - key.delete() + :type method: string + :param method: The HTTP method to use in the request. - response = self.api_request(method='DELETE', path=bucket.path) - return True + :type url: string + :param url: The URL to send the request to. - def new_bucket(self, bucket): - """Factory method for creating a new (unsaved) bucket object. + :type data: string + :param data: The data to send as the body of the request. - This method is really useful when you're not sure whether - you have an actual :class:`gcloud.storage.bucket.Bucket` object - or just a name of a bucket. - It always returns the object:: + :type content_type: string + :param content_type: The proper MIME type of the data provided. 
- >>> bucket = connection.new_bucket('bucket') - >>> print bucket - - >>> bucket = connection.new_bucket(bucket) - >>> print bucket - + :type headers: dict + :param headers: A dictionary of HTTP headers to send with the request. - :type bucket: string or :class:`gcloud.storage.bucket.Bucket` - :param bucket: A name of a bucket or an existing Bucket object. - """ + :rtype: tuple of ``response`` (a dictionary of sorts) + and ``content`` (a string). + :returns: The HTTP response object and the content of the response. + """ - if isinstance(bucket, Bucket): - return bucket + headers = headers or {} + headers['Accept-Encoding'] = 'gzip' - # Support Python 2 and 3. - try: - string_type = basestring - except NameError: # pragma NO COVER PY3k - string_type = str + if data: + content_length = len(str(data)) + else: + content_length = 0 - if isinstance(bucket, string_type): - return Bucket(connection=self, name=bucket) + headers['Content-Length'] = content_length - raise TypeError('Invalid bucket: %s' % bucket) + if content_type: + headers['Content-Type'] = content_type - def generate_signed_url(self, resource, expiration, - method='GET', content_md5=None, - content_type=None): # pragma NO COVER UGH - """Generate a signed URL to provide query-string authentication to a resource. + return self.http.request(uri=url, method=method, headers=headers, + body=data) - :type resource: string - :param resource: A pointer to a specific resource - (typically, ``/bucket-name/path/to/key.txt``). + def api_request(self, method, path, query_params=None, + data=None, content_type=None, + api_base_url=None, api_version=None, + expect_json=True): + """Make a request over the HTTP transport to the Cloud Storage API. - :type expiration: int, long, datetime.datetime, datetime.timedelta - :param expiration: When the signed URL should expire. + You shouldn't need to use this method, + but if you plan to interact with the API using these primitives, + this is the correct one to use... - :type method: string - :param method: The HTTP verb that will be used when requesting the URL. + :type method: string + :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). + Required. - :type content_md5: string - :param content_md5: The MD5 hash of the object referenced by ``resource``. + :type path: string + :param path: The path to the resource (ie, ``'/b/bucket-name'``). + Required. - :type content_type: string - :param content_type: The content type of the object referenced by - ``resource``. + :type query_params: dict + :param query_params: A dictionary of keys and values to insert into + the query string of the URL. + Default is empty dict. - :rtype: string - :returns: A signed URL you can use to access the resource until expiration. - """ + :type data: string + :param data: The data to send as the body of the request. Default is + the empty string. + + :type content_type: string + :param content_type: The proper MIME type of the data provided. Default + is None. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + Default is the standard API base URL. + + :type api_version: string + :param api_version: The version of the API to call. + Typically you shouldn't provide this and instead + use the default for the library. + Default is the latest API version supported by + gcloud-python. 
+
+        :type expect_json: bool
+        :param expect_json: If True, this method will try to parse the response
+                            as JSON and raise an exception if that cannot
+                            be done. Default is True.
+
+        :raises: Exception if the response code is not 200 OK.
+        """
+
+        url = self.build_api_url(path=path, query_params=query_params,
+                                 api_base_url=api_base_url,
+                                 api_version=api_version)
+
+        # Making the executive decision that any dictionary
+        # data will be sent properly as JSON.
+        if data and isinstance(data, dict):
+            data = json.dumps(data)
+            content_type = 'application/json'
+
+        response, content = self.make_request(
+            method=method, url=url, data=data, content_type=content_type)
+
+        if response.status == 404:
+            raise exceptions.NotFoundError(response, content)
+        elif not 200 <= response.status < 300:
+            raise exceptions.ConnectionError(response, content)
+
+        if content and expect_json:
+            content_type = response.get('content-type', '')
+            if not content_type.startswith('application/json'):
+                raise TypeError('Expected JSON, got %s' % content_type)
+            return json.loads(content)
+
+        return content
+
+    def get_all_buckets(self):
+        """Get all buckets in the project.
+
+        This will not populate the list of keys available
+        in each bucket.
+
+        You can also iterate over the connection object,
+        so these two operations are identical::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> for bucket in connection.get_all_buckets():
+            >>>   print bucket
+            >>> # ... is the same as ...
+            >>> for bucket in connection:
+            >>>   print bucket
+
+        :rtype: list of :class:`gcloud.storage.bucket.Bucket` objects.
+        :returns: All buckets belonging to this project.
+        """
+
+        return list(self)
+
+    def get_bucket(self, bucket_name):
+        """Get a bucket by name.
+
+        If the bucket isn't found,
+        this will raise a :class:`gcloud.storage.exceptions.NotFoundError`.
+        If you would rather get a bucket by name,
+        and return ``None`` if the bucket isn't found
+        (like ``{}.get('...')``)
+        then use :func:`Connection.lookup`.
+
+        For example::
+
+            >>> from gcloud import storage
+            >>> from gcloud.storage import exceptions
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> try:
+            >>>   bucket = connection.get_bucket('my-bucket')
+            >>> except exceptions.NotFoundError:
+            >>>   print 'Sorry, that bucket does not exist!'
+
+        :type bucket_name: string
+        :param bucket_name: The name of the bucket to get.
+
+        :rtype: :class:`gcloud.storage.bucket.Bucket`
+        :returns: The bucket matching the name provided.
+        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
+        """
+        bucket = self.new_bucket(bucket_name)
+        response = self.api_request(method='GET', path=bucket.path)
+        return Bucket.from_dict(response, connection=self)
+
+    def lookup(self, bucket_name):
+        """Get a bucket by name, returning None if not found.
+
+        You can use this if you would rather
+        check for a None value
+        than catch an exception::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> bucket = connection.lookup('doesnt-exist')
+            >>> print bucket
+            None
+            >>> bucket = connection.lookup('my-bucket')
+            >>> print bucket
+            <Bucket: my-bucket>
+
+        :type bucket_name: string
+        :param bucket_name: The name of the bucket to get.
+
+        :rtype: :class:`gcloud.storage.bucket.Bucket`
+        :returns: The bucket matching the name provided or None if not found.
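+
+        (This is also what backs the ``in`` operator on a connection:
+        ``__contains__`` simply checks whether this method returns
+        ``None``.)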
+        """
+
+        try:
+            return self.get_bucket(bucket_name)
+        except exceptions.NotFoundError:
+            return None
+
+    def create_bucket(self, bucket):
+        """Create a new bucket.
+
+        For example::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> bucket = connection.create_bucket('my-bucket')
+            >>> print bucket
+            <Bucket: my-bucket>
+
+        :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
+        :param bucket: The bucket name (or bucket object) to create.
+
+        :rtype: :class:`gcloud.storage.bucket.Bucket`
+        :returns: The newly created bucket.
+        """
 
-    bucket = self.new_bucket(bucket)
-    response = self.api_request(method='POST', path='/b',
-                                data={'name': bucket.name})
-    return Bucket.from_dict(response, connection=self)
+        bucket = self.new_bucket(bucket)
+        response = self.api_request(method='POST', path='/b',
+                                    data={'name': bucket.name})
+        return Bucket.from_dict(response, connection=self)
+
+    def delete_bucket(self, bucket, force=False):
+        """Delete a bucket.
+
+        You can use this method to delete a bucket by name,
+        or to delete a bucket object::
+
+            >>> from gcloud import storage
+            >>> connection = storage.get_connection(project, email, key_path)
+            >>> connection.delete_bucket('my-bucket')
+            True
+
+        You can also pass in the bucket object::
+
+            >>> bucket = connection.get_bucket('other-bucket')
+            >>> connection.delete_bucket(bucket)
+            True
+
+        If the bucket doesn't exist,
+        this will raise a :class:`gcloud.storage.exceptions.NotFoundError`::
+
+            >>> from gcloud.storage import exceptions
+            >>> try:
+            >>>   connection.delete_bucket('my-bucket')
+            >>> except exceptions.NotFoundError:
+            >>>   print 'That bucket does not exist!'
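+
+        If the bucket still contains keys, pass ``force=True`` to
+        delete every contained object first and then the bucket
+        (note that this iterates over all the keys and can be slow)::
+
+            >>> connection.delete_bucket('non-empty-bucket', force=True)
+            True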
+
+        :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
+        :param bucket: The bucket name (or bucket object) to delete.
+
+        :type force: bool
+        :param force: If True, empties the bucket's objects then deletes it.
+
+        :rtype: bool
+        :returns: True if the bucket was deleted.
+        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
+        """
+
+        bucket = self.new_bucket(bucket)
+
+        # This force delete operation is slow.
+        if force:
+            for key in bucket:
+                key.delete()
+
+        response = self.api_request(method='DELETE', path=bucket.path)
+        return True
+
+    def new_bucket(self, bucket):
+        """Factory method for creating a new (unsaved) bucket object.
+
+        This method is really useful when you're not sure whether
+        you have an actual :class:`gcloud.storage.bucket.Bucket` object
+        or just a name of a bucket.
+        It always returns the object::
+
+            >>> bucket = connection.new_bucket('bucket')
+            >>> print bucket
+            <Bucket: bucket>
+            >>> bucket = connection.new_bucket(bucket)
+            >>> print bucket
+            <Bucket: bucket>
+
+        :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
+        :param bucket: A name of a bucket or an existing Bucket object.
+        """
+
+        if isinstance(bucket, Bucket):
+            return bucket
+
+        # Support Python 2 and 3.
+        try:
+            string_type = basestring
+        except NameError:  # pragma NO COVER PY3k
+            string_type = str
+
+        if isinstance(bucket, string_type):
+            return Bucket(connection=self, name=bucket)
+
+        raise TypeError('Invalid bucket: %s' % bucket)
+
+    def generate_signed_url(self, resource, expiration,
+                            method='GET', content_md5=None,
+                            content_type=None):  # pragma NO COVER
+        """Generate signed URL to provide query-string auth'n to a resource.
+
+        :type resource: string
+        :param resource: A pointer to a specific resource
+                         (typically, ``/bucket-name/path/to/key.txt``).
+
+        :type expiration: int, long, datetime.datetime, datetime.timedelta
+        :param expiration: When the signed URL should expire.
+
+        :type method: string
+        :param method: The HTTP verb that will be used when requesting the URL.
+
+        :type content_md5: string
+        :param content_md5: The MD5 hash of the object referenced by
+                            ``resource``.
+
+        :type content_type: string
+        :param content_type: The content type of the object referenced by
+                             ``resource``.
+
+        :rtype: string
+        :returns: A signed URL you can use to access the resource
+                  until expiration.
+        """
+
+        # expiration can be an absolute timestamp (int, long),
+        # an absolute time (datetime.datetime),
+        # or a relative time (datetime.timedelta).
+        # We should convert all of these into an absolute timestamp.
+
+        # If it's a timedelta, add it to `now` in UTC.
+        if isinstance(expiration, datetime.timedelta):
+            now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
+            expiration = now + expiration
+
+        # If it's a datetime, convert to a timestamp.
+        if isinstance(expiration, datetime.datetime):
+            # Make sure the timezone on the value is UTC
+            # (either by converting or replacing the value).
+            if expiration.tzinfo:
+                expiration = expiration.astimezone(pytz.utc)
+            else:
+                expiration = expiration.replace(tzinfo=pytz.utc)
+
+            # Turn the datetime into a timestamp (seconds, not microseconds).
+            expiration = int(time.mktime(expiration.timetuple()))
+
+        if not isinstance(expiration, (int, long)):
+            raise ValueError('Expected an integer timestamp, datetime, or '
+                             'timedelta. Got %s' % type(expiration))
+
+        # Generate the string to sign.
+        signature_string = '\n'.join([
+            method,
+            content_md5 or '',
+            content_type or '',
+            str(expiration),
+            resource])
+
+        # Take our PKCS12 (.p12) key and make it into an RSA key we can use...
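+        # (The private key ships base64-encoded on the credentials
+        # object, and 'notasecret' is the passphrase Google assigns to
+        # downloaded .p12 service-account keys.)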
+ pkcs12 = crypto.load_pkcs12( + base64.b64decode(self.credentials.private_key), + 'notasecret') + pem = crypto.dump_privatekey( + crypto.FILETYPE_PEM, pkcs12.get_privatekey()) + pem_key = RSA.importKey(pem) + + # Sign the string with the RSA key. + signer = PKCS1_v1_5.new(pem_key) + signature_hash = SHA256.new(signature_string) + signature_bytes = signer.sign(signature_hash) + signature = base64.b64encode(signature_bytes) + + # Set the right query parameters. + query_params = { + 'GoogleAccessId': self.credentials.service_account_name, + 'Expires': str(expiration), + 'Signature': signature + } + + # Return the built URL. + return '{endpoint}{resource}?{querystring}'.format( + endpoint=self.API_ACCESS_ENDPOINT, resource=resource, + querystring=urllib.urlencode(query_params)) diff --git a/gcloud/storage/demo/__init__.py b/gcloud/storage/demo/__init__.py index 156a543144b8..8aa5aee3f248 100644 --- a/gcloud/storage/demo/__init__.py +++ b/gcloud/storage/demo/__init__.py @@ -1,10 +1,8 @@ import os from gcloud import storage - __all__ = ['get_connection', 'CLIENT_EMAIL', 'PRIVATE_KEY_PATH', 'PROJECT'] - CLIENT_EMAIL = ('606734090113-6ink7iugcv89da9sru7lii8bs3i0obqg@' 'developer.gserviceaccount.com') PRIVATE_KEY_PATH = os.path.join(os.path.dirname(__file__), 'demo.key') @@ -12,4 +10,4 @@ def get_connection(): # pragma NO COVER. - return storage.get_connection(PROJECT, CLIENT_EMAIL, PRIVATE_KEY_PATH) + return storage.get_connection(PROJECT, CLIENT_EMAIL, PRIVATE_KEY_PATH) diff --git a/gcloud/storage/demo/__main__.py b/gcloud/storage/demo/__main__.py index e02f2c06dad2..8a2d2ae33061 100644 --- a/gcloud/storage/demo/__main__.py +++ b/gcloud/storage/demo/__main__.py @@ -1,5 +1,4 @@ from gcloud import demo from gcloud import storage - demo.DemoRunner.from_module(storage).run() diff --git a/gcloud/storage/exceptions.py b/gcloud/storage/exceptions.py index 1e5429d6d0b7..58b53f6a1257 100644 --- a/gcloud/storage/exceptions.py +++ b/gcloud/storage/exceptions.py @@ -1,19 +1,19 @@ class StorageError(Exception): - pass + pass class ConnectionError(StorageError): - def __init__(self, response, content): - message = str(response) + content - super(ConnectionError, self).__init__(message) + def __init__(self, response, content): + message = str(response) + content + super(ConnectionError, self).__init__(message) class NotFoundError(ConnectionError): - def __init__(self, response, content): - self.message = 'Request returned a 404. Headers: %s' % (response) + def __init__(self, response, content): + self.message = 'Request returned a 404. Headers: %s' % (response) class StorageDataError(StorageError): - pass + pass diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py index 25e4d1c84126..3aecaab3982d 100644 --- a/gcloud/storage/iterator.py +++ b/gcloud/storage/iterator.py @@ -41,202 +41,202 @@ def get_items_from_response(self, response): class Iterator(object): - """A generic class for iterating through Cloud Storage list responses. + """A generic class for iterating through Cloud Storage list responses. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: The connection to use to make requests. + :type connection: :class:`gcloud.storage.connection.Connection` + :param connection: The connection to use to make requests. - :type path: string - :param path: The path to query for the list of items. - """ + :type path: string + :param path: The path to query for the list of items. 
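+
+    Iterating keeps requesting pages until the response no longer
+    carries a ``nextPageToken``; see :func:`Iterator.has_next_page`
+    and :func:`Iterator.get_next_page_response`.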
+    """
+
+    def __init__(self, connection, path):
+        self.connection = connection
+        self.path = path
+        self.page_number = 0
+        self.next_page_token = None
+
+    def __iter__(self):
+        """Iterate through the list of items."""
+
+        while self.has_next_page():
+            response = self.get_next_page_response()
+            for item in self.get_items_from_response(response):
+                yield item
+
+    def has_next_page(self):
+        """Determines whether or not this iterator has more pages.
+
+        :rtype: bool
+        :returns: Whether the iterator has more pages or not.
+        """
+
+        if self.page_number == 0:
+            return True
+
+        return self.next_page_token is not None
+
+    def get_query_params(self):
+        """Getter for query parameters for the next request.
+
+        :rtype: dict or None
+        :returns: A dictionary of query parameters or None if there are none.
+        """
+
+        if self.next_page_token:
+            return {'pageToken': self.next_page_token}
+
+    def get_next_page_response(self):
+        """Requests the next page from the path provided.
+
+        :rtype: dict
+        :returns: The parsed JSON response of the next page's contents.
+        """
+
+        if not self.has_next_page():
+            raise RuntimeError('No more pages. Try resetting the iterator.')
+
+        response = self.connection.api_request(
+            method='GET', path=self.path, query_params=self.get_query_params())
+
+        self.page_number += 1
+        self.next_page_token = response.get('nextPageToken')
+
+        return response
+
+    def reset(self):
+        """Resets the iterator to the beginning."""
+        self.page_number = 0
+        self.next_page_token = None
+
+    def get_items_from_response(self, response):  # pragma NO COVER
+        """Factory method called while iterating. This should be overridden.
+
+        This method should be overridden by a subclass.
+        It should accept the API response
+        of a request for the next page of items,
+        and return a list (or other iterable)
+        of items.
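+
+        A minimal override might look like this (a sketch, mirroring
+        :class:`BucketIterator` below)::
+
+            def get_items_from_response(self, response):
+                for item in response.get('items', []):
+                    yield item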
- Typically this method will construct - a Bucket or a Key - from the page of results in the response. + Typically this method will construct + a Bucket or a Key + from the page of results in the response. - :type response: dict - :param response: The response of asking for the next page of items. + :type response: dict + :param response: The response of asking for the next page of items. - :rtype: iterable - :returns: Items that the iterator should yield. - """ - raise NotImplementedError + :rtype: iterable + :returns: Items that the iterator should yield. + """ + raise NotImplementedError class BucketIterator(Iterator): - """An iterator listing all buckets. + """An iterator listing all buckets. - You shouldn't have to use this directly, - but instead should use the helper methods - on :class:`gcloud.storage.connection.Connection` objects. + You shouldn't have to use this directly, + but instead should use the helper methods + on :class:`gcloud.storage.connection.Connection` objects. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: The connection to use for querying the list of buckets. - """ + :type connection: :class:`gcloud.storage.connection.Connection` + :param connection: The connection to use for querying the list of buckets. + """ - def __init__(self, connection): - super(BucketIterator, self).__init__(connection=connection, path='/b') + def __init__(self, connection): + super(BucketIterator, self).__init__(connection=connection, path='/b') - def get_items_from_response(self, response): - """Factory method which yields :class:`.Bucket` items from a response. + def get_items_from_response(self, response): + """Factory method which yields :class:`.Bucket` items from a response. - :type response: dict - :param response: The JSON API response for a page of buckets. - """ + :type response: dict + :param response: The JSON API response for a page of buckets. + """ - from gcloud.storage.bucket import Bucket - for item in response.get('items', []): - yield Bucket.from_dict(item, connection=self.connection) + from gcloud.storage.bucket import Bucket + for item in response.get('items', []): + yield Bucket.from_dict(item, connection=self.connection) class KeyIterator(Iterator): - """An iterator listing keys. + """An iterator listing keys. - You shouldn't have to use this directly, - but instead should use the helper methods - on :class:`gcloud.storage.key.Key` objects. + You shouldn't have to use this directly, + but instead should use the helper methods + on :class:`gcloud.storage.key.Key` objects. - :type bucket: :class:`gcloud.storage.bucket.Bucket` - :param bucket: The bucket from which to list keys. - """ + :type bucket: :class:`gcloud.storage.bucket.Bucket` + :param bucket: The bucket from which to list keys. + """ - def __init__(self, bucket): - self.bucket = bucket - super(KeyIterator, self).__init__( - connection=bucket.connection, path=bucket.path + '/o') + def __init__(self, bucket): + self.bucket = bucket + super(KeyIterator, self).__init__( + connection=bucket.connection, path=bucket.path + '/o') - def get_items_from_response(self, response): - """Factory method, yields :class:`.storage.key.Key` items from response. + def get_items_from_response(self, response): + """Factory method, yields :class:`.storage.key.Key` items from response. - :type response: dict - :param response: The JSON API response for a page of keys. - """ + :type response: dict + :param response: The JSON API response for a page of keys. 
+ """ - from gcloud.storage.key import Key - for item in response.get('items', []): - yield Key.from_dict(item, bucket=self.bucket) + from gcloud.storage.key import Key + for item in response.get('items', []): + yield Key.from_dict(item, bucket=self.bucket) class KeyDataIterator(object): - def __init__(self, key): - self.key = key - self.reset() + def __init__(self, key): + self.key = key + self.reset() - def __iter__(self): - while self.has_more_data(): - yield self.get_next_chunk() + def __iter__(self): + while self.has_more_data(): + yield self.get_next_chunk() - def reset(self): - self._bytes_written = 0 - self._total_bytes = None + def reset(self): + self._bytes_written = 0 + self._total_bytes = None - def has_more_data(self): - if self._bytes_written == 0: - return True - elif not self._total_bytes: - # self._total_bytes **should** be set by this point. - # If it isn't, something is wrong. - raise ValueError('Size of object is unknown... This is bad.') - else: - return (self._bytes_written < self._total_bytes) + def has_more_data(self): + if self._bytes_written == 0: + return True + elif not self._total_bytes: + # self._total_bytes **should** be set by this point. + # If it isn't, something is wrong. + raise ValueError('Size of object is unknown... This is bad.') + else: + return (self._bytes_written < self._total_bytes) - def get_headers(self): - start = self._bytes_written - end = self._bytes_written + self.key.CHUNK_SIZE - 1 + def get_headers(self): + start = self._bytes_written + end = self._bytes_written + self.key.CHUNK_SIZE - 1 - if self._total_bytes and end > self._total_bytes: - end = '' + if self._total_bytes and end > self._total_bytes: + end = '' - return {'Range': 'bytes=%s-%s' % (start, end)} + return {'Range': 'bytes=%s-%s' % (start, end)} - def get_url(self): - return self.key.connection.build_api_url( - path=self.key.path, query_params={'alt': 'media'}) + def get_url(self): + return self.key.connection.build_api_url( + path=self.key.path, query_params={'alt': 'media'}) - def get_next_chunk(self): - if not self.has_more_data(): - raise RuntimeError('No more data in this iterator. Try resetting.') + def get_next_chunk(self): + if not self.has_more_data(): + raise RuntimeError('No more data in this iterator. Try resetting.') - response, content = self.key.connection.make_request( - method='GET', url=self.get_url(), headers=self.get_headers()) + response, content = self.key.connection.make_request( + method='GET', url=self.get_url(), headers=self.get_headers()) - if response.status in (200, 206): - self._bytes_written += len(content) + if response.status in (200, 206): + self._bytes_written += len(content) - if 'content-range' in response: - content_range = response['content-range'] - self._total_bytes = int(content_range.rsplit('/', 1)[1]) + if 'content-range' in response: + content_range = response['content-range'] + self._total_bytes = int(content_range.rsplit('/', 1)[1]) - return content + return content - # Expected a 200 or a 206... Got something else, which is bad. - raise Exception(response) + # Expected a 200 or a 206... Got something else, which is bad. + raise Exception(response) diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index 490c68cf77fb..4562d81166ec 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -9,428 +9,431 @@ class Key(object): - """A wrapper around Cloud Storage's concept of an ``Object``.""" + """A wrapper around Cloud Storage's concept of an ``Object``.""" - CHUNK_SIZE = 1024 * 1024 # 1 MB. 
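# Worked example of the Range headers KeyDataIterator (above) builds
# from CHUNK_SIZE (an illustration, not part of this patch). For a
# 2,621,440 byte (2.5 MB) object:
#     request 1: 'Range: bytes=0-1048575'
#     request 2: 'Range: bytes=1048576-2097151'
#     request 3: 'Range: bytes=2097152-'  (the end offset is dropped
#                once it would pass the total parsed from the
#                'Content-Range: bytes 0-1048575/2621440' response)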
-  """The size of a chunk of data whenever iterating (1 MB).
+    CHUNK_SIZE = 1024 * 1024  # 1 MB.
+    """The size of a chunk of data whenever iterating (1 MB).

    This must be a multiple of 256 KB per the API specification.
    """

-  def __init__(self, bucket=None, name=None, metadata=None):
-    """
-    :type bucket: :class:`gcloud.storage.bucket.Bucket`
-    :param bucket: The bucket to which this key belongs.
+    def __init__(self, bucket=None, name=None, metadata=None):
+        """Key constructor.

-    :type name: string
-    :param name: The name of the key.
-                 This corresponds to the unique path of the object
-                 in the bucket.
+        :type bucket: :class:`gcloud.storage.bucket.Bucket`
+        :param bucket: The bucket to which this key belongs.

-    :type metadata: dict
-    :param metadata: All the other data provided by Cloud Storage.
-    """
+        :type name: string
+        :param name: The name of the key.
+                     This corresponds to the unique path of the object
+                     in the bucket.

-    self.bucket = bucket
-    self.name = name
-    self.metadata = metadata or {}
+        :type metadata: dict
+        :param metadata: All the other data provided by Cloud Storage.
+        """

-    # Lazily get the ACL information.
-    self.acl = None
+        self.bucket = bucket
+        self.name = name
+        self.metadata = metadata or {}

-  @classmethod
-  def from_dict(cls, key_dict, bucket=None):
-    """Instantiate a :class:`Key` from data returned by the JSON API.
+        # Lazily get the ACL information.
+        self.acl = None

-    :type key_dict: dict
-    :param key_dict: A dictionary of data returned from
-                     getting an Cloud Storage object.
+    @classmethod
+    def from_dict(cls, key_dict, bucket=None):
+        """Instantiate a :class:`Key` from data returned by the JSON API.

-    :type bucket: :class:`gcloud.storage.bucket.Bucket`
-    :param bucket: The bucket to which this key belongs
-                   (and by proxy, which connection to use).
+        :type key_dict: dict
+        :param key_dict: A dictionary of data returned from
+                         getting a Cloud Storage object.

-    :rtype: :class:`Key`
-    :returns: A key based on the data provided.
-    """
+        :type bucket: :class:`gcloud.storage.bucket.Bucket`
+        :param bucket: The bucket to which this key belongs
+                       (and by proxy, which connection to use).

-    return cls(bucket=bucket, name=key_dict['name'], metadata=key_dict)
+        :rtype: :class:`Key`
+        :returns: A key based on the data provided.
+        """

-  def __repr__(self):  # pragma NO COVER
-    if self.bucket:
-      bucket_name = self.bucket.name
-    else:
-      bucket_name = None
+        return cls(bucket=bucket, name=key_dict['name'], metadata=key_dict)

-    return '<Key: %s, %s>' % (bucket_name, self.name)
+    def __repr__(self):  # pragma NO COVER
+        if self.bucket:
+            bucket_name = self.bucket.name
+        else:
+            bucket_name = None

-  @property
-  def connection(self):
-    """Getter property for the connection to use with this Key.
+        return '<Key: %s, %s>' % (bucket_name, self.name)

-    :rtype: :class:`gcloud.storage.connection.Connection` or None
-    :returns: The connection to use, or None if no connection is set.
-    """
+    @property
+    def connection(self):
+        """Getter property for the connection to use with this Key.

-    if self.bucket and self.bucket.connection:
-      return self.bucket.connection
+        :rtype: :class:`gcloud.storage.connection.Connection` or None
+        :returns: The connection to use, or None if no connection is set.
+        """

-  @property
-  def path(self):
-    """Getter property for the URL path to this Key.
+        if self.bucket and self.bucket.connection:
+            return self.bucket.connection

+    @property
+    def path(self):
+        """Getter property for the URL path to this Key.
- if not self.bucket: - raise ValueError('Cannot determine path without a bucket defined.') - elif not self.name: - raise ValueError('Cannot determine path without a key name.') + :rtype: string + :returns: The URL path to this Key. + """ - return self.bucket.path + '/o/' + self.name + if not self.bucket: + raise ValueError('Cannot determine path without a bucket defined.') + elif not self.name: + raise ValueError('Cannot determine path without a key name.') - @property - def public_url(self): - return '{storage_base_url}/{self.bucket.name}/{self.name}'.format( - storage_base_url='http://commondatastorage.googleapis.com', self=self) + return self.bucket.path + '/o/' + self.name - def generate_signed_url(self, expiration, - method='GET'): # pragma NO COVER UGH - """Generates a signed URL for this key. + @property + def public_url(self): + return '{storage_base_url}/{self.bucket.name}/{self.name}'.format( + storage_base_url='http://commondatastorage.googleapis.com', + self=self) - If you have a key that you want to allow access to - for a set amount of time, - you can use this method to generate a URL - that is only valid within a certain time period. + def generate_signed_url(self, expiration, + method='GET'): # pragma NO COVER + """Generates a signed URL for this key. - This is particularly useful if you don't want publicly accessible keys, - but don't want to require users to explicitly log in. + If you have a key that you want to allow access to + for a set amount of time, + you can use this method to generate a URL + that is only valid within a certain time period. - :type expiration: int, long, datetime.datetime, datetime.timedelta - :param expiration: When the signed URL should expire. + This is particularly useful if you don't want publicly accessible keys, + but don't want to require users to explicitly log in. - :type method: string - :param method: The HTTP verb that will be used when requesting the URL. + :type expiration: int, long, datetime.datetime, datetime.timedelta + :param expiration: When the signed URL should expire. - :rtype: string - :returns: A signed URL you can use to access the resource until expiration. - """ + :type method: string + :param method: The HTTP verb that will be used when requesting the URL. - resource = '/{self.bucket.name}/{self.name}'.format(self=self) - return self.connection.generate_signed_url(resource=resource, - expiration=expiration, - method=method) + :rtype: string + :returns: A signed URL you can use to access the resource + until expiration. + """ - def exists(self): - """Determines whether or not this key exists. + resource = '/{self.bucket.name}/{self.name}'.format(self=self) + return self.connection.generate_signed_url(resource=resource, + expiration=expiration, + method=method) - :rtype: bool - :returns: True if the key exists in Cloud Storage. - """ + def exists(self): + """Determines whether or not this key exists. - return self.bucket.get_key(self.name) is not None + :rtype: bool + :returns: True if the key exists in Cloud Storage. + """ - def delete(self): - """Deletes a key from Cloud Storage. + return self.bucket.get_key(self.name) is not None - :rtype: :class:`Key` - :returns: The key that was just deleted. - """ + def delete(self): + """Deletes a key from Cloud Storage. - return self.bucket.delete_key(self) + :rtype: :class:`Key` + :returns: The key that was just deleted. + """ - def get_contents_to_file(self, fh): - """Gets the contents of this key to a file-like object. 
+ return self.bucket.delete_key(self) - :type fh: file - :param fh: A file handle to which to write the key's data. + def get_contents_to_file(self, fh): + """Gets the contents of this key to a file-like object. - :raises: :class:`gcloud.storage.exceptions.NotFoundError` - """ + :type fh: file + :param fh: A file handle to which to write the key's data. - for chunk in KeyDataIterator(self): - try: - fh.write(chunk) - except IOError, e: # pragma NO COVER - if e.errno == errno.ENOSPC: - raise Exception('No space left on device.') + :raises: :class:`gcloud.storage.exceptions.NotFoundError` + """ - def get_contents_to_filename(self, filename): - """Get the contents of this key to a file by name. + for chunk in KeyDataIterator(self): + try: + fh.write(chunk) + except IOError, e: # pragma NO COVER + if e.errno == errno.ENOSPC: + raise Exception('No space left on device.') - :type filename: string - :param filename: A filename to be passed to ``open``. + def get_contents_to_filename(self, filename): + """Get the contents of this key to a file by name. - :raises: :class:`gcloud.storage.exceptions.NotFoundError` - """ + :type filename: string + :param filename: A filename to be passed to ``open``. - with open(filename, 'wb') as fh: - self.get_contents_to_file(fh) + :raises: :class:`gcloud.storage.exceptions.NotFoundError` + """ - def get_contents_as_string(self): - """Gets the data stored on this Key as a string. + with open(filename, 'wb') as fh: + self.get_contents_to_file(fh) - :rtype: string - :returns: The data stored in this key. - :raises: :class:`gcloud.storage.exceptions.NotFoundError` - """ + def get_contents_as_string(self): + """Gets the data stored on this Key as a string. - string_buffer = StringIO() - self.get_contents_to_file(string_buffer) - return string_buffer.getvalue() + :rtype: string + :returns: The data stored in this key. + :raises: :class:`gcloud.storage.exceptions.NotFoundError` + """ - def set_contents_from_file(self, fh, rewind=False, size=None, - content_type=None): - """Set the contents of this key to the contents of a file handle. + string_buffer = StringIO() + self.get_contents_to_file(string_buffer) + return string_buffer.getvalue() - :type fh: file - :param fh: A file handle open for reading. + def set_contents_from_file(self, fh, rewind=False, size=None, + content_type=None): + """Set the contents of this key to the contents of a file handle. - :type rewind: bool - :param rewind: If True, seek to the beginning of the file handle before - writing the file to Cloud Storage. + :type fh: file + :param fh: A file handle open for reading. - :type size: int - :param size: The number of bytes to read from the file handle. - If not provided, we'll try to guess the size using - :func:`os.fstat` - """ + :type rewind: bool + :param rewind: If True, seek to the beginning of the file handle before + writing the file to Cloud Storage. - # Rewind the file if desired. - if rewind: - fh.seek(0, os.SEEK_SET) + :type size: int + :param size: The number of bytes to read from the file handle. + If not provided, we'll try to guess the size using + :func:`os.fstat` + """ - # Get the basic stats about the file. - total_bytes = size or os.fstat(fh.fileno()).st_size - bytes_uploaded = 0 + # Rewind the file if desired. + if rewind: + fh.seek(0, os.SEEK_SET) - # Set up a resumable upload session. - headers = { - 'X-Upload-Content-Type': content_type or 'application/unknown', - 'X-Upload-Content-Length': total_bytes - } + # Get the basic stats about the file. 
+ total_bytes = size or os.fstat(fh.fileno()).st_size + bytes_uploaded = 0 - upload_url = self.connection.build_api_url( - path=self.bucket.path + '/o', - query_params={'uploadType': 'resumable', 'name': self.name}, - api_base_url=self.connection.API_BASE_URL + '/upload') + # Set up a resumable upload session. + headers = { + 'X-Upload-Content-Type': content_type or 'application/unknown', + 'X-Upload-Content-Length': total_bytes, + } - response, content = self.connection.make_request( - method='POST', url=upload_url, - headers=headers) + upload_url = self.connection.build_api_url( + path=self.bucket.path + '/o', + query_params={'uploadType': 'resumable', 'name': self.name}, + api_base_url=self.connection.API_BASE_URL + '/upload') - # Get the resumable upload URL. - upload_url = response['location'] + response, content = self.connection.make_request( + method='POST', url=upload_url, + headers=headers) - while bytes_uploaded < total_bytes: - # Construct the range header. - data = fh.read(self.CHUNK_SIZE) - chunk_size = len(data) + # Get the resumable upload URL. + upload_url = response['location'] - start = bytes_uploaded - end = bytes_uploaded + chunk_size - 1 + while bytes_uploaded < total_bytes: + # Construct the range header. + data = fh.read(self.CHUNK_SIZE) + chunk_size = len(data) - headers = { - 'Content-Range': 'bytes %d-%d/%d' % (start, end, total_bytes), - } + start = bytes_uploaded + end = bytes_uploaded + chunk_size - 1 - response, content = self.connection.make_request( - content_type='text/plain', - method='POST', url=upload_url, headers=headers, data=data) + headers = { + 'Content-Range': 'bytes %d-%d/%d' % (start, end, total_bytes), + } - bytes_uploaded += chunk_size + response, content = self.connection.make_request( + content_type='text/plain', + method='POST', url=upload_url, headers=headers, data=data) - def set_contents_from_filename(self, filename): - """Open a path and set this key's contents to the content of that file. + bytes_uploaded += chunk_size - :type filename: string - :param filename: The path to the file. - """ + def set_contents_from_filename(self, filename): + """Open a path and set this key's contents to the content of that file. - content_type, _ = mimetypes.guess_type(filename) + :type filename: string + :param filename: The path to the file. + """ - with open(filename, 'rb') as fh: - self.set_contents_from_file(fh, content_type=content_type) + content_type, _ = mimetypes.guess_type(filename) - def set_contents_from_string(self, data, content_type='text/plain'): - """Sets the contents of this key to the provided string. + with open(filename, 'rb') as fh: + self.set_contents_from_file(fh, content_type=content_type) - You can use this method to quickly set the value of a key:: + def set_contents_from_string(self, data, content_type='text/plain'): + """Sets the contents of this key to the provided string. - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, key_path) - >>> bucket = connection.get_bucket(bucket_name) - >>> key = bucket.new_key('my_text_file.txt') - >>> key.set_contents_from_string('This is the contents of my file!') + You can use this method to quickly set the value of a key:: - Under the hood this is using a string buffer - and calling :func:`gcloud.storage.key.Key.set_contents_from_file`. 
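# Worked example of the Content-Range headers produced by the upload
# loop above (an illustration, not part of this patch): for a
# 2,621,440 byte (2.5 MB) file and the 1 MB CHUNK_SIZE, the chunks
# are sent as
#     'Content-Range: bytes 0-1048575/2621440'
#     'Content-Range: bytes 1048576-2097151/2621440'
#     'Content-Range: bytes 2097152-2621439/2621440'
# i.e. inclusive byte ranges, which is why end = start + chunk_size - 1.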
+        >>> from gcloud import storage
+        >>> connection = storage.get_connection(project, email, key_path)
+        >>> bucket = connection.get_bucket(bucket_name)
+        >>> key = bucket.new_key('my_text_file.txt')
+        >>> key.set_contents_from_string('This is the contents of my file!')

-    :type data: string
-    :param data: The data to store in this key.
+        Under the hood this is using a string buffer
+        and calling :func:`gcloud.storage.key.Key.set_contents_from_file`.

-    :rtype: :class:`Key`
-    :returns: The updated Key object.
-    """
+        :type data: string
+        :param data: The data to store in this key.

-    string_buffer = StringIO()
-    string_buffer.write(data)
-    self.set_contents_from_file(fh=string_buffer, rewind=True,
-                                size=string_buffer.len,
-                                content_type=content_type)
-    return self
+        :rtype: :class:`Key`
+        :returns: The updated Key object.
+        """

-  def has_metadata(self, field=None):
-    """Check if metadata is available locally.
+        string_buffer = StringIO()
+        string_buffer.write(data)
+        self.set_contents_from_file(fh=string_buffer, rewind=True,
+                                    size=string_buffer.len,
+                                    content_type=content_type)
+        return self

-    :type field: string
-    :param field: (optional) the particular field to check for.
+    def has_metadata(self, field=None):
+        """Check if metadata is available locally.

-    :rtype: bool
-    :returns: Whether metadata is available locally.
-    """
+        :type field: string
+        :param field: (optional) the particular field to check for.

-    if not self.metadata:
-      return False
-    elif field and field not in self.metadata:
-      return False
-    else:
-      return True
+        :rtype: bool
+        :returns: Whether metadata is available locally.
+        """

-  def reload_metadata(self, full=False):
-    """Reload metadata from Cloud Storage.
+        if not self.metadata:
+            return False
+        elif field and field not in self.metadata:
+            return False
+        else:
+            return True

-    :type full: bool
-    :param full: If True, loads all data (include ACL data).
+    def reload_metadata(self, full=False):
+        """Reload metadata from Cloud Storage.

-    :rtype: :class:`Key`
-    :returns: The key you just reloaded data for.
-    """
+        :type full: bool
+        :param full: If True, loads all data (including ACL data).

-    projection = 'full' if full else 'noAcl'
-    query_params = {'projection': projection}
-    self.metadata = self.connection.api_request(
-        method='GET', path=self.path, query_params=query_params)
-    return self
+        :rtype: :class:`Key`
+        :returns: The key you just reloaded data for.
+        """

-  def get_metadata(self, field=None, default=None):
-    """Get all metadata or a specific field.
+        projection = 'full' if full else 'noAcl'
+        query_params = {'projection': projection}
+        self.metadata = self.connection.api_request(
+            method='GET', path=self.path, query_params=query_params)
+        return self

-    If you request a field that isn't available,
-    and that field can be retrieved by refreshing data
-    from Cloud Storage,
-    this method will reload the data using
-    :func:`Key.reload_metadata`.
+    def get_metadata(self, field=None, default=None):
+        """Get all metadata or a specific field.

-    :type field: string
-    :param field: (optional) A particular field to retrieve from metadata.
+        If you request a field that isn't available,
+        and that field can be retrieved by refreshing data
+        from Cloud Storage,
+        this method will reload the data using
+        :func:`Key.reload_metadata`.

+        :type field: string
+        :param field: (optional) A particular field to retrieve from metadata.

-    :rtype: dict or anything
-    :returns: All metadata or the value of the specific field.
-    """
+        :type default: anything
+        :param default: The value to return if the field provided wasn't found.

-    if not self.has_metadata(field=field):
-      full = (field and field == 'acl')
-      self.reload_metadata(full=full)
+        :rtype: dict or anything
+        :returns: All metadata or the value of the specific field.
+        """

-    if field:
-      return self.metadata.get(field, default)
-    else:
-      return self.metadata
+        if not self.has_metadata(field=field):
+            full = (field and field == 'acl')
+            self.reload_metadata(full=full)

-  def patch_metadata(self, metadata):
-    """Update particular fields of this key's metadata.
+        if field:
+            return self.metadata.get(field, default)
+        else:
+            return self.metadata

-    This method will only update the fields provided
-    and will not touch the other fields.
+    def patch_metadata(self, metadata):
+        """Update particular fields of this key's metadata.

-    It will also reload the metadata locally
-    based on the servers response.
+        This method will only update the fields provided
+        and will not touch the other fields.

-    :type metadata: dict
-    :param metadata: The dictionary of values to update.
+        It will also reload the metadata locally
+        based on the server's response.

-    :rtype: :class:`Key`
-    :returns: The current key.
-    """
+        :type metadata: dict
+        :param metadata: The dictionary of values to update.

-    self.metadata = self.connection.api_request(
-        method='PATCH', path=self.path, data=metadata,
-        query_params={'projection': 'full'})
-    return self
+        :rtype: :class:`Key`
+        :returns: The current key.
+        """

-  def reload_acl(self):
-    """Reload the ACL data from Cloud Storage.
+        self.metadata = self.connection.api_request(
+            method='PATCH', path=self.path, data=metadata,
+            query_params={'projection': 'full'})
+        return self

-    :rtype: :class:`Key`
-    :returns: The current key.
-    """
+    def reload_acl(self):
+        """Reload the ACL data from Cloud Storage.

-    self.acl = ObjectACL(key=self)
+        :rtype: :class:`Key`
+        :returns: The current key.
+        """

-    for entry in self.get_metadata('acl', []):
-      entity = self.acl.entity_from_dict(entry)
-      self.acl.add_entity(entity)
+        self.acl = ObjectACL(key=self)

-    return self
+        for entry in self.get_metadata('acl', []):
+            entity = self.acl.entity_from_dict(entry)
+            self.acl.add_entity(entity)

-  def get_acl(self):
-    """Get ACL metadata as a :class:`gcloud.storage.acl.ObjectACL` object.
+        return self

-    :rtype: :class:`gcloud.storage.acl.ObjectACL`
-    :returns: An ACL object for the current key.
+    def get_acl(self):
+        """Get ACL metadata as a :class:`gcloud.storage.acl.ObjectACL` object.
-    """

-    if not self.acl:
-      self.reload_acl()
-    return self.acl
+        :rtype: :class:`gcloud.storage.acl.ObjectACL`
+        :returns: An ACL object for the current key.
+        """

-  def save_acl(self, acl=None):
-    """Save the ACL data for this key.
+        if not self.acl:
+            self.reload_acl()
+        return self.acl

-    :type acl: :class:`gcloud.storage.acl.ACL`
-    :param acl: The ACL object to save.
-                If left blank, this will save the ACL
-                set locally on the key.
+    def save_acl(self, acl=None):
+        """Save the ACL data for this key.

+        :type acl: :class:`gcloud.storage.acl.ACL`
+        :param acl: The ACL object to save.
+                    If left blank, this will save the ACL
+                    set locally on the key.
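# Sketch of the ACL round-trip built from the methods above (an
# illustration, not part of this patch); 'key' stands for an existing
# Key with a live connection.
#
#     acl = key.get_acl()     # lazy: calls reload_acl() on first use
#     acl.all().grant_read()  # the same grant make_public() applies
#     key.save_acl()          # PATCHes metadata, then reloads the ACL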
+ """ - if acl is None: - return self + # We do things in this weird way because [] and None + # both evaluate to False, but mean very different things. + if acl is None: + acl = self.acl - self.patch_metadata({'acl': list(acl)}) - self.reload_acl() - return self + if acl is None: + return self - def clear_acl(self): - """Remove all ACL rules from the key. + self.patch_metadata({'acl': list(acl)}) + self.reload_acl() + return self - Note that this won't actually remove *ALL* the rules, - but it will remove all the non-default rules. - In short, - you'll still have access - to a key that you created - even after you clear ACL rules - with this method. - """ + def clear_acl(self): + """Remove all ACL rules from the key. - return self.save_acl(acl=[]) + Note that this won't actually remove *ALL* the rules, + but it will remove all the non-default rules. + In short, + you'll still have access + to a key that you created + even after you clear ACL rules + with this method. + """ - def make_public(self): - """Make this key public giving all users read access. + return self.save_acl(acl=[]) - :rtype: :class:`Key` - :returns: The current key. - """ + def make_public(self): + """Make this key public giving all users read access. - self.get_acl().all().grant_read() - self.save_acl() - return self + :rtype: :class:`Key` + :returns: The current key. + """ + + self.get_acl().all().grant_read() + self.save_acl() + return self From a8e016ed5c240ead130e6dac923a85b59cafe9ee Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 2 Oct 2014 14:06:22 -0700 Subject: [PATCH 2/4] Fixing line-length and import order in datastore.query. --- gcloud/datastore/query.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 42369684ace1..76a6ed12ae0e 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -1,10 +1,10 @@ +import base64 import copy from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import helpers from gcloud.datastore.entity import Entity from gcloud.datastore.key import Key -import base64 class Query(object): @@ -341,8 +341,8 @@ def with_cursor(self, start_cursor, end_cursor=None): start reading query results. :type end_cursor: bytes - :param end_cursor: Base64-encoded cursor string specifying where to stop - reading query results. + :param end_cursor: Base64-encoded cursor string specifying where to + stop reading query results. :rtype: :class:`Query` :returns: If neither cursor is passed, returns self; else, returns a From 45bc84170e0667d045ae0031980d10b987f9c740 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 2 Oct 2014 14:48:42 -0700 Subject: [PATCH 3/4] Making setup.py PEP8 and pylint compliant. --- setup.py | 70 +++++++++++++++++++++++++++++--------------------------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/setup.py b/setup.py index 38127daed27c..8543b46d0e54 100644 --- a/setup.py +++ b/setup.py @@ -2,50 +2,52 @@ try: - from setuptools import setup, find_packages + from setuptools import setup, find_packages except ImportError: - from distutils.core import setup, find_packages + from distutils.core import setup, find_packages if sys.version_info <= (2, 4): - error = 'Requires Python Version 2.5 or above... exiting.' - print >> sys.stderr, error - sys.exit(1) + ERROR = 'Requires Python Version 2.5 or above... exiting.' 
+ print >> sys.stderr, ERROR + sys.exit(1) -requirements = [ +REQUIREMENTS = [ 'httplib2', 'oauth2client', 'protobuf', 'pycrypto', 'pyopenssl', 'pytz', - ] +] -setup(name='gcloud', - version='0.02.2', - description='API Client library for Google Cloud', - author='JJ Geewax', - author_email='jj@geewax.org', - scripts=[], - url='https://github.com/GoogleCloudPlatform/gcloud-python', - packages=find_packages(), - license='Apache 2.0', - platforms='Posix; MacOS X; Windows', - package_data={'': ['gcloud/datastore/demo.key', - 'gcloud/storage/demo.key']}, - include_package_data=True, - zip_safe=False, - setup_requires=requirements, - install_requires=requirements, - classifiers=['Development Status :: 1 - Planning', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.5', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Topic :: Internet', - ] - ) +setup( + name='gcloud', + version='0.02.2', + description='API Client library for Google Cloud', + author='JJ Geewax', + author_email='jj@geewax.org', + scripts=[], + url='https://github.com/GoogleCloudPlatform/gcloud-python', + packages=find_packages(), + license='Apache 2.0', + platforms='Posix; MacOS X; Windows', + package_data={'': ['gcloud/datastore/demo.key', + 'gcloud/storage/demo.key']}, + include_package_data=True, + zip_safe=False, + setup_requires=REQUIREMENTS, + install_requires=REQUIREMENTS, + classifiers=[ + 'Development Status :: 1 - Planning', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.5', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Topic :: Internet', + ] +) From aa73c1762ef29c9d3a76f8950480d070e50b24be Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 2 Oct 2014 15:46:50 -0700 Subject: [PATCH 4/4] Reverting datastore_v1_pb2 to original form. 
--- gcloud/datastore/datastore_v1_pb2.py | 270 +++++++++++++++------------ 1 file changed, 149 insertions(+), 121 deletions(-) diff --git a/gcloud/datastore/datastore_v1_pb2.py b/gcloud/datastore/datastore_v1_pb2.py index de7be0fdda59..6c789c922c4e 100644 --- a/gcloud/datastore/datastore_v1_pb2.py +++ b/gcloud/datastore/datastore_v1_pb2.py @@ -7,11 +7,16 @@ from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) + + + DESCRIPTOR = _descriptor.FileDescriptor( name='datastore_v1.proto', package='api.services.datastore', serialized_pb='\n\x12\x64\x61tastore_v1.proto\x12\x16\x61pi.services.datastore\"4\n\x0bPartitionId\x12\x12\n\ndataset_id\x18\x03 \x01(\t\x12\x11\n\tnamespace\x18\x04 \x01(\t\"\xb6\x01\n\x03Key\x12\x39\n\x0cpartition_id\x18\x01 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12=\n\x0cpath_element\x18\x02 \x03(\x0b\x32\'.api.services.datastore.Key.PathElement\x1a\x35\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xf4\x02\n\x05Value\x12\x15\n\rboolean_value\x18\x01 \x01(\x08\x12\x15\n\rinteger_value\x18\x02 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x03 \x01(\x01\x12$\n\x1ctimestamp_microseconds_value\x18\x04 \x01(\x03\x12.\n\tkey_value\x18\x05 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x16\n\x0e\x62lob_key_value\x18\x10 \x01(\t\x12\x14\n\x0cstring_value\x18\x11 \x01(\t\x12\x12\n\nblob_value\x18\x12 \x01(\x0c\x12\x34\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1e.api.services.datastore.Entity\x12\x31\n\nlist_value\x18\x07 \x03(\x0b\x32\x1d.api.services.datastore.Value\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x15\n\x07indexed\x18\x0f \x01(\x08:\x04true\"F\n\x08Property\x12\x0c\n\x04name\x18\x01 \x02(\t\x12,\n\x05value\x18\x04 \x02(\x0b\x32\x1d.api.services.datastore.Value\"f\n\x06\x45ntity\x12(\n\x03key\x18\x01 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x32\n\x08property\x18\x02 \x03(\x0b\x32 .api.services.datastore.Property\"t\n\x0c\x45ntityResult\x12.\n\x06\x65ntity\x18\x01 \x02(\x0b\x32\x1e.api.services.datastore.Entity\"4\n\nResultType\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xec\x02\n\x05Query\x12>\n\nprojection\x18\x02 \x03(\x0b\x32*.api.services.datastore.PropertyExpression\x12\x34\n\x04kind\x18\x03 \x03(\x0b\x32&.api.services.datastore.KindExpression\x12.\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1e.api.services.datastore.Filter\x12\x34\n\x05order\x18\x05 \x03(\x0b\x32%.api.services.datastore.PropertyOrder\x12;\n\x08group_by\x18\x06 \x03(\x0b\x32).api.services.datastore.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x11\n\x06offset\x18\n \x01(\x05:\x01\x30\x12\r\n\x05limit\x18\x0b \x01(\x05\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x02(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x02(\t\"\xd1\x01\n\x12PropertyExpression\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\\\n\x14\x61ggregation_function\x18\x02 \x01(\x0e\x32>.api.services.datastore.PropertyExpression.AggregationFunction\" \n\x13\x41ggregationFunction\x12\t\n\x05\x46IRST\x10\x01\"\xc7\x01\n\rPropertyOrder\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12M\n\tdirection\x18\x02 \x01(\x0e\x32/.api.services.datastore.PropertyOrder.Direction:\tASCENDING\"*\n\tDirection\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x8c\x01\n\x06\x46ilter\x12\x41\n\x10\x63omposite_filter\x18\x01 
\x01(\x0b\x32\'.api.services.datastore.CompositeFilter\x12?\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32&.api.services.datastore.PropertyFilter\"\x9a\x01\n\x0f\x43ompositeFilter\x12\x42\n\x08operator\x18\x01 \x02(\x0e\x32\x30.api.services.datastore.CompositeFilter.Operator\x12.\n\x06\x66ilter\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Filter\"\x13\n\x08Operator\x12\x07\n\x03\x41ND\x10\x01\"\xbb\x02\n\x0ePropertyFilter\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\x41\n\x08operator\x18\x02 \x02(\x0e\x32/.api.services.datastore.PropertyFilter.Operator\x12,\n\x05value\x18\x03 \x02(\x0b\x32\x1d.api.services.datastore.Value\"{\n\x08Operator\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xae\x01\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x02(\t\x12\x1c\n\rallow_literal\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x35\n\x08name_arg\x18\x03 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\x12\x37\n\nnumber_arg\x18\x04 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\"Y\n\x0bGqlQueryArg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.api.services.datastore.Value\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"\xf1\x02\n\x10QueryResultBatch\x12K\n\x12\x65ntity_result_type\x18\x01 \x02(\x0e\x32/.api.services.datastore.EntityResult.ResultType\x12;\n\rentity_result\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12N\n\x0cmore_results\x18\x05 \x02(\x0e\x32\x38.api.services.datastore.QueryResultBatch.MoreResultsType\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\"V\n\x0fMoreResultsType\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\"\x8e\x02\n\x08Mutation\x12.\n\x06upsert\x18\x01 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06update\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06insert\x18\x03 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12\x36\n\x0einsert_auto_id\x18\x04 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12+\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\x1b.api.services.datastore.Key\x12\r\n\x05\x66orce\x18\x06 \x01(\x08\"`\n\x0eMutationResult\x12\x15\n\rindex_updates\x18\x01 \x02(\x05\x12\x37\n\x12insert_auto_id_key\x18\x02 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xb4\x01\n\x0bReadOptions\x12V\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x33.api.services.datastore.ReadOptions.ReadConsistency:\x07\x44\x45\x46\x41ULT\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\"8\n\x0fReadConsistency\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\"t\n\rLookupRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12(\n\x03key\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xab\x01\n\x0eLookupResponse\x12\x33\n\x05\x66ound\x18\x01 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x35\n\x07missing\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12-\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xea\x01\n\x0fRunQueryRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12\x39\n\x0cpartition_id\x18\x02 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12,\n\x05query\x18\x03 \x01(\x0b\x32\x1d.api.services.datastore.Query\x12\x33\n\tgql_query\x18\x07 
\x01(\x0b\x32 .api.services.datastore.GqlQuery\"K\n\x10RunQueryResponse\x12\x37\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32(.api.services.datastore.QueryResultBatch\"\xae\x01\n\x17\x42\x65ginTransactionRequest\x12\x61\n\x0fisolation_level\x18\x01 \x01(\x0e\x32>.api.services.datastore.BeginTransactionRequest.IsolationLevel:\x08SNAPSHOT\"0\n\x0eIsolationLevel\x12\x0c\n\x08SNAPSHOT\x10\x00\x12\x10\n\x0cSERIALIZABLE\x10\x01\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"&\n\x0fRollbackRequest\x12\x13\n\x0btransaction\x18\x01 \x02(\x0c\"\x12\n\x10RollbackResponse\"\xd3\x01\n\rCommitRequest\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\x12\x32\n\x08mutation\x18\x02 \x01(\x0b\x32 .api.services.datastore.Mutation\x12G\n\x04mode\x18\x05 \x01(\x0e\x32*.api.services.datastore.CommitRequest.Mode:\rTRANSACTIONAL\"0\n\x04Mode\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\"Q\n\x0e\x43ommitResponse\x12?\n\x0fmutation_result\x18\x01 \x01(\x0b\x32&.api.services.datastore.MutationResult\">\n\x12\x41llocateIdsRequest\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key\"?\n\x13\x41llocateIdsResponse\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key2\xed\x04\n\x10\x44\x61tastoreService\x12Y\n\x06Lookup\x12%.api.services.datastore.LookupRequest\x1a&.api.services.datastore.LookupResponse\"\x00\x12_\n\x08RunQuery\x12\'.api.services.datastore.RunQueryRequest\x1a(.api.services.datastore.RunQueryResponse\"\x00\x12w\n\x10\x42\x65ginTransaction\x12/.api.services.datastore.BeginTransactionRequest\x1a\x30.api.services.datastore.BeginTransactionResponse\"\x00\x12Y\n\x06\x43ommit\x12%.api.services.datastore.CommitRequest\x1a&.api.services.datastore.CommitResponse\"\x00\x12_\n\x08Rollback\x12\'.api.services.datastore.RollbackRequest\x1a(.api.services.datastore.RollbackResponse\"\x00\x12h\n\x0b\x41llocateIds\x12*.api.services.datastore.AllocateIdsRequest\x1a+.api.services.datastore.AllocateIdsResponse\"\x00\x42#\n!com.google.api.services.datastore') + + _ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor( name='ResultType', full_name='api.services.datastore.EntityResult.ResultType', @@ -221,6 +226,7 @@ serialized_end=4718, ) + _PARTITIONID = _descriptor.Descriptor( name='PartitionId', full_name='api.services.datastore.PartitionId', @@ -255,6 +261,7 @@ serialized_end=98, ) + _KEY_PATHELEMENT = _descriptor.Descriptor( name='PathElement', full_name='api.services.datastore.Key.PathElement', @@ -330,6 +337,7 @@ serialized_end=283, ) + _VALUE = _descriptor.Descriptor( name='Value', full_name='api.services.datastore.Value', @@ -434,6 +442,7 @@ serialized_end=658, ) + _PROPERTY = _descriptor.Descriptor( name='Property', full_name='api.services.datastore.Property', @@ -468,6 +477,7 @@ serialized_end=730, ) + _ENTITY = _descriptor.Descriptor( name='Entity', full_name='api.services.datastore.Entity', @@ -502,6 +512,7 @@ serialized_end=834, ) + _ENTITYRESULT = _descriptor.Descriptor( name='EntityResult', full_name='api.services.datastore.EntityResult', @@ -530,6 +541,7 @@ serialized_end=952, ) + _QUERY = _descriptor.Descriptor( name='Query', full_name='api.services.datastore.Query', @@ -613,6 +625,7 @@ serialized_end=1319, ) + _KINDEXPRESSION = _descriptor.Descriptor( name='KindExpression', full_name='api.services.datastore.KindExpression', @@ -640,6 +653,7 @@ serialized_end=1351, ) + _PROPERTYREFERENCE = _descriptor.Descriptor( name='PropertyReference', full_name='api.services.datastore.PropertyReference', @@ -667,6 +681,7 @@ serialized_end=1386, ) 
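# Sketch of how the generated messages defined below are used (an
# assumption based on the descriptors in this file, not part of this
# patch): a datastore Key holds repeated PathElement submessages, each
# with a required kind plus an optional id or name.
#
#     key_pb = Key()
#     element = key_pb.path_element.add()
#     element.kind = 'Person'
#     element.name = 'Alice'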
+ _PROPERTYEXPRESSION = _descriptor.Descriptor( name='PropertyExpression', full_name='api.services.datastore.PropertyExpression', @@ -702,6 +717,7 @@ serialized_end=1598, ) + _PROPERTYORDER = _descriptor.Descriptor( name='PropertyOrder', full_name='api.services.datastore.PropertyOrder', @@ -737,6 +753,7 @@ serialized_end=1800, ) + _FILTER = _descriptor.Descriptor( name='Filter', full_name='api.services.datastore.Filter', @@ -771,6 +788,7 @@ serialized_end=1943, ) + _COMPOSITEFILTER = _descriptor.Descriptor( name='CompositeFilter', full_name='api.services.datastore.CompositeFilter', @@ -806,6 +824,7 @@ serialized_end=2100, ) + _PROPERTYFILTER = _descriptor.Descriptor( name='PropertyFilter', full_name='api.services.datastore.PropertyFilter', @@ -848,6 +867,7 @@ serialized_end=2418, ) + _GQLQUERY = _descriptor.Descriptor( name='GqlQuery', full_name='api.services.datastore.GqlQuery', @@ -896,6 +916,7 @@ serialized_end=2595, ) + _GQLQUERYARG = _descriptor.Descriptor( name='GqlQueryArg', full_name='api.services.datastore.GqlQueryArg', @@ -937,6 +958,7 @@ serialized_end=2686, ) + _QUERYRESULTBATCH = _descriptor.Descriptor( name='QueryResultBatch', full_name='api.services.datastore.QueryResultBatch', @@ -993,6 +1015,7 @@ serialized_end=3058, ) + _MUTATION = _descriptor.Descriptor( name='Mutation', full_name='api.services.datastore.Mutation', @@ -1055,6 +1078,7 @@ serialized_end=3331, ) + _MUTATIONRESULT = _descriptor.Descriptor( name='MutationResult', full_name='api.services.datastore.MutationResult', @@ -1089,6 +1113,7 @@ serialized_end=3429, ) + _READOPTIONS = _descriptor.Descriptor( name='ReadOptions', full_name='api.services.datastore.ReadOptions', @@ -1124,6 +1149,7 @@ serialized_end=3612, ) + _LOOKUPREQUEST = _descriptor.Descriptor( name='LookupRequest', full_name='api.services.datastore.LookupRequest', @@ -1158,6 +1184,7 @@ serialized_end=3730, ) + _LOOKUPRESPONSE = _descriptor.Descriptor( name='LookupResponse', full_name='api.services.datastore.LookupResponse', @@ -1199,6 +1226,7 @@ serialized_end=3904, ) + _RUNQUERYREQUEST = _descriptor.Descriptor( name='RunQueryRequest', full_name='api.services.datastore.RunQueryRequest', @@ -1247,6 +1275,7 @@ serialized_end=4141, ) + _RUNQUERYRESPONSE = _descriptor.Descriptor( name='RunQueryResponse', full_name='api.services.datastore.RunQueryResponse', @@ -1274,6 +1303,7 @@ serialized_end=4218, ) + _BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( name='BeginTransactionRequest', full_name='api.services.datastore.BeginTransactionRequest', @@ -1302,6 +1332,7 @@ serialized_end=4395, ) + _BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( name='BeginTransactionResponse', full_name='api.services.datastore.BeginTransactionResponse', @@ -1329,6 +1360,7 @@ serialized_end=4444, ) + _ROLLBACKREQUEST = _descriptor.Descriptor( name='RollbackRequest', full_name='api.services.datastore.RollbackRequest', @@ -1356,6 +1388,7 @@ serialized_end=4484, ) + _ROLLBACKRESPONSE = _descriptor.Descriptor( name='RollbackResponse', full_name='api.services.datastore.RollbackResponse', @@ -1376,6 +1409,7 @@ serialized_end=4504, ) + _COMMITREQUEST = _descriptor.Descriptor( name='CommitRequest', full_name='api.services.datastore.CommitRequest', @@ -1418,6 +1452,7 @@ serialized_end=4718, ) + _COMMITRESPONSE = _descriptor.Descriptor( name='CommitResponse', full_name='api.services.datastore.CommitResponse', @@ -1445,6 +1480,7 @@ serialized_end=4801, ) + _ALLOCATEIDSREQUEST = _descriptor.Descriptor( name='AllocateIdsRequest', full_name='api.services.datastore.AllocateIdsRequest', @@ 
-1472,6 +1508,7 @@ serialized_end=4865, ) + _ALLOCATEIDSRESPONSE = _descriptor.Descriptor( name='AllocateIdsResponse', full_name='api.services.datastore.AllocateIdsResponse', @@ -1499,8 +1536,7 @@ serialized_end=4930, ) -_KEY_PATHELEMENT.containing_type = _KEY - +_KEY_PATHELEMENT.containing_type = _KEY; _KEY.fields_by_name['partition_id'].message_type = _PARTITIONID _KEY.fields_by_name['path_element'].message_type = _KEY_PATHELEMENT _VALUE.fields_by_name['key_value'].message_type = _KEY @@ -1510,8 +1546,7 @@ _ENTITY.fields_by_name['key'].message_type = _KEY _ENTITY.fields_by_name['property'].message_type = _PROPERTY _ENTITYRESULT.fields_by_name['entity'].message_type = _ENTITY -_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT - +_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT; _QUERY.fields_by_name['projection'].message_type = _PROPERTYEXPRESSION _QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION _QUERY.fields_by_name['filter'].message_type = _FILTER @@ -1519,31 +1554,26 @@ _QUERY.fields_by_name['group_by'].message_type = _PROPERTYREFERENCE _PROPERTYEXPRESSION.fields_by_name['property'].message_type = _PROPERTYREFERENCE _PROPERTYEXPRESSION.fields_by_name['aggregation_function'].enum_type = _PROPERTYEXPRESSION_AGGREGATIONFUNCTION -_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION - +_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION; _PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE _PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION -_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER - +_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER; _FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER _FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER _COMPOSITEFILTER.fields_by_name['operator'].enum_type = _COMPOSITEFILTER_OPERATOR _COMPOSITEFILTER.fields_by_name['filter'].message_type = _FILTER -_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER - +_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER; _PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE _PROPERTYFILTER.fields_by_name['operator'].enum_type = _PROPERTYFILTER_OPERATOR _PROPERTYFILTER.fields_by_name['value'].message_type = _VALUE -_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER - +_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER; _GQLQUERY.fields_by_name['name_arg'].message_type = _GQLQUERYARG _GQLQUERY.fields_by_name['number_arg'].message_type = _GQLQUERYARG _GQLQUERYARG.fields_by_name['value'].message_type = _VALUE _QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE _QUERYRESULTBATCH.fields_by_name['entity_result'].message_type = _ENTITYRESULT _QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE -_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH - +_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH; _MUTATION.fields_by_name['upsert'].message_type = _ENTITY _MUTATION.fields_by_name['update'].message_type = _ENTITY _MUTATION.fields_by_name['insert'].message_type = _ENTITY @@ -1551,8 +1581,7 @@ _MUTATION.fields_by_name['delete'].message_type = _KEY _MUTATIONRESULT.fields_by_name['insert_auto_id_key'].message_type = _KEY _READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY -_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS 
- +_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS; _LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS _LOOKUPREQUEST.fields_by_name['key'].message_type = _KEY _LOOKUPRESPONSE.fields_by_name['found'].message_type = _ENTITYRESULT @@ -1564,12 +1593,10 @@ _RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = _GQLQUERY _RUNQUERYRESPONSE.fields_by_name['batch'].message_type = _QUERYRESULTBATCH _BEGINTRANSACTIONREQUEST.fields_by_name['isolation_level'].enum_type = _BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL -_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST - +_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST; _COMMITREQUEST.fields_by_name['mutation'].message_type = _MUTATION _COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE -_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST - +_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST; _COMMITRESPONSE.fields_by_name['mutation_result'].message_type = _MUTATIONRESULT _ALLOCATEIDSREQUEST.fields_by_name['key'].message_type = _KEY _ALLOCATEIDSRESPONSE.fields_by_name['key'].message_type = _KEY @@ -1606,203 +1633,204 @@ DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE - class PartitionId(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PARTITIONID + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PARTITIONID - # @@protoc_insertion_point(class_scope:api.services.datastore.PartitionId) + # @@protoc_insertion_point(class_scope:api.services.datastore.PartitionId) class Key(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType + __metaclass__ = _reflection.GeneratedProtocolMessageType - class PathElement(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _KEY_PATHELEMENT + class PathElement(_message.Message): + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _KEY_PATHELEMENT - # @@protoc_insertion_point(class_scope:api.services.datastore.Key.PathElement) - DESCRIPTOR = _KEY + # @@protoc_insertion_point(class_scope:api.services.datastore.Key.PathElement) + DESCRIPTOR = _KEY - # @@protoc_insertion_point(class_scope:api.services.datastore.Key) + # @@protoc_insertion_point(class_scope:api.services.datastore.Key) class Value(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _VALUE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _VALUE - # @@protoc_insertion_point(class_scope:api.services.datastore.Value) + # @@protoc_insertion_point(class_scope:api.services.datastore.Value) class Property(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTY - # @@protoc_insertion_point(class_scope:api.services.datastore.Property) + # @@protoc_insertion_point(class_scope:api.services.datastore.Property) class Entity(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ENTITY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ENTITY - # @@protoc_insertion_point(class_scope:api.services.datastore.Entity) + # @@protoc_insertion_point(class_scope:api.services.datastore.Entity) class EntityResult(_message.Message): - __metaclass__ = 
_reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ENTITYRESULT + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ENTITYRESULT - # @@protoc_insertion_point(class_scope:api.services.datastore.EntityResult) + # @@protoc_insertion_point(class_scope:api.services.datastore.EntityResult) class Query(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _QUERY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _QUERY - # @@protoc_insertion_point(class_scope:api.services.datastore.Query) + # @@protoc_insertion_point(class_scope:api.services.datastore.Query) class KindExpression(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _KINDEXPRESSION + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _KINDEXPRESSION - # @@protoc_insertion_point(class_scope:api.services.datastore.KindExpression) + # @@protoc_insertion_point(class_scope:api.services.datastore.KindExpression) class PropertyReference(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYREFERENCE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYREFERENCE - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyReference) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyReference) class PropertyExpression(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYEXPRESSION + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYEXPRESSION - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyExpression) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyExpression) class PropertyOrder(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYORDER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYORDER - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyOrder) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyOrder) class Filter(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _FILTER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _FILTER - # @@protoc_insertion_point(class_scope:api.services.datastore.Filter) + # @@protoc_insertion_point(class_scope:api.services.datastore.Filter) class CompositeFilter(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _COMPOSITEFILTER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _COMPOSITEFILTER - # @@protoc_insertion_point(class_scope:api.services.datastore.CompositeFilter) + # @@protoc_insertion_point(class_scope:api.services.datastore.CompositeFilter) class PropertyFilter(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _PROPERTYFILTER + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _PROPERTYFILTER - # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyFilter) + # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyFilter) class GqlQuery(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _GQLQUERY + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _GQLQUERY - # 
@@protoc_insertion_point(class_scope:api.services.datastore.GqlQuery) + # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQuery) class GqlQueryArg(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _GQLQUERYARG + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _GQLQUERYARG - # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQueryArg) + # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQueryArg) class QueryResultBatch(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _QUERYRESULTBATCH + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _QUERYRESULTBATCH - # @@protoc_insertion_point(class_scope:api.services.datastore.QueryResultBatch) + # @@protoc_insertion_point(class_scope:api.services.datastore.QueryResultBatch) class Mutation(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _MUTATION + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _MUTATION - # @@protoc_insertion_point(class_scope:api.services.datastore.Mutation) + # @@protoc_insertion_point(class_scope:api.services.datastore.Mutation) class MutationResult(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _MUTATIONRESULT + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _MUTATIONRESULT - # @@protoc_insertion_point(class_scope:api.services.datastore.MutationResult) + # @@protoc_insertion_point(class_scope:api.services.datastore.MutationResult) class ReadOptions(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _READOPTIONS + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _READOPTIONS - # @@protoc_insertion_point(class_scope:api.services.datastore.ReadOptions) + # @@protoc_insertion_point(class_scope:api.services.datastore.ReadOptions) class LookupRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _LOOKUPREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _LOOKUPREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.LookupRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.LookupRequest) class LookupResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _LOOKUPRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _LOOKUPRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.LookupResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.LookupResponse) class RunQueryRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _RUNQUERYREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _RUNQUERYREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryRequest) class RunQueryResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _RUNQUERYRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _RUNQUERYRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryResponse) class 
BeginTransactionRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _BEGINTRANSACTIONREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _BEGINTRANSACTIONREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionRequest) class BeginTransactionResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _BEGINTRANSACTIONRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _BEGINTRANSACTIONRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionResponse) class RollbackRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ROLLBACKREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ROLLBACKREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackRequest) class RollbackResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ROLLBACKRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ROLLBACKRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackResponse) class CommitRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _COMMITREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _COMMITREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.CommitRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.CommitRequest) class CommitResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _COMMITRESPONSE + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _COMMITRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.CommitResponse) + # @@protoc_insertion_point(class_scope:api.services.datastore.CommitResponse) class AllocateIdsRequest(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ALLOCATEIDSREQUEST + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ALLOCATEIDSREQUEST - # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsRequest) + # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsRequest) class AllocateIdsResponse(_message.Message): - __metaclass__ = _reflection.GeneratedProtocolMessageType - DESCRIPTOR = _ALLOCATEIDSRESPONSE - # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsResponse) + __metaclass__ = _reflection.GeneratedProtocolMessageType + DESCRIPTOR = _ALLOCATEIDSRESPONSE + + # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsResponse) + DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n!com.google.api.services.datastore')