feat: support object retention lock (#1188)
* feat: add support for object retention lock

* add Retention config object in Blob

* update tests

* update test coverage

* clarify docstrings

---------

Co-authored-by: Anthonios Partheniou <partheniou@google.com>
cojenco and parthea committed Dec 7, 2023
1 parent 22f36da commit a179337
Showing 9 changed files with 308 additions and 2 deletions.
20 changes: 20 additions & 0 deletions google/cloud/storage/_helpers.py
@@ -290,6 +290,7 @@ def patch(
if_metageneration_not_match=None,
timeout=_DEFAULT_TIMEOUT,
retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
override_unlocked_retention=False,
):
"""Sends all changed properties in a PATCH request.
@@ -326,12 +327,21 @@ def patch(
:type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
:type override_unlocked_retention: bool
:param override_unlocked_retention:
(Optional) override_unlocked_retention must be set to True if the operation includes
a retention property that changes the mode from Unlocked to Locked, reduces the
retainUntilTime, or removes the retention configuration from the object. See:
https://cloud.google.com/storage/docs/json_api/v1/objects/patch
"""
client = self._require_client(client)
query_params = self._query_params
# Pass '?projection=full' here because 'PATCH' is documented not
# to work properly w/ 'noAcl'.
query_params["projection"] = "full"
if override_unlocked_retention:
query_params["overrideUnlockedRetention"] = override_unlocked_retention
_add_generation_match_parameters(
query_params,
if_generation_match=if_generation_match,
@@ -361,6 +371,7 @@ def update(
if_metageneration_not_match=None,
timeout=_DEFAULT_TIMEOUT,
retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
override_unlocked_retention=False,
):
"""Sends all properties in a PUT request.
@@ -397,11 +408,20 @@ def update(
:type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
:param retry:
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
:type override_unlocked_retention: bool
:param override_unlocked_retention:
(Optional) override_unlocked_retention must be set to True if the operation includes
a retention property that changes the mode from Unlocked to Locked, reduces the
retainUntilTime, or removes the retention configuration from the object. See:
https://cloud.google.com/storage/docs/json_api/v1/objects/patch
"""
client = self._require_client(client)

query_params = self._query_params
query_params["projection"] = "full"
if override_unlocked_retention:
query_params["overrideUnlockedRetention"] = override_unlocked_retention
_add_generation_match_parameters(
query_params,
if_generation_match=if_generation_match,
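
For illustration, a minimal usage sketch of the new flag from a Blob (the client setup and names are placeholders). Passing override_unlocked_retention=True makes patch() or update() send the overrideUnlockedRetention query parameter, which the API requires when an Unlocked retention configuration is shortened or removed:

import datetime
from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-bucket").blob("my-object")  # placeholder names
blob.reload()

# Reducing retainUntilTime on an Unlocked configuration is only accepted
# when the override flag accompanies the request.
blob.retention.retain_until_time = datetime.datetime.now(
    datetime.timezone.utc
) + datetime.timedelta(hours=1)
blob.patch(override_unlocked_retention=True)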
135 changes: 135 additions & 0 deletions google/cloud/storage/blob.py
@@ -102,6 +102,7 @@
"md5Hash",
"metadata",
"name",
"retention",
"storageClass",
)
_READ_LESS_THAN_SIZE = (
@@ -1700,6 +1701,7 @@ def _get_writable_metadata(self):
* ``md5Hash``
* ``metadata``
* ``name``
* ``retention``
* ``storageClass``
For now, we don't support ``acl``, access control lists should be
@@ -4667,6 +4669,16 @@ def custom_time(self, value):

self._patch_property("customTime", value)

@property
def retention(self):
"""Retrieve the retention configuration for this object.
:rtype: :class:`Retention`
:returns: an instance for managing the object's retention configuration.
"""
info = self._properties.get("retention", {})
return Retention.from_api_repr(info, self)


def _get_host_name(connection):
"""Returns the host name from the given connection.
@@ -4797,3 +4809,126 @@ def _add_query_parameters(base_url, name_value_pairs):
query = parse_qsl(query)
query.extend(name_value_pairs)
return urlunsplit((scheme, netloc, path, urlencode(query), frag))


class Retention(dict):
"""Map an object's retention configuration.
:type blob: :class:`Blob`
:param blob: The blob to which this retention configuration applies.
:type mode: str or ``NoneType``
:param mode:
(Optional) The mode of the retention configuration, which can be either Unlocked or Locked.
See: https://cloud.google.com/storage/docs/object-lock
:type retain_until_time: :class:`datetime.datetime` or ``NoneType``
:param retain_until_time:
(Optional) The earliest time that the object can be deleted or replaced, as set by
the retention configuration for this object.
:type retention_expiration_time: :class:`datetime.datetime` or ``NoneType``
:param retention_expiration_time:
(Optional) The earliest time that the object can be deleted, which depends on any
retention configuration set for the object and any retention policy set for the bucket
that contains the object. This value should normally only be set by the back-end API.
"""

def __init__(
self,
blob,
mode=None,
retain_until_time=None,
retention_expiration_time=None,
):
data = {"mode": mode}
if retain_until_time is not None:
retain_until_time = _datetime_to_rfc3339(retain_until_time)
data["retainUntilTime"] = retain_until_time

if retention_expiration_time is not None:
retention_expiration_time = _datetime_to_rfc3339(retention_expiration_time)
data["retentionExpirationTime"] = retention_expiration_time

super(Retention, self).__init__(data)
self._blob = blob

@classmethod
def from_api_repr(cls, resource, blob):
"""Factory: construct instance from resource.
:type blob: :class:`Blob`
:param blob: The blob to which this retention configuration applies.
:type resource: dict
:param resource: mapping as returned from API call.
:rtype: :class:`Retention`
:returns: Retention configuration created from resource.
"""
instance = cls(blob)
instance.update(resource)
return instance

@property
def blob(self):
"""Blob for which this retention configuration applies to.
:rtype: :class:`Blob`
:returns: the instance's blob.
"""
return self._blob

@property
def mode(self):
"""The mode of the retention configuration. Options are 'Unlocked' or 'Locked'.
:rtype: string
:returns: The mode of the retention configuration, which can be set to either 'Unlocked' or 'Locked'.
"""
return self.get("mode")

@mode.setter
def mode(self, value):
self["mode"] = value
self.blob._patch_property("retention", self)

@property
def retain_until_time(self):
"""The earliest time that the object can be deleted or replaced, which is the
retention configuration set for this object.
:rtype: :class:`datetime.datetime` or ``NoneType``
:returns: Datetime object parsed from RFC3339 valid timestamp, or
``None`` if the blob's resource has not been loaded from
the server (see :meth:`reload`).
"""
value = self.get("retainUntilTime")
if value is not None:
return _rfc3339_nanos_to_datetime(value)

@retain_until_time.setter
def retain_until_time(self, value):
"""Set the retain_until_time for the object retention configuration.
:type value: :class:`datetime.datetime`
:param value: The earliest time that the object can be deleted or replaced.
"""
if value is not None:
value = _datetime_to_rfc3339(value)
self["retainUntilTime"] = value
self.blob._patch_property("retention", self)

@property
def retention_expiration_time(self):
"""The earliest time that the object can be deleted, which depends on any
retention configuration set for the object and any retention policy set for
the bucket that contains the object.
:rtype: :class:`datetime.datetime` or ``NoneType``
:returns:
(readonly) The earliest time that the object can be deleted.
"""
retention_expiration_time = self.get("retentionExpirationTime")
if retention_expiration_time is not None:
return _rfc3339_nanos_to_datetime(retention_expiration_time)
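
As a usage sketch (bucket and object names are placeholders), the retention accessor can set the writable fields before upload and read the server-populated field back afterwards:

import datetime
from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-bucket").blob("my-object")  # placeholder names

# mode and retain_until_time are writable; setting them marks the
# "retention" property as changed on the blob.
blob.retention.mode = "Unlocked"
blob.retention.retain_until_time = datetime.datetime.now(
    datetime.timezone.utc
) + datetime.timedelta(days=1)
blob.upload_from_string(b"hello")

# retentionExpirationTime is set by the backend and exposed read-only.
blob.reload()
print(blob.retention.mode, blob.retention.retention_expiration_time)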
19 changes: 19 additions & 0 deletions google/cloud/storage/bucket.py
@@ -917,6 +917,7 @@ def create(
location=None,
predefined_acl=None,
predefined_default_object_acl=None,
enable_object_retention=False,
timeout=_DEFAULT_TIMEOUT,
retry=DEFAULT_RETRY,
):
@@ -956,6 +957,11 @@
(Optional) Name of predefined ACL to apply to bucket's objects. See:
https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
:type enable_object_retention: bool
:param enable_object_retention:
(Optional) Whether object retention should be enabled on this bucket. See:
https://cloud.google.com/storage/docs/object-lock
:type timeout: float or tuple
:param timeout:
(Optional) The amount of time, in seconds, to wait
@@ -974,6 +980,7 @@
location=location,
predefined_acl=predefined_acl,
predefined_default_object_acl=predefined_default_object_acl,
enable_object_retention=enable_object_retention,
timeout=timeout,
retry=retry,
)
@@ -2750,6 +2757,18 @@ def autoclass_terminal_storage_class_update_time(self):
if timestamp is not None:
return _rfc3339_nanos_to_datetime(timestamp)

@property
def object_retention_mode(self):
"""Retrieve the object retention mode set on the bucket.
:rtype: str
:returns: The object retention mode. When set to "Enabled", retention
configurations can be set on objects in the bucket.
"""
object_retention = self._properties.get("objectRetention")
if object_retention is not None:
return object_retention.get("mode")

def configure_website(self, main_page_suffix=None, not_found_page=None):
"""Configure website-related properties.
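
A short sketch of reading the new property on an existing bucket (the bucket name is a placeholder):

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket("my-bucket")  # placeholder name

# "Enabled" when the bucket was created with object retention; None when
# the objectRetention field is absent from the bucket resource.
print(bucket.object_retention_mode)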
7 changes: 7 additions & 0 deletions google/cloud/storage/client.py
@@ -845,6 +845,7 @@ def create_bucket(
data_locations=None,
predefined_acl=None,
predefined_default_object_acl=None,
enable_object_retention=False,
timeout=_DEFAULT_TIMEOUT,
retry=DEFAULT_RETRY,
):
@@ -883,6 +884,9 @@
predefined_default_object_acl (str):
(Optional) Name of predefined ACL to apply to bucket's objects. See:
https://cloud.google.com/storage/docs/access-control/lists#predefined-acl
enable_object_retention (bool):
(Optional) Whether object retention should be enabled on this bucket. See:
https://cloud.google.com/storage/docs/object-lock
timeout (Optional[Union[float, Tuple[float, float]]]):
The amount of time, in seconds, to wait for the server response.
@@ -951,6 +955,9 @@ def create_bucket(
if user_project is not None:
query_params["userProject"] = user_project

if enable_object_retention:
query_params["enableObjectRetention"] = enable_object_retention

properties = {key: bucket._properties[key] for key in bucket._changes}
properties["name"] = bucket.name

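
A sketch of creating a bucket with object retention enabled through the client (the bucket name is a placeholder); the flag is forwarded as the enableObjectRetention query parameter shown above:

from google.cloud import storage

client = storage.Client()
bucket = client.create_bucket(
    "my-retention-bucket",  # placeholder name
    enable_object_retention=True,
)

# The created bucket reports the mode via the new Bucket property.
assert bucket.object_retention_mode == "Enabled"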
29 changes: 29 additions & 0 deletions tests/system/test_blob.py
@@ -1117,3 +1117,32 @@ def test_blob_update_storage_class_large_file(
blob.update_storage_class(constants.COLDLINE_STORAGE_CLASS)
blob.reload()
assert blob.storage_class == constants.COLDLINE_STORAGE_CLASS


def test_object_retention_lock(storage_client, buckets_to_delete, blobs_to_delete):
# Test bucket created with object retention enabled
new_bucket_name = _helpers.unique_name("object-retention")
created_bucket = _helpers.retry_429_503(storage_client.create_bucket)(
new_bucket_name, enable_object_retention=True
)
buckets_to_delete.append(created_bucket)
assert created_bucket.object_retention_mode == "Enabled"

# Test create object with object retention enabled
payload = b"Hello World"
mode = "Unlocked"
current_time = datetime.datetime.utcnow()
expiration_time = current_time + datetime.timedelta(seconds=10)
blob = created_bucket.blob("object-retention-lock")
blob.retention.mode = mode
blob.retention.retain_until_time = expiration_time
blob.upload_from_string(payload)
blobs_to_delete.append(blob)
blob.reload()
assert blob.retention.mode == mode

# Test patch object to disable object retention
blob.retention.mode = None
blob.retention.retain_until_time = None
blob.patch(override_unlocked_retention=True)
assert blob.retention.mode is None
6 changes: 6 additions & 0 deletions tests/unit/test__helpers.py
@@ -353,12 +353,14 @@ def test_patch_w_metageneration_match_w_timeout_w_retry(self):
retry = mock.Mock(spec=[])
generation_number = 9
metageneration_number = 6
override_unlocked_retention = True

derived.patch(
if_generation_match=generation_number,
if_metageneration_match=metageneration_number,
timeout=timeout,
retry=retry,
override_unlocked_retention=override_unlocked_retention,
)

self.assertEqual(derived._properties, {"foo": "Foo"})
@@ -370,6 +372,7 @@ def test_patch_w_metageneration_match_w_timeout_w_retry(self):
"projection": "full",
"ifGenerationMatch": generation_number,
"ifMetagenerationMatch": metageneration_number,
"overrideUnlockedRetention": override_unlocked_retention,
}
client._patch_resource.assert_called_once_with(
path,
@@ -454,10 +457,12 @@ def test_update_with_metageneration_not_match_w_timeout_w_retry(self):
client = derived.client = mock.Mock(spec=["_put_resource"])
client._put_resource.return_value = api_response
timeout = 42
override_unlocked_retention = True

derived.update(
if_metageneration_not_match=generation_number,
timeout=timeout,
override_unlocked_retention=override_unlocked_retention,
)

self.assertEqual(derived._properties, {"foo": "Foo"})
@@ -467,6 +472,7 @@ def test_update_with_metageneration_not_match_w_timeout_w_retry(self):
expected_query_params = {
"projection": "full",
"ifMetagenerationNotMatch": generation_number,
"overrideUnlockedRetention": override_unlocked_retention,
}
client._put_resource.assert_called_once_with(
path,