From 953106a613689237108682abef5570735562fa72 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Feb 2023 14:32:23 -0800 Subject: [PATCH 001/204] got retryable generators partially working --- google/api_core/retry.py | 6 +++++- google/api_core/retry_async.py | 23 ++++++++++++++--------- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index f9207a12..0872ea2b 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -61,6 +61,7 @@ def check_if_exists(): import logging import random import time +import inspect import requests.exceptions @@ -188,7 +189,10 @@ def retry_target( for sleep in sleep_generator: try: - return target() + if inspect.isgeneratorfunction(target): + yield from target() + else: + return target() # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 81698838..ef773d49 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -55,6 +55,7 @@ async def check_if_exists(): import datetime import functools import logging +import inspect from google.api_core import datetime_helpers from google.api_core import exceptions @@ -115,13 +116,8 @@ async def retry_target( for sleep in sleep_generator: try: - if not deadline_dt: - return await target() - else: - return await asyncio.wait_for( - target(), - timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), - ) + async for item in target(): + yield item # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: @@ -220,13 +216,22 @@ async def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return await retry_target( + async for item in retry_target( target, self._predicate, sleep_generator, self._timeout, on_error=on_error, - ) + ): + yield item + raise AsyncStopIteration + # return await retry_target( + # target, + # self._predicate, + # sleep_generator, + # self._timeout, + # on_error=on_error, + # ) return retry_wrapped_func From 89aeb75a058f572137d19273f42dc2ba29844654 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Feb 2023 14:51:03 -0800 Subject: [PATCH 002/204] added retrun statement --- google/api_core/retry_async.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ef773d49..271e0519 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -118,6 +118,7 @@ async def retry_target( try: async for item in target(): yield item + return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: @@ -216,22 +217,13 @@ async def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - async for item in retry_target( + return retry_target( target, self._predicate, sleep_generator, self._timeout, on_error=on_error, - ): - yield item - raise AsyncStopIteration - # return await retry_target( - # target, - # self._predicate, - # sleep_generator, - # self._timeout, - # on_error=on_error, - # ) + ) return retry_wrapped_func From 27feb8036602920d2f03ef29d8fe37d0cb275e3f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Feb 2023 15:15:27 -0800 Subject: [PATCH 003/204] refactoring --- google/api_core/retry_async.py | 91 ++++++++++++++++++++++++++++++---- 1 file changed, 81 insertions(+), 10 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 271e0519..e146ef52 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -71,6 +71,62 @@ async def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds +async def retry_target_generator( + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs +): + timeout = kwargs.get("deadline", timeout) + + deadline_dt = ( + (datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout)) + if timeout + else None + ) + + last_exc = None + + for sleep in sleep_generator: + try: + async for item in target(): + yield item + # check for overtime + if deadline_dt <= datetime_helpers.utcnow(): + raise asyncio.TimeoutError("generator timeout") + return + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. 
+ except Exception as exc: + if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + raise + last_exc = exc + if on_error is not None: + on_error(exc) + + now = datetime_helpers.utcnow() + + if deadline_dt: + if deadline_dt <= now: + # Chains the raising RetryError with the root cause error, + # which helps observability and debugability. + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded while calling target function".format( + timeout + ), + last_exc, + ) from last_exc + else: + time_to_deadline = (deadline_dt - now).total_seconds() + sleep = min(time_to_deadline, sleep) + + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) + ) + await asyncio.sleep(sleep) + + raise ValueError("Sleep generator stopped yielding sleep values.") + + + + async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs @@ -116,9 +172,13 @@ async def retry_target( for sleep in sleep_generator: try: - async for item in target(): - yield item - return + if not deadline_dt: + return await target() + else: + return await asyncio.wait_for( + target(), + timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), + ) # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: @@ -184,6 +244,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_TIMEOUT, on_error=None, + generator_target=False, **kwargs ): self._predicate = predicate @@ -193,6 +254,7 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error + self._generator_target = generator_target def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. 
@@ -217,13 +279,22 @@ async def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return retry_target( - target, - self._predicate, - sleep_generator, - self._timeout, - on_error=on_error, - ) + if self._generator_target: + return retry_target_generator( + target, + self._predicate, + sleep_generator, + self._timeout, + on_error=on_error, + ) + else: + return await retry_target( + target, + self._predicate, + sleep_generator, + self._timeout, + on_error=on_error, + ) return retry_wrapped_func From 0dffa6d555e0e1975221f3485f19cb3bf94ced38 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Feb 2023 15:23:20 -0800 Subject: [PATCH 004/204] work for now deadline --- google/api_core/retry_async.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index e146ef52..d3f13699 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -89,7 +89,7 @@ async def retry_target_generator( async for item in target(): yield item # check for overtime - if deadline_dt <= datetime_helpers.utcnow(): + if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): raise asyncio.TimeoutError("generator timeout") return # pylint: disable=broad-except @@ -124,10 +124,6 @@ async def retry_target_generator( raise ValueError("Sleep generator stopped yielding sleep values.") - - - - async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): From b330c3be5d437b9c26bdb1799e2a64468198b424 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 8 Feb 2023 15:30:00 -0800 Subject: [PATCH 005/204] improved synchronous generator retry --- google/api_core/retry.py | 9 ++++++--- google/api_core/retry_async.py | 1 - 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 0872ea2b..72f3d08a 100644 
--- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -61,7 +61,6 @@ def check_if_exists(): import logging import random import time -import inspect import requests.exceptions @@ -147,7 +146,7 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT def retry_target( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs + target, predicate, sleep_generator, timeout=None, on_error=None, generator_target=False, **kwargs ): """Call a function and retry if it fails. @@ -189,8 +188,9 @@ def retry_target( for sleep in sleep_generator: try: - if inspect.isgeneratorfunction(target): + if generator_target: yield from target() + return else: return target() @@ -317,6 +317,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_DEADLINE, on_error=None, + generator_target=False, **kwargs ): self._predicate = predicate @@ -326,6 +327,7 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error + self._generator_target = generator_target def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. 
@@ -356,6 +358,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator, self._timeout, on_error=on_error, + generator_target=self._generator_target, ) return retry_wrapped_func diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index d3f13699..b019ac25 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -55,7 +55,6 @@ async def check_if_exists(): import datetime import functools import logging -import inspect from google.api_core import datetime_helpers from google.api_core import exceptions From 67ceaa21c7e7ee5bae481efbee461f57434547a4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 11:35:35 -0800 Subject: [PATCH 006/204] handle closing and returns --- google/api_core/retry.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 72f3d08a..f13e73ec 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -165,6 +165,9 @@ def retry_target( on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + generator_target (bool): If True, the target function will be treated + as a generator. Wrapper function will `yield from` the target, and + pass `close()` calls through. deadline (float): DEPRECATED: use ``timeout`` instead. For backward compatibility, if specified it will override ``timeout`` parameter. @@ -189,14 +192,18 @@ def retry_target( for sleep in sleep_generator: try: if generator_target: - yield from target() - return + result = yield from target() + return result else: return target() # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: + if generator_target and isinstance(exc, GeneratorExit): + # pass close call to target generator + target.close() + raise if not predicate(exc): raise last_exc = exc @@ -305,6 +312,12 @@ class Retry(object): maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (float): How long to keep retrying, in seconds. + on_error (Callable[Exception]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + generator_target (bool): If True, the target function will be treated + as a generator. Wrapper function will `yield from` the target, and + pass `close()` calls through. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ From ee2647abea1d466eef06ee26a14f9c78809cb7cc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 12:14:48 -0800 Subject: [PATCH 007/204] got test to pass --- google/api_core/retry.py | 100 ++++++++++++++++++++++++++++++++------- tests/unit/test_retry.py | 21 ++++++++ 2 files changed, 104 insertions(+), 17 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index f13e73ec..32067fe9 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -61,6 +61,7 @@ def check_if_exists(): import logging import random import time +from inspect import isgeneratorfunction import requests.exceptions @@ -145,8 +146,86 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT delay = min(delay * multiplier, maximum) +def retry_target_generator( + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs +): + """Wrap a generator object and retry if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. 
+ + Args: + target(Generator): The generator to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator (Iterable[float]): An infinite iterator that determines + how long to sleep between retries. + timeout (float): How long to keep retrying the target. + on_error (Callable[Exception]): A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. + deadline (float): DEPRECATED: use ``timeout`` instead. For backward + compatibility, if specified it will override ``timeout`` parameter. + + Returns: + Any: the return value of the target function. + + Raises: + google.api_core.RetryError: If the deadline is exceeded while retrying. + ValueError: If the sleep generator stops yielding values. + Exception: If the target raises a method that isn't retryable. + """ + + timeout = kwargs.get("deadline", timeout) + + if timeout is not None: + deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout) + else: + deadline = None + + last_exc = None + + for sleep in sleep_generator: + try: + yield from target() + return + + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. 
+ except GeneratorExit: + # pass close call to target generator + target.close() + raise + except Exception as exc: + if not predicate(exc): + raise + last_exc = exc + if on_error is not None: + on_error(exc) + + if deadline is not None: + next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( + seconds=sleep + ) + if deadline < next_attempt_time: + raise exceptions.RetryError( + "Deadline of {:.1f}s exceeded while calling target function".format( + timeout + ), + last_exc, + ) from last_exc + + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) + ) + time.sleep(sleep) + + raise ValueError("Sleep generator stopped yielding sleep values.") + def retry_target( - target, predicate, sleep_generator, timeout=None, on_error=None, generator_target=False, **kwargs + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): """Call a function and retry if it fails. @@ -165,9 +244,6 @@ def retry_target( on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - generator_target (bool): If True, the target function will be treated - as a generator. Wrapper function will `yield from` the target, and - pass `close()` calls through. deadline (float): DEPRECATED: use ``timeout`` instead. For backward compatibility, if specified it will override ``timeout`` parameter. @@ -191,19 +267,11 @@ def retry_target( for sleep in sleep_generator: try: - if generator_target: - result = yield from target() - return result - else: - return target() + return target() # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: - if generator_target and isinstance(exc, GeneratorExit): - # pass close call to target generator - target.close() - raise if not predicate(exc): raise last_exc = exc @@ -330,7 +398,6 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_DEADLINE, on_error=None, - generator_target=False, **kwargs ): self._predicate = predicate @@ -340,7 +407,6 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error - self._generator_target = generator_target def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. @@ -365,13 +431,13 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return retry_target( + retry_func = retry_target if not isgeneratorfunction(target) else retry_target_generator + return retry_func( target, self._predicate, sleep_generator, self._timeout, on_error=on_error, - generator_target=self._generator_target, ) return retry_wrapped_func diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index ec27056d..99afc6ac 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -15,6 +15,7 @@ import datetime import itertools import re +import inspect import mock import pytest @@ -467,3 +468,23 @@ def test___init___when_retry_is_executed(self, sleep, uniform): assert _some_function.call_count == 2 target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_target(self, sleep): + retry_ = retry.Retry() + + decorated = retry_(sample_generator) + + num = 10 + result = decorated(num) + assert inspect.isgenerator(result) + unpacked = [i for i in result] + assert len(unpacked) == num + for a,b in zip(unpacked, sample_generator(num)): + assert a == b + sleep.assert_not_called() + + +def 
sample_generator(num=5): + for i in range(num): + yield i From 5a5396c106ee4840bdccc501a06eb12a92835ad7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 13:02:27 -0800 Subject: [PATCH 008/204] restructured test --- tests/unit/test_retry.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 99afc6ac..5eeeb779 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -469,22 +469,29 @@ def test___init___when_retry_is_executed(self, sleep, uniform): target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) + def _generator_mock(self, num=5, error_on=None, return_val=None): + for i in range(num): + if error_on and i == error_on: + raise ValueError("generator mock error") + yield i + return return_val + @mock.patch("time.sleep", autospec=True) - def test___call___with_generator_target(self, sleep): + def test___call___generator_success(self, sleep): retry_ = retry.Retry() - decorated = retry_(sample_generator) + decorated = retry_(self._generator_mock) num = 10 result = decorated(num) + # check types assert inspect.isgenerator(result) + assert type(decorated(num)) == type(self._generator_mock(num)) + # check yield contents unpacked = [i for i in result] assert len(unpacked) == num - for a,b in zip(unpacked, sample_generator(num)): + for a,b in zip(decorated(num), self._generator_mock(num)): assert a == b sleep.assert_not_called() -def sample_generator(num=5): - for i in range(num): - yield i From 7afa76b0a9570f6c37847ca4ec3019f6104734cc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 15:05:18 -0800 Subject: [PATCH 009/204] added tests --- google/api_core/retry.py | 11 +-- tests/unit/test_retry.py | 147 +++++++++++++++++++++++++++++++++++++-- 2 files changed, 143 insertions(+), 15 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 32067fe9..2c398b79 
100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -189,15 +189,8 @@ def retry_target_generator( for sleep in sleep_generator: try: - yield from target() - return + return (yield from target()) - # pylint: disable=broad-except - # This function explicitly must deal with broad exceptions. - except GeneratorExit: - # pass close call to target generator - target.close() - raise except Exception as exc: if not predicate(exc): raise @@ -431,7 +424,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = retry_target if not isgeneratorfunction(target) else retry_target_generator + retry_func = retry_target if not isgeneratorfunction(func) else retry_target_generator return retry_func( target, self._predicate, diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 5eeeb779..7a2c20e7 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -469,12 +469,19 @@ def test___init___when_retry_is_executed(self, sleep, uniform): target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) - def _generator_mock(self, num=5, error_on=None, return_val=None): - for i in range(num): - if error_on and i == error_on: - raise ValueError("generator mock error") - yield i - return return_val + def _generator_mock(self, num=5, error_on=None, return_val=None, exceptions_seen=None): + try: + sent_in = None + for i in range(num): + if error_on and i == error_on: + raise ValueError("generator mock error") + sent_in = yield (sent_in if sent_in else i) + return return_val + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise @mock.patch("time.sleep", autospec=True) def test___call___generator_success(self, sleep): @@ -494,4 +501,132 @@ def test___call___generator_success(self, 
sleep): assert a == b sleep.assert_not_called() + @mock.patch("time.sleep", autospec=True) + def test___call___generator_retry(self, sleep): + on_error = mock.Mock() + retry_ = retry.Retry(on_error=on_error, predicate=retry.if_exception_type(ValueError)) + result = retry_(self._generator_mock)(error_on=3) + assert inspect.isgenerator(result) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [next(result) for i in range(10)] + assert unpacked == [0,1,2,0,1,2,0,1,2,0] + assert on_error.call_count==3 + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("time.sleep", autospec=True) + def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + on_error = mock.Mock() + retry_ = retry.Retry( + predicate=retry.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + deadline=30.9, + ) + + utcnow = datetime.datetime.utcnow() + utcnow_patcher = mock.patch( + "google.api_core.datetime_helpers.utcnow", return_value=utcnow + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = decorated(error_on=1) + with utcnow_patcher as patched_utcnow: + # Make sure that calls to fake time.sleep() also advance the mocked + # time clock. 
+ def increase_time(sleep_delay): + patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + + sleep.side_effect = increase_time + with pytest.raises(exceptions.RetryError): + unpacked = [i for i in generator] + + # check the delays + assert sleep.call_count == 4 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + assert last_wait == 8.0 + assert total_wait == 15.0 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + retry_ = retry.Retry() + + decorated = retry_(self._generator_mock) + + generator = decorated(5) + result = next(generator) + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = generator.send(msg) + out_messages.append(recv) + assert in_messages == out_messages + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_return(self, sleep): + """ + Generator return value should be passed through retry decorator + """ + retry_ = retry.Retry() + + decorated = retry_(self._generator_mock) + + expected_value = "done" + generator = decorated(5, return_val=expected_value) + found_value = None + try: + while True: + next(generator) + except StopIteration as e: + found_value = e.value + assert found_value == expected_value + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_close(self, sleep): + retry_ = retry.Retry() + + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + generator.close() + assert isinstance(exception_list[0], GeneratorExit) + assert inspect.getgeneratorstate(generator) == 'GEN_CLOSED' + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + 
+ @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_throw(self, sleep): + retry_ = retry.Retry() + + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + with pytest.raises(BufferError): + generator.throw(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + assert inspect.getgeneratorstate(generator) == 'GEN_CLOSED' + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + + @mock.patch("time.sleep", autospec=True) + def test___init___generator_without_retry_executed(self, sleep): + pass + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("time.sleep", autospec=True) + def test___init___generator_when_retry_is_executed(self, sleep, uniform): + pass From 2d91ade759f453650cad78519c65a962509ae0bc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 15:35:17 -0800 Subject: [PATCH 010/204] refactoring and comments in retry code --- google/api_core/retry.py | 65 ++++++++++++++++++++-------------------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 2c398b79..f7496a19 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -155,8 +155,8 @@ def retry_target_generator( higher-level retry helper :class:`Retry`. Args: - target(Generator): The generator to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. + target(Callable[None, Generator[Any,Any,Any]]): A generator function to yield from. + This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. It should return True to retry or False otherwise. 
@@ -170,7 +170,7 @@ def retry_target_generator( compatibility, if specified it will override ``timeout`` parameter. Returns: - Any: the return value of the target function. + Generator[Any,Any,Any]: returns a generator that wraps the target in retry logic. Raises: google.api_core.RetryError: If the deadline is exceeded while retrying. @@ -189,6 +189,7 @@ def retry_target_generator( for sleep in sleep_generator: try: + #create and yeild from a new instance of the generator from input generator function return (yield from target()) except Exception as exc: @@ -197,19 +198,7 @@ def retry_target_generator( last_exc = exc if on_error is not None: on_error(exc) - - if deadline is not None: - next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep - ) - if deadline < next_attempt_time: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - + _raise_if_over_deadline(deadline, sleep_time) _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) ) @@ -226,7 +215,7 @@ def retry_target( higher-level retry helper :class:`Retry`. Args: - target(Callable): The function to call and retry. This must be a + target(Callable[None, Any]): The function to call and retry. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. 
@@ -270,19 +259,7 @@ def retry_target( last_exc = exc if on_error is not None: on_error(exc) - - if deadline is not None: - next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep - ) - if deadline < next_attempt_time: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - + _raise_if_over_deadline(deadline, sleep_time) _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) ) @@ -290,6 +267,27 @@ def retry_target( raise ValueError("Sleep generator stopped yielding sleep values.") +def _raise_if_over_deadline(deadline, sleep_time): + """ + Raise an exception if the next sleep time would push it over the deadline + + Args: + deadline (float): a UTC timestamp for when to stop retries + sleep_time (float): the amount of time to sleep before the next try + Raises: + google.api_core.RetryError: If the deadline is exceeded while retrying. + """ + if deadline is not None: + next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( + seconds=sleep + ) + if deadline < next_attempt_time: + raise exceptions.RetryError( + "Deadline of {:.1f}s exceeded while calling target function".format( + timeout + ), + last_exc, + ) from last_exc class Retry(object): """Exponential retry decorator. @@ -405,13 +403,15 @@ def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. Args: - func (Callable): The callable to add retry behavior to. + func (Callable[Any, Any]): The callable to add retry behavior to. + If a generator function is passed in (Callable[Any, Generator[Any,Any,Any]]), + a matching retryable generator will be returned. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught.
Returns: - Callable: A callable that will invoke ``func`` with retry + Callable: A callable that will invoke or yield from ``func`` with retry behavior. """ if self._on_error is not None: @@ -424,6 +424,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) + # if the target is a generator function, use a different retry function that is also a generator function retry_func = retry_target if not isgeneratorfunction(func) else retry_target_generator return retry_func( target, From 0cd384e32abec474e1e176c9d594abf32cec3dc5 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 16:56:54 -0800 Subject: [PATCH 011/204] fixed helper; added is_generator flag --- google/api_core/retry.py | 63 +++++++++++++++++++++++----------------- tests/unit/test_retry.py | 32 ++++++++++++++++---- 2 files changed, 62 insertions(+), 33 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index f7496a19..ac3c5a78 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -193,16 +193,8 @@ def retry_target_generator( return (yield from target()) except Exception as exc: - if not predicate(exc): - raise last_exc = exc - if on_error is not None: - on_error(exc) - _raise_if_over_deadline(deadline, sleep_time) - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - time.sleep(sleep) + _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep) raise ValueError("Sleep generator stopped yielding sleep values.") @@ -246,7 +238,6 @@ def retry_target( deadline = None last_exc = None - for sleep in sleep_generator: try: return target() @@ -254,32 +245,42 @@ def retry_target( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: - if not predicate(exc): - raise last_exc = exc - if on_error is not None: - on_error(exc) - _raise_if_over_deadline(deadline, sleep_time) - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - time.sleep(sleep) + _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep) + raise ValueError("Sleep generator stopped yielding sleep values.") -def _raise_if_over_deadline(deadline, sleep_time): +def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep_time): """ - Raise an exception if the next sleep time would push it over the deadline + Helper function that contains retry and timeout logic. + Raise an exception if: + - the exception is not handled by the predicate + - the next sleep time would push it over the deadline + Otherwise, sleeps before next retry Args: + last_exc (Exception): the last exception that was encountered as part + running the target function + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + on_error (Callable[Exception]): A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. + timeout (float): total time the target was retried for. deadline (float): a UTC timestamp for when to stop retries sleep_time (float): the amount of time to sleep before the next try Raises: google.api_core.RetryError: If the deadline is exceeded while retrying. 
""" + if not predicate(last_exc): + raise last_exc + if on_error is not None: + on_error(last_exc) if deadline is not None: next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep + seconds=sleep_time ) if deadline < next_attempt_time: raise exceptions.RetryError( @@ -288,6 +289,10 @@ def _raise_if_over_deadline(deadline, sleep_time): ), last_exc, ) from last_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep_time) + ) + time.sleep(sleep_time) class Retry(object): """Exponential retry decorator. @@ -374,9 +379,10 @@ class Retry(object): on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - generator_target (bool): If True, the target function will be treated - as a generator. Wrapper function will `yield from` the target, and - pass `close()` calls through. + is_generator (Optional[bool]): Indicates whether the input function + should be treated as a generator function. If True, retries will + `yield from` wrapped function. If false, retries will call wrapped + function directly. If None, function will be auto-detected. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ @@ -389,6 +395,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_DEADLINE, on_error=None, + is_generator=None, **kwargs ): self._predicate = predicate @@ -398,6 +405,7 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error + self._is_generator = is_generator def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. 
@@ -424,8 +432,9 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - # if the target is a generator function, use a different retry function that is also a generator function - retry_func = retry_target if not isgeneratorfunction(func) else retry_target_generator + # if the target is a generator function, make sure return is also a generator function + use_generator = self._is_generator if self._is_generator is not None else isgeneratorfunction(func) + retry_func = retry_target_generator if use_generator else retry_target return retry_func( target, self._predicate, diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 7a2c20e7..51b0f439 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -16,6 +16,7 @@ import itertools import re import inspect +import functools import mock import pytest @@ -621,12 +622,31 @@ def test___call___with_generator_throw(self, sleep): # calling next on closed generator should raise error next(generator) - @mock.patch("time.sleep", autospec=True) - def test___init___generator_without_retry_executed(self, sleep): - pass - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("time.sleep", autospec=True) + def test___call___with_is_generator(self, sleep): + gen_retry_ = retry.Retry(is_generator=True, predicate=retry.if_exception_type(ValueError)) + not_gen_retry_ = retry.Retry(is_generator=False, predicate=retry.if_exception_type(ValueError)) + auto_retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) + # force generator to act as non-generator + with pytest.raises(ValueError): + # generator should not retry if error is thrown on yield + gen = not_gen_retry_(self._generator_mock)(10, error_on=3) + unpacked = [next(gen) for i in range(10)] + # wrapped generators won't be detected as generator functions + wrapped = functools.partial(self._generator_mock, 10, 
error_on=6) + assert not inspect.isgeneratorfunction(wrapped) + with pytest.raises(ValueError): + # generator should not retry if error is thrown on yield + gen = auto_retry_(wrapped)() + unpacked = [next(gen) for i in range(10)] + # force non-detected to be accepted as generator + gen = gen_retry_(wrapped)() + unpacked = [next(gen) for i in range(10)] + assert unpacked == [0,1,2,3,4,5,0,1,2,3] + + + + - def test___init___generator_when_retry_is_executed(self, sleep, uniform): - pass From f72bbec11434360263ad1b5066479edc40ee6762 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 18:03:50 -0800 Subject: [PATCH 012/204] got first test working --- google/api_core/retry_async.py | 30 +++++-------- tests/asyncio/test_retry_async.py | 73 +++++++++++++++++++++++++++++++ 2 files changed, 85 insertions(+), 18 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index b019ac25..e5b125ff 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -55,6 +55,7 @@ async def check_if_exists(): import datetime import functools import logging +from inspect import isasyncgenfunction from google.api_core import datetime_helpers from google.api_core import exceptions @@ -239,7 +240,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_TIMEOUT, on_error=None, - generator_target=False, + is_generator=False, **kwargs ): self._predicate = predicate @@ -249,7 +250,7 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error - self._generator_target = generator_target + self._is_generator = is_generator def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. @@ -261,7 +262,8 @@ def __call__(self, func, on_error=None): *not* be caught. 
Returns: - Callable: A callable that will invoke ``func`` with retry + Union[Coroutine, Awaitable[AsynchronousGenerator]: A couroutine + that will invoke or yield from ``func`` with retry behavior. """ if self._on_error is not None: @@ -274,22 +276,14 @@ async def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - if self._generator_target: - return retry_target_generator( - target, - self._predicate, - sleep_generator, - self._timeout, - on_error=on_error, - ) + # if the target is a generator function, make sure return is also a generator function + use_generator = self._is_generator if self._is_generator is not None else isasyncgenfunction(func) + fn_args = (target, self._predicate, sleep_generator, self._timeout) + fn_kwargs = {"on_error": on_error} + if use_generator: + return retry_target_generator(*fn_args, **fn_kwargs) else: - return await retry_target( - target, - self._predicate, - sleep_generator, - self._timeout, - on_error=on_error, - ) + return await retry_target(*fn_args, **fn_kwargs) return retry_wrapped_func diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 14807eb5..63fe2e31 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -14,6 +14,7 @@ import datetime import re +import inspect import mock import pytest @@ -402,3 +403,75 @@ async def test___init___when_retry_is_executed(self, sleep, uniform): assert _some_function.call_count == 2 target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) + + async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None): + try: + sent_in = None + for i in range(num): + if error_on and i == error_on: + raise ValueError("generator mock error") + sent_in = yield (sent_in if sent_in else i) + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by 
generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_success(self, sleep): + retry_ = retry_async.AsyncRetry(is_generator=True) + + decorated = retry_(self._generator_mock) + + num = 10 + generator = decorated(num) + # check types + # assert inspect.isasyncgen(generator) + # assert type(decorated(num)) == type(self._generator_target(num)) + # check yield contents + # unpacked = [i async for i in self._generator_mock(num)] + # TODO: get rid of extra await? + unpacked = [i async for i in await generator] + assert len(unpacked) == num + expected = [i async for i in self._generator_mock(num)] + for a,b in zip(unpacked, expected): + assert a == b + sleep.assert_not_called() + + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_retry(self, sleep): + pass + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + pass + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_send(self, sleep): + pass + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_return(self, sleep): + pass + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_close(self, sleep): + pass + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_throw(self, sleep): + pass + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_is_generator(self, sleep): + pass From 88eed5cb9ae2f24fe4aa1aaaca7ce9e98eab1dd1 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 18:21:21 
-0800 Subject: [PATCH 013/204] remove extra await in front of async generator --- google/api_core/retry_async.py | 32 ++++++++++++++++++++----------- tests/asyncio/test_retry_async.py | 8 +++----- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index e5b125ff..8819cec8 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -133,7 +133,7 @@ async def retry_target( higher-level retry helper :class:`Retry`. Args: - target(Callable): The function to call and retry. This must be a + target(Coroutine): The function to call and retry. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. @@ -256,19 +256,23 @@ def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. Args: - func (Callable): The callable to add retry behavior to. + func (Union[Coroutine, Callable[Any, AsynchronousGenerator]): The + coroutine or async generator function to add retry behavior to. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. Returns: - Union[Coroutine, Awaitable[AsynchronousGenerator]: A couroutine - that will invoke or yield from ``func`` with retry - behavior. + Union[Coroutine, AsynchronousGenerator]: One of: + - A coroutine that will invoke ``func`` if ``func`` is a coroutine + - An AsynchronousGenerator that yields from ``func`` if ``func`` is an AsynchronousGenerator.
""" if self._on_error is not None: on_error = self._on_error + # if the target is a generator function, make sure return is also a generator function + use_generator = self._is_generator if self._is_generator is not None else isasyncgenfunction(func) + @functools.wraps(func) async def retry_wrapped_func(*args, **kwargs): """A wrapper that calls target function with retry.""" @@ -276,16 +280,22 @@ async def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) + return await retry_target(target, self._predicate, sleep_generator, self._timeout, on_error=on_error) + + + @functools.wraps(func) + def retry_wrapped_generator(*args, **kwargs): + """A wrapper that yields through target generator with retry.""" + target = functools.partial(func, *args, **kwargs) + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) # if the target is a generator function, make sure return is also a generator function use_generator = self._is_generator if self._is_generator is not None else isasyncgenfunction(func) fn_args = (target, self._predicate, sleep_generator, self._timeout) - fn_kwargs = {"on_error": on_error} - if use_generator: - return retry_target_generator(*fn_args, **fn_kwargs) - else: - return await retry_target(*fn_args, **fn_kwargs) + return retry_target_generator(target, self._predicate, sleep_generator, self._timeout, on_error=on_error) - return retry_wrapped_func + return retry_wrapped_generator if use_generator else retry_wrapped_func def _replace( self, diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 63fe2e31..91746577 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -427,12 +427,10 @@ async def test___call___generator_success(self, sleep): num = 10 generator = decorated(num) # check types - # assert inspect.isasyncgen(generator) - # assert 
type(decorated(num)) == type(self._generator_target(num)) + assert inspect.isasyncgen(generator) + assert type(decorated(num)) == type(self._generator_mock(num)) # check yield contents - # unpacked = [i async for i in self._generator_mock(num)] - # TODO: get rid of extra await? - unpacked = [i async for i in await generator] + unpacked = [i async for i in generator] assert len(unpacked) == num expected = [i async for i in self._generator_mock(num)] for a,b in zip(unpacked, expected): From 91f9cc4dced4ce369f0c5469b20a935f156482b3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 19:23:55 -0800 Subject: [PATCH 014/204] implemented async generator retry test --- tests/asyncio/test_retry_async.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 91746577..b9530115 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -441,7 +441,15 @@ async def test___call___generator_success(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry(self, sleep): - pass + on_error = mock.Mock() + retry_ = retry_async.AsyncRetry(is_generator=True, on_error=on_error, predicate=retry_async.if_exception_type(ValueError)) + generator = retry_(self._generator_mock)(error_on=3) + assert inspect.isasyncgen(generator) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [await anext(generator) for i in range(10)] + assert unpacked == [0,1,2,0,1,2,0,1,2,0] + assert on_error.call_count==3 @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) From c3eb9972dc06fa4d1acee1c0b0ae90dcba3c75f2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 20:41:21 -0800 Subject: [PATCH 015/204] fixed is_generator --- google/api_core/retry_async.py | 2 +- tests/asyncio/test_retry_async.py | 4 ++-- 
2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 8819cec8..bee69060 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -240,7 +240,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_TIMEOUT, on_error=None, - is_generator=False, + is_generator=None, **kwargs ): self._predicate = predicate diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index b9530115..630f5168 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -420,7 +420,7 @@ async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_success(self, sleep): - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry() decorated = retry_(self._generator_mock) @@ -442,7 +442,7 @@ async def test___call___generator_success(self, sleep): @pytest.mark.asyncio async def test___call___generator_retry(self, sleep): on_error = mock.Mock() - retry_ = retry_async.AsyncRetry(is_generator=True, on_error=on_error, predicate=retry_async.if_exception_type(ValueError)) + retry_ = retry_async.AsyncRetry(on_error=on_error, predicate=retry_async.if_exception_type(ValueError)) generator = retry_(self._generator_mock)(error_on=3) assert inspect.isasyncgen(generator) # error thrown on 3 From f6c62016511f2b1008d397a016f6b4eb25ce89b6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 20:54:31 -0800 Subject: [PATCH 016/204] added tests for aclose and athrow --- google/api_core/retry_async.py | 13 +++++++---- tests/asyncio/test_retry_async.py | 38 +++++++++++++++++++++---------- 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index bee69060..177e5216 100644 --- a/google/api_core/retry_async.py +++ 
b/google/api_core/retry_async.py @@ -86,11 +86,16 @@ async def retry_target_generator( for sleep in sleep_generator: try: - async for item in target(): - yield item + subgenerator = target() + async for item in subgenerator: + try: + yield item + except (Exception, GeneratorExit) as exc: + # if athrow, or aclose pass on to subgenerator + await subgenerator.athrow(exc) # check for overtime - if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): - raise asyncio.TimeoutError("generator timeout") + # if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): + # raise asyncio.TimeoutError("generator timeout") return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 630f5168..4bd07094 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -457,25 +457,39 @@ async def test___call___generator_retry(self, sleep): async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): pass - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_generator_send(self, sleep): - pass - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_generator_return(self, sleep): - pass - @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_close(self, sleep): - pass + retry_ = retry_async.AsyncRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + await anext(generator) + await generator.aclose() + + assert isinstance(exception_list[0], GeneratorExit) + assert generator.ag_running == False + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await anext(generator) @mock.patch("asyncio.sleep", autospec=True) 
@pytest.mark.asyncio async def test___call___with_generator_throw(self, sleep): - pass + retry_ = retry_async.AsyncRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + await anext(generator) + with pytest.raises(BufferError): + await generator.athrow(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + assert generator.ag_running == False + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await anext(generator) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio From 57b0ee369ef5a692ff4749ac63c0cc09d5ab68b9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 21:02:11 -0800 Subject: [PATCH 017/204] simplified close; don't support throws --- google/api_core/retry_async.py | 11 ++++++----- tests/asyncio/test_retry_async.py | 17 ----------------- 2 files changed, 6 insertions(+), 22 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 177e5216..01829352 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -83,22 +83,23 @@ async def retry_target_generator( ) last_exc = None + subgenerator = None for sleep in sleep_generator: try: subgenerator = target() async for item in subgenerator: - try: - yield item - except (Exception, GeneratorExit) as exc: - # if athrow, or aclose pass on to subgenerator - await subgenerator.athrow(exc) + yield item # check for overtime # if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): # raise asyncio.TimeoutError("generator timeout") return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
+ except GeneratorExit as close_exc: + if subgenerator is not None: + await subgenerator.aclose() + raise except Exception as exc: if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): raise diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 4bd07094..7c59f09e 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -474,23 +474,6 @@ async def test___call___with_generator_close(self, sleep): # calling next on closed generator should raise error await anext(generator) - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_generator_throw(self, sleep): - retry_ = retry_async.AsyncRetry() - decorated = retry_(self._generator_mock) - exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) - for i in range(2): - await anext(generator) - with pytest.raises(BufferError): - await generator.athrow(BufferError("test")) - assert isinstance(exception_list[0], BufferError) - assert generator.ag_running == False - with pytest.raises(StopAsyncIteration): - # calling next on closed generator should raise error - await anext(generator) - @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_is_generator(self, sleep): From e814ce7985f047ede94e26d752c702750e7a13ac Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 21:56:55 -0800 Subject: [PATCH 018/204] added tests --- tests/asyncio/test_retry_async.py | 60 +++++++++++++++++++++++++++++-- tests/unit/test_retry.py | 16 ++++++--- 2 files changed, 70 insertions(+), 6 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 7c59f09e..86861420 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -15,6 +15,7 @@ import datetime import re import inspect +import functools import mock import pytest @@ -455,7 +456,43 @@ async def 
test___call___generator_retry(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): - pass + on_error = mock.Mock() + retry_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + deadline=9.9, + ) + + utcnow = datetime.datetime.utcnow() + utcnow_patcher = mock.patch( + "google.api_core.datetime_helpers.utcnow", return_value=utcnow + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = decorated(error_on=1) + + with utcnow_patcher as patched_utcnow: + # Make sure that calls to fake asyncio.sleep() also advance the mocked + # time clock. + def increase_time(sleep_delay): + patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + + sleep.side_effect = increase_time + + with pytest.raises(exceptions.RetryError): + unpacked = [i async for i in generator] + + assert on_error.call_count == 5 + + # check the delays + assert sleep.call_count == 4 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + + assert last_wait == 2.9 # and not 8.0, because the last delay was shortened + assert total_wait == 9.9 # the same as the deadline @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -477,4 +514,23 @@ async def test___call___with_generator_close(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_is_generator(self, sleep): - pass + gen_retry_ = retry_async.AsyncRetry(is_generator=True, predicate=retry_async.if_exception_type(ValueError)) + not_gen_retry_ = retry_async.AsyncRetry(is_generator=False, predicate=retry_async.if_exception_type(ValueError)) + auto_retry_ = retry_async.AsyncRetry(predicate=retry_async.if_exception_type(ValueError)) + # force generator to act as 
non-generator + with pytest.raises(TypeError): + # error will be thrown because gen is coroutine + gen = not_gen_retry_(self._generator_mock)(10, error_on=3) + unpacked = [await anext(gen) for i in range(10)] + # wrapped generators won't be detected as generator functions + wrapped = functools.partial(self._generator_mock, 10, error_on=6) + assert not inspect.isasyncgenfunction(wrapped) + with pytest.raises(TypeError): + # error will be thrown because gen is coroutine + gen = auto_retry_(wrapped)() + unpacked = [next(gen) for i in range(10)] + # force non-detected to be accepted as generator + gen = gen_retry_(wrapped)() + unpacked = [await anext(gen) for i in range(10)] + assert unpacked == [0,1,2,3,4,5,0,1,2,3] + diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 51b0f439..64dbe38a 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -543,6 +543,7 @@ def increase_time(sleep_delay): with pytest.raises(exceptions.RetryError): unpacked = [i for i in generator] + assert on_error.call_count == 5 # check the delays assert sleep.call_count == 4 # once between each successive target calls last_wait = sleep.call_args.args[0] @@ -606,8 +607,7 @@ def test___call___with_generator_close(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_generator_throw(self, sleep): - retry_ = retry.Retry() - + retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) decorated = retry_(self._generator_mock) exception_list = [] @@ -621,8 +621,16 @@ def test___call___with_generator_throw(self, sleep): with pytest.raises(StopIteration): # calling next on closed generator should raise error next(generator) - - + # should retry if throw retryable exception + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + val = generator.throw(ValueError("test")) + assert val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on closed generator 
should not raise error + assert next(generator) == 1 @mock.patch("time.sleep", autospec=True) def test___call___with_is_generator(self, sleep): From 0ffb03f6e1460971e27228ccace77884ce5e2b0b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 22:03:40 -0800 Subject: [PATCH 019/204] have test that throw should retry --- google/api_core/retry_async.py | 3 +++ tests/asyncio/test_retry_async.py | 14 ++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 01829352..ac9f4461 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -87,6 +87,9 @@ async def retry_target_generator( for sleep in sleep_generator: try: + # composed generators do not currently support `yield from`, + # so we can not pass asend or athrow calls + # https://peps.python.org/pep-0525/#asynchronous-yield-from subgenerator = target() async for item in subgenerator: yield item diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 86861420..0a131c58 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -511,6 +511,20 @@ async def test___call___with_generator_close(self, sleep): # calling next on closed generator should raise error await anext(generator) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_throw(self, sleep): + # should retry if throw retryable exception + retry_ = retry_async.AsyncRetry(predicate=retry_async.if_exception_type(ValueError)) + decorated = retry_(self._generator_mock) + generator = decorated(10) + for i in range(2): + await anext(generator) + throw_val = await generator.athrow(ValueError("test")) + assert throw_val == 0 + # calling next on closed generator should not raise error + assert await anext(generator) == 1 + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_is_generator(self, 
sleep): From a8024f340840915966ffbe8e3e13ac47a49cb3ed Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 22:28:34 -0800 Subject: [PATCH 020/204] improved aclose and athrow --- google/api_core/retry_async.py | 17 +++++++++-------- tests/asyncio/test_retry_async.py | 16 ++++++++++++++-- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ac9f4461..a0f94360 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -87,22 +87,23 @@ async def retry_target_generator( for sleep in sleep_generator: try: - # composed generators do not currently support `yield from`, - # so we can not pass asend or athrow calls - # https://peps.python.org/pep-0525/#asynchronous-yield-from subgenerator = target() async for item in subgenerator: - yield item + try: + yield item + except GeneratorExit as close_exc: + # handle aclose() + await subgenerator.aclose() + raise + except Exception as throw_exc: + # handle athrow() + await subgenerator.athrow(throw_exc) # check for overtime # if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): # raise asyncio.TimeoutError("generator timeout") return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
- except GeneratorExit as close_exc: - if subgenerator is not None: - await subgenerator.aclose() - raise except Exception as exc: if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): raise diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 0a131c58..95c4e96a 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -514,14 +514,26 @@ async def test___call___with_generator_close(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_throw(self, sleep): - # should retry if throw retryable exception retry_ = retry_async.AsyncRetry(predicate=retry_async.if_exception_type(ValueError)) decorated = retry_(self._generator_mock) - generator = decorated(10) + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + await anext(generator) + with pytest.raises(BufferError): + await generator.athrow(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await anext(generator) + # should retry if throw retryable exception + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) for i in range(2): await anext(generator) throw_val = await generator.athrow(ValueError("test")) assert throw_val == 0 + assert isinstance(exception_list[0], ValueError) # calling next on closed generator should not raise error assert await anext(generator) == 1 From c76f64117c2bbb5512d083f04c06c7435a573fe7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 22:39:34 -0800 Subject: [PATCH 021/204] added comments --- google/api_core/retry_async.py | 39 ++++++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index a0f94360..ed53e8f1 100644 --- 
a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -74,6 +74,36 @@ async def check_if_exists(): async def retry_target_generator( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): + """Await a coroutine and retry if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target(Callable[None, AsynchronousGenerator]): An asynchronous + generator function to yield from. This must be a nullary + function - apply arguments with `functools.partial`. + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator (Iterable[float]): An infinite iterator that determines + how long to sleep between retries. + timeout (float): How long to keep retrying the target, in seconds. + on_error (Callable[Exception]): A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. + deadline (float): DEPRECATED use ``timeout`` instead. For backward + compatibility, if set it will override the ``timeout`` parameter. + + Returns: + Any: the return value of the target function. + + Raises: + google.api_core.RetryError: If the deadline is exceeded while retrying. + ValueError: If the sleep generator stops yielding values. + Exception: If the target raises a method that isn't retryable. 
+ """ + timeout = kwargs.get("deadline", timeout) deadline_dt = ( @@ -98,14 +128,15 @@ async def retry_target_generator( except Exception as throw_exc: # handle athrow() await subgenerator.athrow(throw_exc) - # check for overtime - # if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): - # raise asyncio.TimeoutError("generator timeout") + # check for timeout + if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): + raise asyncio.TimeoutError("generator timeout") return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + await subgenerator.aclose() raise last_exc = exc if on_error is not None: @@ -137,7 +168,7 @@ async def retry_target_generator( async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): - """Call a function and retry if it fails. + """Await a coroutine and retry if it fails. This is the lowest-level retry helper. Generally, you'll use the higher-level retry helper :class:`Retry`. 
From ee631e3b47c86cc52dd296e47e4c98f315d11213 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 22:57:52 -0800 Subject: [PATCH 022/204] close synchronous generator --- google/api_core/retry.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index ac3c5a78..dc848ead 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -186,13 +186,16 @@ def retry_target_generator( deadline = None last_exc = None - + subgenerator = None for sleep in sleep_generator: try: #create and yeild from a new instance of the generator from input generator function - return (yield from target()) + subgenerator = target() + return (yield from subgenerator) except Exception as exc: + if subgenerator is not None: + subgenerator.close() last_exc = exc _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep) From 70eb78c5a9ac022cbf2d1a26ffbe04fbf25ec0fe Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 22:58:25 -0800 Subject: [PATCH 023/204] refactor async file --- google/api_core/retry_async.py | 101 +++++++++++++++++---------------- 1 file changed, 51 insertions(+), 50 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ed53e8f1..3bad03e1 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -135,33 +135,10 @@ async def retry_target_generator( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: - if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + if subgenerator is not None: await subgenerator.aclose() - raise last_exc = exc - if on_error is not None: - on_error(exc) - - now = datetime_helpers.utcnow() - - if deadline_dt: - if deadline_dt <= now: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. 
- raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_dt - now).total_seconds() - sleep = min(time_to_deadline, sleep) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - await asyncio.sleep(sleep) + await _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, sleep) raise ValueError("Sleep generator stopped yielding sleep values.") @@ -219,35 +196,59 @@ async def retry_target( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: - if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise last_exc = exc - if on_error is not None: - on_error(exc) - - now = datetime_helpers.utcnow() - - if deadline_dt: - if deadline_dt <= now: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_dt - now).total_seconds() - sleep = min(time_to_deadline, sleep) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - await asyncio.sleep(sleep) + await _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, sleep) raise ValueError("Sleep generator stopped yielding sleep values.") +async def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, sleep_time): + """ + Helper function that contains retry and timeout logic. 
+ Raise an exception if: + - the exception is not handled by the predicate + - the next sleep time would push it over the deadline + Otherwise, sleeps before next retry + + Args: + last_exc (Exception): the last exception that was encountered as part + running the target function + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + on_error (Callable[Exception]): A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. + timeout (float): total time the target was retried for. + deadline_dt (float): a UTC timestamp for when to stop retries + sleep_time (float): the amount of time to sleep before the next try + Raises: + google.api_core.RetryError: If the deadline is exceeded while retrying. + """ + if not predicate(last_exc) and not isinstance(last_exc, asyncio.TimeoutError) and not isinstance(last_exc, GeneratorExit): + raise last_exc + if on_error is not None: + on_error(last_exc) + + now = datetime_helpers.utcnow() + + if deadline_dt: + if deadline_dt <= now: + # Chains the raising RetryError with the root cause error, + # which helps observability and debugability. + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded while calling target function".format( + timeout + ), + last_exc, + ) from last_exc + else: + time_to_deadline = (deadline_dt - now).total_seconds() + sleep_time = min(time_to_deadline, sleep_time) + + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep_time) + ) + await asyncio.sleep(sleep_time) class AsyncRetry: """Exponential retry decorator for async functions. 
From 42ee132a1eee4144455e5802d907dea6baf9dcd2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 10 Feb 2023 22:59:28 -0800 Subject: [PATCH 024/204] ran blacken --- google/api_core/retry.py | 16 +++++++--- google/api_core/retry_async.py | 53 +++++++++++++++++++++++++------ tests/asyncio/test_retry_async.py | 30 ++++++++++------- tests/unit/test_retry.py | 33 ++++++++++--------- 4 files changed, 91 insertions(+), 41 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index dc848ead..f41f6dcd 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -147,7 +147,7 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT def retry_target_generator( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): """Wrap a generator object and retry if it fails. @@ -189,7 +189,7 @@ def retry_target_generator( subgenerator = None for sleep in sleep_generator: try: - #create and yeild from a new instance of the generator from input generator function + # create and yeild from a new instance of the generator from input generator function subgenerator = target() return (yield from subgenerator) @@ -201,6 +201,7 @@ def retry_target_generator( raise ValueError("Sleep generator stopped yielding sleep values.") + def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): @@ -251,9 +252,9 @@ def retry_target( last_exc = exc _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep) - raise ValueError("Sleep generator stopped yielding sleep values.") + def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep_time): """ Helper function that contains retry and timeout logic. @@ -297,6 +298,7 @@ def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep_time ) time.sleep(sleep_time) + class Retry(object): """Exponential retry decorator. 
@@ -383,7 +385,7 @@ class Retry(object): a retryable exception. Any error raised by this function will *not* be caught. is_generator (Optional[bool]): Indicates whether the input function - should be treated as a generator function. If True, retries will + should be treated as a generator function. If True, retries will `yield from` wrapped function. If false, retries will call wrapped function directly. If None, function will be auto-detected. deadline (float): DEPRECATED: use `timeout` instead. For backward @@ -436,7 +438,11 @@ def retry_wrapped_func(*args, **kwargs): self._initial, self._maximum, multiplier=self._multiplier ) # if the target is a generator function, make sure return is also a generator function - use_generator = self._is_generator if self._is_generator is not None else isgeneratorfunction(func) + use_generator = ( + self._is_generator + if self._is_generator is not None + else isgeneratorfunction(func) + ) retry_func = retry_target_generator if use_generator else retry_target return retry_func( target, diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 3bad03e1..626f108e 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -71,6 +71,7 @@ async def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds + async def retry_target_generator( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): @@ -81,7 +82,7 @@ async def retry_target_generator( Args: target(Callable[None, AsynchronousGenerator]): An asynchronous - generator function to yield from. This must be a nullary + generator function to yield from. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. 
@@ -138,10 +139,13 @@ async def retry_target_generator( if subgenerator is not None: await subgenerator.aclose() last_exc = exc - await _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, sleep) + await _raise_or_sleep( + last_exc, predicate, on_error, timeout, deadline_dt, sleep + ) raise ValueError("Sleep generator stopped yielding sleep values.") + async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): @@ -197,11 +201,16 @@ async def retry_target( # This function explicitly must deal with broad exceptions. except Exception as exc: last_exc = exc - await _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, sleep) + await _raise_or_sleep( + last_exc, predicate, on_error, timeout, deadline_dt, sleep + ) raise ValueError("Sleep generator stopped yielding sleep values.") -async def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, sleep_time): + +async def _raise_or_sleep( + last_exc, predicate, on_error, timeout, deadline_dt, sleep_time +): """ Helper function that contains retry and timeout logic. Raise an exception if: @@ -224,7 +233,11 @@ async def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, s Raises: google.api_core.RetryError: If the deadline is exceeded while retrying. """ - if not predicate(last_exc) and not isinstance(last_exc, asyncio.TimeoutError) and not isinstance(last_exc, GeneratorExit): + if ( + not predicate(last_exc) + and not isinstance(last_exc, asyncio.TimeoutError) + and not isinstance(last_exc, GeneratorExit) + ): raise last_exc if on_error is not None: on_error(last_exc) @@ -250,6 +263,7 @@ async def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline_dt, s ) await asyncio.sleep(sleep_time) + class AsyncRetry: """Exponential retry decorator for async functions. 
@@ -313,7 +327,11 @@ def __call__(self, func, on_error=None): on_error = self._on_error # if the target is a generator function, make sure return is also a generator function - use_generator = self._is_generator if self._is_generator is not None else isasyncgenfunction(func) + use_generator = ( + self._is_generator + if self._is_generator is not None + else isasyncgenfunction(func) + ) @functools.wraps(func) async def retry_wrapped_func(*args, **kwargs): @@ -322,8 +340,13 @@ async def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return await retry_target(target, self._predicate, sleep_generator, self._timeout, on_error=on_error) - + return await retry_target( + target, + self._predicate, + sleep_generator, + self._timeout, + on_error=on_error, + ) @functools.wraps(func) def retry_wrapped_generator(*args, **kwargs): @@ -333,9 +356,19 @@ def retry_wrapped_generator(*args, **kwargs): self._initial, self._maximum, multiplier=self._multiplier ) # if the target is a generator function, make sure return is also a generator function - use_generator = self._is_generator if self._is_generator is not None else isasyncgenfunction(func) + use_generator = ( + self._is_generator + if self._is_generator is not None + else isasyncgenfunction(func) + ) fn_args = (target, self._predicate, sleep_generator, self._timeout) - return retry_target_generator(target, self._predicate, sleep_generator, self._timeout, on_error=on_error) + return retry_target_generator( + target, + self._predicate, + sleep_generator, + self._timeout, + on_error=on_error, + ) return retry_wrapped_generator if use_generator else retry_wrapped_func diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 95c4e96a..bed643ea 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -434,23 +434,24 @@ async def test___call___generator_success(self, sleep): 
unpacked = [i async for i in generator] assert len(unpacked) == num expected = [i async for i in self._generator_mock(num)] - for a,b in zip(unpacked, expected): + for a, b in zip(unpacked, expected): assert a == b sleep.assert_not_called() - @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry(self, sleep): on_error = mock.Mock() - retry_ = retry_async.AsyncRetry(on_error=on_error, predicate=retry_async.if_exception_type(ValueError)) + retry_ = retry_async.AsyncRetry( + on_error=on_error, predicate=retry_async.if_exception_type(ValueError) + ) generator = retry_(self._generator_mock)(error_on=3) assert inspect.isasyncgen(generator) # error thrown on 3 # generator should contain 0, 1, 2 looping unpacked = [await anext(generator) for i in range(10)] - assert unpacked == [0,1,2,0,1,2,0,1,2,0] - assert on_error.call_count==3 + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @@ -514,7 +515,9 @@ async def test___call___with_generator_close(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_throw(self, sleep): - retry_ = retry_async.AsyncRetry(predicate=retry_async.if_exception_type(ValueError)) + retry_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(ValueError) + ) decorated = retry_(self._generator_mock) exception_list = [] generator = decorated(10, exceptions_seen=exception_list) @@ -540,9 +543,15 @@ async def test___call___with_generator_throw(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_is_generator(self, sleep): - gen_retry_ = retry_async.AsyncRetry(is_generator=True, predicate=retry_async.if_exception_type(ValueError)) - not_gen_retry_ = retry_async.AsyncRetry(is_generator=False, 
predicate=retry_async.if_exception_type(ValueError)) - auto_retry_ = retry_async.AsyncRetry(predicate=retry_async.if_exception_type(ValueError)) + gen_retry_ = retry_async.AsyncRetry( + is_generator=True, predicate=retry_async.if_exception_type(ValueError) + ) + not_gen_retry_ = retry_async.AsyncRetry( + is_generator=False, predicate=retry_async.if_exception_type(ValueError) + ) + auto_retry_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(ValueError) + ) # force generator to act as non-generator with pytest.raises(TypeError): # error will be thrown because gen is coroutine @@ -558,5 +567,4 @@ async def test___call___with_is_generator(self, sleep): # force non-detected to be accepted as generator gen = gen_retry_(wrapped)() unpacked = [await anext(gen) for i in range(10)] - assert unpacked == [0,1,2,3,4,5,0,1,2,3] - + assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 64dbe38a..b8915a6a 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -470,7 +470,9 @@ def test___init___when_retry_is_executed(self, sleep, uniform): target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) - def _generator_mock(self, num=5, error_on=None, return_val=None, exceptions_seen=None): + def _generator_mock( + self, num=5, error_on=None, return_val=None, exceptions_seen=None + ): try: sent_in = None for i in range(num): @@ -498,21 +500,23 @@ def test___call___generator_success(self, sleep): # check yield contents unpacked = [i for i in result] assert len(unpacked) == num - for a,b in zip(decorated(num), self._generator_mock(num)): + for a, b in zip(decorated(num), self._generator_mock(num)): assert a == b sleep.assert_not_called() @mock.patch("time.sleep", autospec=True) def test___call___generator_retry(self, sleep): on_error = mock.Mock() - retry_ = retry.Retry(on_error=on_error, predicate=retry.if_exception_type(ValueError)) + retry_ 
= retry.Retry( + on_error=on_error, predicate=retry.if_exception_type(ValueError) + ) result = retry_(self._generator_mock)(error_on=3) assert inspect.isgenerator(result) # error thrown on 3 # generator should contain 0, 1, 2 looping unpacked = [next(result) for i in range(10)] - assert unpacked == [0,1,2,0,1,2,0,1,2,0] - assert on_error.call_count==3 + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) @@ -600,7 +604,7 @@ def test___call___with_generator_close(self, sleep): next(generator) generator.close() assert isinstance(exception_list[0], GeneratorExit) - assert inspect.getgeneratorstate(generator) == 'GEN_CLOSED' + assert inspect.getgeneratorstate(generator) == "GEN_CLOSED" with pytest.raises(StopIteration): # calling next on closed generator should raise error next(generator) @@ -617,7 +621,7 @@ def test___call___with_generator_throw(self, sleep): with pytest.raises(BufferError): generator.throw(BufferError("test")) assert isinstance(exception_list[0], BufferError) - assert inspect.getgeneratorstate(generator) == 'GEN_CLOSED' + assert inspect.getgeneratorstate(generator) == "GEN_CLOSED" with pytest.raises(StopIteration): # calling next on closed generator should raise error next(generator) @@ -634,8 +638,12 @@ def test___call___with_generator_throw(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_is_generator(self, sleep): - gen_retry_ = retry.Retry(is_generator=True, predicate=retry.if_exception_type(ValueError)) - not_gen_retry_ = retry.Retry(is_generator=False, predicate=retry.if_exception_type(ValueError)) + gen_retry_ = retry.Retry( + is_generator=True, predicate=retry.if_exception_type(ValueError) + ) + not_gen_retry_ = retry.Retry( + is_generator=False, predicate=retry.if_exception_type(ValueError) + ) auto_retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) # 
force generator to act as non-generator with pytest.raises(ValueError): @@ -652,9 +660,4 @@ def test___call___with_is_generator(self, sleep): # force non-detected to be accepted as generator gen = gen_retry_(wrapped)() unpacked = [next(gen) for i in range(10)] - assert unpacked == [0,1,2,3,4,5,0,1,2,3] - - - - - + assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] From 102d83b574cdd74722171ec7f2f1103dbd8a0e2e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Sat, 11 Feb 2023 11:06:40 -0800 Subject: [PATCH 025/204] improved send test --- tests/unit/test_retry.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index b8915a6a..eb22c724 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -564,14 +564,17 @@ def test___call___with_generator_send(self, sleep): decorated = retry_(self._generator_mock) - generator = decorated(5) + generator = decorated(10) result = next(generator) + assert result == 0 in_messages = ["test_1", "hello", "world"] out_messages = [] for msg in in_messages: recv = generator.send(msg) out_messages.append(recv) assert in_messages == out_messages + assert next(generator) == 4 + assert next(generator) == 5 @mock.patch("time.sleep", autospec=True) def test___call___with_generator_return(self, sleep): From f029dbd6e7917f98c69cf43ab2d879b1207e53c1 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Sat, 11 Feb 2023 11:07:14 -0800 Subject: [PATCH 026/204] improved comments --- google/api_core/retry_async.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 626f108e..ce167a69 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -75,7 +75,8 @@ async def check_if_exists(): async def retry_target_generator( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): - """Await a coroutine and retry if it fails. 
+ """Wrap an Asyncrhonous Generator Function in another that will + spawn and yeild from a new generator instance if an error occurs This is the lowest-level retry helper. Generally, you'll use the higher-level retry helper :class:`Retry`. @@ -97,12 +98,14 @@ async def retry_target_generator( compatibility, if set it will override the ``timeout`` parameter. Returns: - Any: the return value of the target function. + AsynchronousGenerator: This function spawns new asynchronous generator + instances when called. - Raises: + Generator Raises: google.api_core.RetryError: If the deadline is exceeded while retrying. ValueError: If the sleep generator stops yielding values. Exception: If the target raises a method that isn't retryable. + StopAsyncIteration: If the generator is exhausted """ timeout = kwargs.get("deadline", timeout) From c83c62a80c0d76de03c2ea191b9b724de23cf446 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Sat, 11 Feb 2023 11:22:11 -0800 Subject: [PATCH 027/204] got send working --- google/api_core/retry_async.py | 35 ++++++++++++++++++++----------- tests/asyncio/test_retry_async.py | 22 +++++++++++++++++++ 2 files changed, 45 insertions(+), 12 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ce167a69..24a550d6 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -56,6 +56,7 @@ async def check_if_exists(): import functools import logging from inspect import isasyncgenfunction +import sys from google.api_core import datetime_helpers from google.api_core import exceptions @@ -122,19 +123,30 @@ async def retry_target_generator( for sleep in sleep_generator: try: subgenerator = target() - async for item in subgenerator: + + sent_in = None + while True: + ## Read from Subgenerator + # TODO: add test for timeout + next_value = await asyncio.wait_for( + subgenerator.asend(sent_in), + timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), + ) + ## Yield from Wrapper to 
caller try: - yield item - except GeneratorExit as close_exc: - # handle aclose() + # yield last value from subgenerator + # exceptions from `athrow` and `aclose` are injected here + sent_in = yield next_value + except GeneratorExit: + # if wrapper received `aclose`, pass to subgenerator and close await subgenerator.aclose() - raise - except Exception as throw_exc: - # handle athrow() - await subgenerator.athrow(throw_exc) - # check for timeout - if deadline_dt and deadline_dt <= datetime_helpers.utcnow(): - raise asyncio.TimeoutError("generator timeout") + return + except: + # if wrapper received `athrow`, pass to subgenerator + await subgenerator.athrow(*sys.exc_info()) + return + except StopAsyncIteration: + # if generator exhausted, return return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. @@ -239,7 +251,6 @@ async def _raise_or_sleep( if ( not predicate(last_exc) and not isinstance(last_exc, asyncio.TimeoutError) - and not isinstance(last_exc, GeneratorExit) ): raise last_exc if on_error is not None: diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index bed643ea..2563889c 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -495,6 +495,28 @@ def increase_time(sleep_delay): assert last_wait == 2.9 # and not 8.0, because the last delay was shortened assert total_wait == 9.9 # the same as the deadline + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + retry_ = retry_async.AsyncRetry() + + decorated = retry_(self._generator_mock) + + generator = decorated(10) + result = await anext(generator) + assert result == 0 + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = await generator.asend(msg) + out_messages.append(recv) + assert in_messages == 
out_messages + assert await anext(generator) == 4 + assert await anext(generator) == 5 + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_close(self, sleep): From 185826cb91839089ca5705be19d51833425fda16 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Sat, 11 Feb 2023 14:02:54 -0800 Subject: [PATCH 028/204] tested deadline handling --- google/api_core/retry_async.py | 47 +++++++++++++++---------------- tests/asyncio/test_retry_async.py | 41 +++++++++++++++++++++++++-- 2 files changed, 61 insertions(+), 27 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 24a550d6..882974f0 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -74,7 +74,7 @@ async def check_if_exists(): async def retry_target_generator( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs + target, predicate, sleep_generator, deadline_dt=None, timeout_seconds=None, on_error=None, **kwargs ): """Wrap an Asyncrhonous Generator Function in another that will spawn and yeild from a new generator instance if an error occurs @@ -91,12 +91,13 @@ async def retry_target_generator( It should return True to retry or False otherwise. sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. - timeout (float): How long to keep retrying the target, in seconds. + deadline_dt (float): The utc timestamp to timeout at. + timeout_seconds (float): the amount of seconds the timeout was set for. + deadline_dt is used for actual timeout calculation, but the timeout + value is presented to the user for errors. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - deadline (float): DEPRECATED use ``timeout`` instead. For backward - compatibility, if set it will override the ``timeout`` parameter. 
Returns: AsynchronousGenerator: This function spawns new asynchronous generator @@ -109,14 +110,6 @@ async def retry_target_generator( StopAsyncIteration: If the generator is exhausted """ - timeout = kwargs.get("deadline", timeout) - - deadline_dt = ( - (datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout)) - if timeout - else None - ) - last_exc = None subgenerator = None @@ -155,7 +148,7 @@ async def retry_target_generator( await subgenerator.aclose() last_exc = exc await _raise_or_sleep( - last_exc, predicate, on_error, timeout, deadline_dt, sleep + last_exc, predicate, on_error, timeout_seconds, deadline_dt, sleep ) raise ValueError("Sleep generator stopped yielding sleep values.") @@ -263,7 +256,7 @@ async def _raise_or_sleep( # Chains the raising RetryError with the root cause error, # which helps observability and debugability. raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded while calling target function".format( + "Timeout of {:.1f}s exceeded".format( timeout ), last_exc, @@ -358,33 +351,37 @@ async def retry_wrapped_func(*args, **kwargs): target, self._predicate, sleep_generator, - self._timeout, + timeout=self._timeout, on_error=on_error, ) @functools.wraps(func) - def retry_wrapped_generator(*args, **kwargs): + def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): """A wrapper that yields through target generator with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) # if the target is a generator function, make sure return is also a generator function - use_generator = ( - self._is_generator - if self._is_generator is not None - else isasyncgenfunction(func) - ) - fn_args = (target, self._predicate, sleep_generator, self._timeout) return retry_target_generator( target, self._predicate, sleep_generator, - self._timeout, + deadline_dt=deadline_dt, + timeout_seconds=self._timeout, on_error=on_error, ) - - 
return retry_wrapped_generator if use_generator else retry_wrapped_func + if use_generator: + # for generator, bake deadline into function at call time + # time should start counting at generator creation, not first yield + deadline_dt = ( + (datetime_helpers.utcnow() + datetime.timedelta(seconds=self._timeout)) + if self._timeout + else None + ) + return functools.partial(retry_wrapped_generator, deadline_dt=deadline_dt) + else: + return retry_wrapped_func def _replace( self, diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 2563889c..017402f2 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -16,6 +16,7 @@ import re import inspect import functools +import asyncio import mock import pytest @@ -418,6 +419,11 @@ async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None): exceptions_seen.append(e) raise + async def _generator_infinite(self): + while True: + asyncio.sleep(5) + yield "done" + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_success(self, sleep): @@ -492,8 +498,39 @@ def increase_time(sleep_delay): last_wait = sleep.call_args.args[0] total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - assert last_wait == 2.9 # and not 8.0, because the last delay was shortened - assert total_wait == 9.9 # the same as the deadline + assert abs(last_wait - 2.9) <= 1e3 # and not 8.0, because the last delay was shortened + assert abs(total_wait - 9.9) <= 1e3 # the same as the deadline + + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_await_hitting_deadline(self, sleep, uniform): + timeout_value = 10 + retry_ = retry_async.AsyncRetry(deadline=timeout_value) + utcnow = datetime.datetime.utcnow() + utcnow_patcher = mock.patch( + "google.api_core.datetime_helpers.utcnow", 
return_value=utcnow + ) + # test timeout before yielding anything + generator = retry_(self._generator_infinite)() + with utcnow_patcher as patched_utcnow: + patched_utcnow.return_value += datetime.timedelta(seconds=20) + with pytest.raises(exceptions.RetryError) as retry_error: + await anext(generator) + assert f"{timeout_value:.1f}" in str(retry_error.value) + # test timeout mid-stream + exception_list = [] + generator = retry_(self._generator_mock)(10, exceptions_seen=exception_list) + assert await anext(generator) == 0 + assert await anext(generator) == 1 + assert await anext(generator) == 2 + with utcnow_patcher as patched_utcnow: + patched_utcnow.return_value += datetime.timedelta(seconds=20) + with pytest.raises(exceptions.RetryError) as retry_error: + await anext(generator) + assert f"{timeout_value:.1f}" in str(retry_error.value) + assert isinstance(exception_list[0], asyncio.CancelledError) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio From c5f7bbe3cb006e6859a3a0af2319cf1148244c07 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 13 Feb 2023 15:43:47 -0800 Subject: [PATCH 029/204] changed timeout to only count time awaiting or sleeping --- google/api_core/retry_async.py | 164 +++++++++++++++--------------- tests/asyncio/test_retry_async.py | 64 ++++++------ 2 files changed, 109 insertions(+), 119 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 882974f0..ffef695f 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -74,7 +74,7 @@ async def check_if_exists(): async def retry_target_generator( - target, predicate, sleep_generator, deadline_dt=None, timeout_seconds=None, on_error=None, **kwargs + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): """Wrap an Asyncrhonous Generator Function in another that will spawn and yeild from a new generator instance if an error occurs @@ -91,14 +91,15 @@ async def retry_target_generator( It 
should return True to retry or False otherwise. sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. - deadline_dt (float): The utc timestamp to timeout at. - timeout_seconds (float): the amount of seconds the timeout was set for. - deadline_dt is used for actual timeout calculation, but the timeout - value is presented to the user for errors. + timeout (float): How long to keep retrying the target, in seconds. + Because generator execution isn't continuous, only time spent + waiting on the target generator or sleeping between retries + is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - + deadline (float): DEPRECATED use ``timeout`` instead. For backward + compatibility, if set it will override the ``timeout`` parameter. Returns: AsynchronousGenerator: This function spawns new asynchronous generator instances when called. 
@@ -113,18 +114,30 @@ async def retry_target_generator( last_exc = None subgenerator = None + timeout = kwargs.get("deadline", timeout) + remaining_timeout_budget = timeout if timeout else None + for sleep in sleep_generator: try: subgenerator = target() sent_in = None while True: + if remaining_timeout_budget <= 0: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout) + ) ## Read from Subgenerator - # TODO: add test for timeout - next_value = await asyncio.wait_for( - subgenerator.asend(sent_in), - timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), - ) + next_value_routine = subgenerator.asend(sent_in) + if timeout is not None: + next_value_routine = asyncio.wait_for( + subgenerator.asend(sent_in), + timeout=remaining_timeout_budget, + ) + start_timestamp = datetime_helpers.utcnow() + next_value = await next_value_routine + if remaining_timeout_budget is not None: + remaining_timeout_budget -= (datetime_helpers.utcnow() - start_timestamp).total_seconds() ## Yield from Wrapper to caller try: # yield last value from subgenerator @@ -143,13 +156,36 @@ async def retry_target_generator( return # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. - except Exception as exc: + except (Exception, asyncio.CancelledError) as exc: + if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + raise + last_exc = exc + finally: if subgenerator is not None: await subgenerator.aclose() - last_exc = exc - await _raise_or_sleep( - last_exc, predicate, on_error, timeout_seconds, deadline_dt, sleep + + if on_error is not None: + on_error(last_exc) + + # sleep and adjust timeout budget + if remaining_timeout_budget is not None: + now = datetime_helpers.utcnow() + if remaining_timeout_budget <= sleep: + # Chains the raising RetryError with the root cause error, + # which helps observability and debugability. 
+ raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format( + timeout + ), + last_exc, + ) from last_exc + else: + sleep = min(sleep, remaining_timeout_budget) + remaining_timeout_budget -= sleep + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) ) + await asyncio.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") @@ -208,71 +244,37 @@ async def retry_target( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: + if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + raise last_exc = exc - await _raise_or_sleep( - last_exc, predicate, on_error, timeout, deadline_dt, sleep + if on_error is not None: + on_error(last_exc) + + now = datetime_helpers.utcnow() + + if deadline_dt: + if deadline_dt <= now: + # Chains the raising RetryError with the root cause error, + # which helps observability and debugability. + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded while calling target function".format( + timeout + ), + last_exc, + ) from last_exc + else: + time_to_deadline = (deadline_dt - now).total_seconds() + sleep = min(time_to_deadline, sleep) + + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) ) + await asyncio.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") - -async def _raise_or_sleep( - last_exc, predicate, on_error, timeout, deadline_dt, sleep_time -): - """ - Helper function that contains retry and timeout logic. - Raise an exception if: - - the exception is not handled by the predicate - - the next sleep time would push it over the deadline - Otherwise, sleeps before next retry - - Args: - last_exc (Exception): the last exception that was encountered as part - running the target function - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. 
- It should return True to retry or False otherwise. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - timeout (float): total time the target was retried for. - deadline_dt (float): a UTC timestamp for when to stop retries - sleep_time (float): the amount of time to sleep before the next try - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - """ - if ( - not predicate(last_exc) - and not isinstance(last_exc, asyncio.TimeoutError) - ): - raise last_exc - if on_error is not None: - on_error(last_exc) - - now = datetime_helpers.utcnow() - - if deadline_dt: - if deadline_dt <= now: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format( - timeout - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_dt - now).total_seconds() - sleep_time = min(time_to_deadline, sleep_time) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep_time) - ) - await asyncio.sleep(sleep_time) - - class AsyncRetry: - """Exponential retry decorator for async functions. + """Exponential retry decorator for async coroutines. This class is a decorator used to add exponential back-off retry behavior to an RPC call. @@ -288,6 +290,8 @@ class AsyncRetry: maximum (float): The maximum amout of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (float): How long to keep retrying in seconds. + When the target is a generator, only time spent waiting on the + target or sleeping between retries is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. 
@@ -367,19 +371,11 @@ def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): target, self._predicate, sleep_generator, - deadline_dt=deadline_dt, - timeout_seconds=self._timeout, + timeout=self._timeout, on_error=on_error, ) if use_generator: - # for generator, bake deadline into function at call time - # time should start counting at generator creation, not first yield - deadline_dt = ( - (datetime_helpers.utcnow() + datetime.timedelta(seconds=self._timeout)) - if self._timeout - else None - ) - return functools.partial(retry_wrapped_generator, deadline_dt=deadline_dt) + return retry_wrapped_generator else: return retry_wrapped_func diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 017402f2..2ba5f9e2 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -406,10 +406,12 @@ async def test___init___when_retry_is_executed(self, sleep, uniform): target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) - async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None): + async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None, sleep_time=0): try: sent_in = None for i in range(num): + if sleep_time: + await asyncio.sleep(sleep_time) if error_on and i == error_on: raise ValueError("generator mock error") sent_in = yield (sent_in if sent_in else i) @@ -419,11 +421,6 @@ async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None): exceptions_seen.append(e) raise - async def _generator_infinite(self): - while True: - asyncio.sleep(5) - yield "done" - @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_success(self, sleep): @@ -463,6 +460,7 @@ async def test___call___generator_retry(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + on_error = 
mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -489,48 +487,44 @@ def increase_time(sleep_delay): sleep.side_effect = increase_time with pytest.raises(exceptions.RetryError): - unpacked = [i async for i in generator] - - assert on_error.call_count == 5 + unpacked = [i async for i in generator] + assert on_error.call_count == 4 # check the delays - assert sleep.call_count == 4 # once between each successive target calls + assert sleep.call_count == 3 # once between each successive target calls last_wait = sleep.call_args.args[0] total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + # next wait would have put us over, so ended early + assert last_wait == 4 + assert total_wait == 7 - assert abs(last_wait - 2.9) <= 1e3 # and not 8.0, because the last delay was shortened - assert abs(total_wait - 9.9) <= 1e3 # the same as the deadline - - - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) - @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___generator_await_hitting_deadline(self, sleep, uniform): - timeout_value = 10 - retry_ = retry_async.AsyncRetry(deadline=timeout_value) + async def test___call___generator_timeout_cancellations(self): + on_error = mock.Mock() + retry_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(ValueError), + deadline=0.2, + ) utcnow = datetime.datetime.utcnow() utcnow_patcher = mock.patch( "google.api_core.datetime_helpers.utcnow", return_value=utcnow ) - # test timeout before yielding anything - generator = retry_(self._generator_infinite)() + # ensure generator times out when awaiting past deadline + with pytest.raises(exceptions.RetryError): + infinite_gen = retry_(self._generator_mock, on_error)(sleep_time=60) + await anext(infinite_gen) + # ensure time between yields isn't counted with utcnow_patcher as patched_utcnow: - patched_utcnow.return_value += datetime.timedelta(seconds=20) - 
with pytest.raises(exceptions.RetryError) as retry_error: - await anext(generator) - assert f"{timeout_value:.1f}" in str(retry_error.value) - # test timeout mid-stream - exception_list = [] - generator = retry_(self._generator_mock)(10, exceptions_seen=exception_list) + generator = retry_(self._generator_mock)(sleep_time=0.05) + assert await anext(generator) == 0 + patched_utcnow.return_value += datetime.timedelta(20) + assert await anext(generator) == 1 + # ensure timeout budget is tracked + generator = retry_(self._generator_mock)(sleep_time=0.07) assert await anext(generator) == 0 assert await anext(generator) == 1 - assert await anext(generator) == 2 - with utcnow_patcher as patched_utcnow: - patched_utcnow.return_value += datetime.timedelta(seconds=20) - with pytest.raises(exceptions.RetryError) as retry_error: - await anext(generator) - assert f"{timeout_value:.1f}" in str(retry_error.value) - assert isinstance(exception_list[0], asyncio.CancelledError) + with pytest.raises(exceptions.RetryError): + await anext(generator) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio From 4242036f028d4fc45ba412eead90970483a467b8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 13 Feb 2023 16:23:03 -0800 Subject: [PATCH 030/204] improved comments --- google/api_core/retry_async.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ffef695f..397cf39e 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -76,8 +76,7 @@ async def check_if_exists(): async def retry_target_generator( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): - """Wrap an Asyncrhonous Generator Function in another that will - spawn and yeild from a new generator instance if an error occurs + """Wrap an Asyncrhonous Generator and retstart stream on errors This is the lowest-level retry helper. 
Generally, you'll use the higher-level retry helper :class:`Retry`. @@ -169,7 +168,6 @@ async def retry_target_generator( # sleep and adjust timeout budget if remaining_timeout_budget is not None: - now = datetime_helpers.utcnow() if remaining_timeout_budget <= sleep: # Chains the raising RetryError with the root cause error, # which helps observability and debugability. @@ -180,7 +178,6 @@ async def retry_target_generator( last_exc, ) from last_exc else: - sleep = min(sleep, remaining_timeout_budget) remaining_timeout_budget -= sleep _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) @@ -199,8 +196,8 @@ async def retry_target( higher-level retry helper :class:`Retry`. Args: - target(Coroutine): The function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. + target(Callable[..., Coroutine]): The coroutine function to call and retry. + This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. It should return True to retry or False otherwise. @@ -323,7 +320,7 @@ def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. Args: - func (Union[Coroutine, Callable[Any, AsynchronousGenerator]): The + func (Callable[..., Union[Coroutine, AsynchronousGenerator]]): The coroutine or async generator function to add retry behavior to. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will @@ -331,8 +328,8 @@ def __call__(self, func, on_error=None): Returns: Union[Coroutine, AsynchronousGenerator]: One of: - - A couroutine that will invoke ``func`` if ``func`` is a coroutine - - An AsynchronousGenerator that yields from ``func`` if ``func`` is an AsynchronousGenerator. 
+ - A couroutine that will invoke ``func`` if ``func`` is a coroutine function + - An AsynchronousGenerator that yields from ``func`` if ``func`` is an AsynchronousGenerator function. """ if self._on_error is not None: on_error = self._on_error From 9c4799c890ee37c45ac332689f16631f7483918b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 13 Feb 2023 16:23:17 -0800 Subject: [PATCH 031/204] added test for cancellation --- tests/asyncio/test_retry_async.py | 35 +++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 2ba5f9e2..34f9ba69 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -526,6 +526,41 @@ async def test___call___generator_timeout_cancellations(self): with pytest.raises(exceptions.RetryError): await anext(generator) + @pytest.mark.asyncio + async def test___call___generator_await_cancel_retryable(self): + """ + cancel calls should be supported as retryable errors + """ + # test without cancel as retryable + timeout_value = 0.1 + retry_ = retry_async.AsyncRetry() + utcnow = datetime.datetime.utcnow() + utcnow_patcher = mock.patch( + "google.api_core.datetime_helpers.utcnow", return_value=utcnow + ) + generator = retry_(self._generator_mock)(sleep_time=5) + await anext(generator) == 0 + task = asyncio.create_task(anext(generator)) + await asyncio.sleep(0.1) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + with pytest.raises(StopAsyncIteration): + await anext(generator) + # test with cancel as retryable + retry_cancel_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(asyncio.CancelledError), + ) + generator = retry_cancel_(self._generator_mock)(sleep_time=0.2) + await anext(generator) == 0 + await anext(generator) == 1 + task = asyncio.create_task(anext(generator)) + await asyncio.sleep(0.05) + task.cancel() + await task + assert task.result() == 0 + await 
anext(generator) == 1 + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_send(self, sleep): From 0bd6cabb829b310a6e57688864cf3d11223d4b85 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 13 Feb 2023 16:34:21 -0800 Subject: [PATCH 032/204] improved comments --- google/api_core/retry.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index f41f6dcd..9ad9020d 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -155,7 +155,7 @@ def retry_target_generator( higher-level retry helper :class:`Retry`. Args: - target(Callable[None, Generator[Any,Any,Any]]): A generator function to yield from. + target(Callable[None, Generator]): A generator function to yield from. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. @@ -170,7 +170,7 @@ def retry_target_generator( compatibility, if specified it will override ``timeout`` parameter. Returns: - Generator[Any,Any,Any]: returns a generator that wraps the target in retry logic. + Generator: returns a generator that wraps the target in retry logic. Raises: google.api_core.RetryError: If the deadline is exceeded while retrying. @@ -288,7 +288,7 @@ def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep_time ) if deadline < next_attempt_time: raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling target function".format( + "Deadline of {:.1f}s exceeded".format( timeout ), last_exc, @@ -416,9 +416,9 @@ def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. Args: - func (Callable[Any, Any]): The callable to add retry behavior to. - If a generator function is passed in (Callable[Any, Generator[Any,Any,Any]]), - a matcing retryable generator will be returned. 
+ func (Callable): The callable to add retry behavior to. + If a generator function is passed in (Callable[..., Generator]), + a matching retryable generator will be returned. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. From 67aeeafc7e7121c520cc35005022640bbabf86b9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 13 Feb 2023 17:08:38 -0800 Subject: [PATCH 033/204] on_error can yield into the generator stream --- google/api_core/retry.py | 104 +++++++++++++++--------------- google/api_core/retry_async.py | 18 ++++-- tests/asyncio/test_retry_async.py | 19 +++++- tests/unit/test_retry.py | 15 ++++- 4 files changed, 95 insertions(+), 61 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 9ad9020d..b04d7c96 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -164,8 +164,9 @@ def retry_target_generator( how long to sleep between retries. timeout (float): How long to keep retrying the target. on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. + retryable exception. Non-None values returned by `on_error` will be + yielded for downstream consumers. Any error raised by this function + will *not* be caught. deadline (float): DEPRECATED: use ``timeout`` instead. For backward compatibility, if specified it will override ``timeout`` parameter. 
@@ -194,10 +195,33 @@ def retry_target_generator( return (yield from subgenerator) except Exception as exc: + if not predicate(exc): + raise + last_exc = exc + finally: if subgenerator is not None: subgenerator.close() - last_exc = exc - _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep) + + if on_error is not None: + error_result = on_error(last_exc) + if error_result is not None: + yield error_result + + if deadline is not None: + next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( + seconds=sleep + ) + if deadline < next_attempt_time: + raise exceptions.RetryError( + "Deadline of {:.1f}s exceeded".format( + timeout + ), + last_exc, + ) from last_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) + ) + time.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") @@ -249,55 +273,28 @@ def retry_target( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: + if not predicate(exc): + raise last_exc = exc - _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") - - -def _raise_or_sleep(last_exc, predicate, on_error, timeout, deadline, sleep_time): - """ - Helper function that contains retry and timeout logic. - Raise an exception if: - - the exception is not handled by the predicate - - the next sleep time would push it over the deadline - Otherwise, sleeps before next retry - - Args: - last_exc (Exception): the last exception that was encountered as part - running the target function - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. 
- timeout (float): total time the target was retried for. - deadline (float): a UTC timestamp for when to stop retries - sleep_time (float): the amount of time to sleep before the next try - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - """ - if not predicate(last_exc): - raise last_exc - if on_error is not None: - on_error(last_exc) - if deadline is not None: - next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep_time + if on_error is not None: + on_error(last_exc) + if deadline is not None: + next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( + seconds=sleep + ) + if deadline < next_attempt_time: + raise exceptions.RetryError( + "Deadline of {:.1f}s exceeded".format( + timeout + ), + last_exc, + ) from last_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) ) - if deadline < next_attempt_time: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded".format( - timeout - ), - last_exc, - ) from last_exc - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep_time) - ) - time.sleep(sleep_time) + time.sleep(sleep) + raise ValueError("Sleep generator stopped yielding sleep values.") class Retry(object): """Exponential retry decorator. @@ -383,7 +380,8 @@ class Retry(object): timeout (float): How long to keep retrying, in seconds. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. + *not* be caught. When target is a generator function, non-None values + returned 1by `on_error` will be yielded for downstream consumers. is_generator (Optional[bool]): Indicates whether the input function should be treated as a generator function. If True, retries will `yield from` wrapped function. 
If false, retries will call wrapped @@ -421,7 +419,9 @@ def __call__(self, func, on_error=None): a matching retryable generator will be returned. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. + *not* be caught. When target is a generator function, non-None + values returned by `on_error` will be yielded for downstream + consumers. Returns: Callable: A callable that will invoke or yield from ``func`` with retry diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 397cf39e..2559c144 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -96,7 +96,9 @@ async def retry_target_generator( is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* - be caught. + be caught. Non-None values returned by `on_error` will be yielded + for downstream consumers. + deadline (float): DEPRECATED use ``timeout`` instead. For backward compatibility, if set it will override the ``timeout`` parameter. Returns: @@ -164,7 +166,9 @@ async def retry_target_generator( await subgenerator.aclose() if on_error is not None: - on_error(last_exc) + error_result = on_error(last_exc) + if error_result is not None: + yield error_result # sleep and adjust timeout budget if remaining_timeout_budget is not None: @@ -291,7 +295,10 @@ class AsyncRetry: target or sleeping between retries is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. + *not* be caught. When target is a generator function, non-None + values returned by `on_error` will be yielded for downstream + consumers. + deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. 
""" @@ -324,7 +331,10 @@ def __call__(self, func, on_error=None): coroutine or async generator function to add retry behavior to. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. + *not* be caught. When `func` is a generator function, non-None + values returned by `on_error` will be yielded for downstream + consumers. + Returns: Union[Coroutine, AsynchronousGenerator]: One of: diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 34f9ba69..5ef3beda 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -444,7 +444,7 @@ async def test___call___generator_success(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry(self, sleep): - on_error = mock.Mock() + on_error = mock.Mock(return_value=None) retry_ = retry_async.AsyncRetry( on_error=on_error, predicate=retry_async.if_exception_type(ValueError) ) @@ -460,7 +460,6 @@ async def test___call___generator_retry(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): - on_error = mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -500,7 +499,7 @@ def increase_time(sleep_delay): @pytest.mark.asyncio async def test___call___generator_timeout_cancellations(self): - on_error = mock.Mock() + on_error = mock.Mock(return_value=None) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), deadline=0.2, @@ -656,3 +655,17 @@ async def test___call___with_is_generator(self, sleep): gen = gen_retry_(wrapped)() unpacked = [await anext(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def 
test___call___generator_retry_on_error_yield(self, sleep): + error_token = "Err" + retry_ = retry_async.AsyncRetry( + on_error=lambda x: error_token, predicate=retry_async.if_exception_type(ValueError) + ) + generator = retry_(self._generator_mock)(error_on=3) + assert inspect.isasyncgen(generator) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [await anext(generator) for i in range(10)] + assert unpacked == [0, 1, 2, error_token, 0, 1, 2, error_token, 0, 1] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index eb22c724..5682dab0 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -506,7 +506,7 @@ def test___call___generator_success(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___generator_retry(self, sleep): - on_error = mock.Mock() + on_error = mock.Mock(return_value=None) retry_ = retry.Retry( on_error=on_error, predicate=retry.if_exception_type(ValueError) ) @@ -521,7 +521,7 @@ def test___call___generator_retry(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___call___generator_retry_hitting_deadline(self, sleep, uniform): - on_error = mock.Mock() + on_error = mock.Mock(return_value=None) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), initial=1.0, @@ -664,3 +664,14 @@ def test___call___with_is_generator(self, sleep): gen = gen_retry_(wrapped)() unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] + + @mock.patch("time.sleep", autospec=True) + def test___call___generator_retry_on_error_yield(self, sleep): + error_token = "Err" + retry_ = retry.Retry( + on_error=lambda x: error_token, predicate=retry.if_exception_type(ValueError) + ) + generator = retry_(self._generator_mock)(error_on=3) + # error thrown on 3 + unpacked = [next(generator) for i in range(10)] + assert unpacked == [0, 1, 2, error_token, 0, 1, 2, error_token, 
0, 1] From 0ea829750387c26a68eb3898bd23ee7b7ef396d2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 4 Apr 2023 11:33:01 -0700 Subject: [PATCH 034/204] added filter_func to retryable generator --- google/api_core/retry_async.py | 56 ++++++++++++++++++++++++++-------- 1 file changed, 43 insertions(+), 13 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 98ca7648..39eb8e77 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -72,9 +72,25 @@ async def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds +def consistency_filter(): + stream = [] + + def func(new_item, stream_idx, retry_num): + should_yield = True + if stream_idx < len(stream): + prev_value = stream[stream_idx] + if prev_value != new_item: + raise ValueError( + "Retry stream yielded different value than original stream" + ) + should_yield = False + stream.append(new_item) + return should_yield + return func + async def retry_target_generator( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs + target, predicate, sleep_generator, timeout=None, on_error=None, filter_func=None, **kwargs ): """Wrap an Asyncrhonous Generator and retstart stream on errors @@ -98,6 +114,13 @@ async def retry_target_generator( retryable exception. Any error raised by this function will *not* be caught. Non-None values returned by `on_error` will be yielded for downstream consumers. + filter_func (Callable[bool]): A function that filters the stream. + It receives the next item, the index of the item in the stream, + and the retry number. It should return a bool indicating whether + the item should be yielded or not. + `consistency_filter` is a useful example of this, which ensures + that the stream is consistent across retries, and raises an error + if it is not. deadline (float): DEPRECATED use ``timeout`` instead. 
For backward compatibility, if set it will override the ``timeout`` parameter. @@ -117,13 +140,17 @@ async def retry_target_generator( timeout = kwargs.get("deadline", timeout) remaining_timeout_budget = timeout if timeout else None + retry_num = -1 for sleep in sleep_generator: try: subgenerator = target() + retry_num += 1 + stream_idx = -1 sent_in = None while True: + stream_idx += 1 if remaining_timeout_budget <= 0: raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(timeout) @@ -139,18 +166,21 @@ async def retry_target_generator( next_value = await next_value_routine if remaining_timeout_budget is not None: remaining_timeout_budget -= (datetime_helpers.utcnow() - start_timestamp).total_seconds() - ## Yield from Wrapper to caller - try: - # yield last value from subgenerator - # exceptions from `athrow` and `aclose` are injected here - sent_in = yield next_value - except GeneratorExit: - # if wrapper received `aclose`, pass to subgenerator and close - await subgenerator.aclose() - return - except: - # if wrapper received `athrow`, pass to subgenerator - await subgenerator.athrow(*sys.exc_info()) + + # Check filter_func to see if we should yield + if filter_func is None or filter_func(next_value, stream_idx, retry_num): + ## Yield from Wrapper to caller + try: + # yield last value from subgenerator + # exceptions from `athrow` and `aclose` are injected here + sent_in = yield next_value + except GeneratorExit: + # if wrapper received `aclose`, pass to subgenerator and close + await subgenerator.aclose() + return + except: + # if wrapper received `athrow`, pass to subgenerator + await subgenerator.athrow(*sys.exc_info()) return except StopAsyncIteration: # if generator exhausted, return From b95265294e6d2b099be1f860577369e59e273337 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 5 Apr 2023 17:01:19 -0700 Subject: [PATCH 035/204] fixed error in time budget calculation --- google/api_core/retry_async.py | 3 +++ 1 file changed, 3 insertions(+) 
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 39eb8e77..6ff6bef3 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -191,6 +191,9 @@ async def retry_target_generator( if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): raise last_exc = exc + # reduce time budget by time spent before exception + if remaining_timeout_budget is not None: + remaining_timeout_budget -= (datetime_helpers.utcnow() - start_timestamp).total_seconds() finally: if subgenerator is not None: await subgenerator.aclose() From 6cb3e2dc6edac2b4b4c22496a3b507ceed3c5a24 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 13:10:08 -0700 Subject: [PATCH 036/204] added from field to raised timeout exception --- google/api_core/retry_async.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 6ff6bef3..1c178b74 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -153,7 +153,8 @@ async def retry_target_generator( stream_idx += 1 if remaining_timeout_budget <= 0: raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout) + "Timeout of {:.1f}s exceeded".format(timeout), + None ) ## Read from Subgenerator next_value_routine = subgenerator.asend(sent_in) From 99da116272ab049e936d2c9ab23eada2d4f6d012 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 16:56:15 -0700 Subject: [PATCH 037/204] removed filter_fn --- google/api_core/retry_async.py | 55 ++++++++-------------------------- 1 file changed, 13 insertions(+), 42 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 1c178b74..ffc32b3b 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -72,25 +72,9 @@ async def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds -def 
consistency_filter(): - stream = [] - - def func(new_item, stream_idx, retry_num): - should_yield = True - if stream_idx < len(stream): - prev_value = stream[stream_idx] - if prev_value != new_item: - raise ValueError( - "Retry stream yielded different value than original stream" - ) - should_yield = False - stream.append(new_item) - return should_yield - return func - async def retry_target_generator( - target, predicate, sleep_generator, timeout=None, on_error=None, filter_func=None, **kwargs + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): """Wrap an Asyncrhonous Generator and retstart stream on errors @@ -114,13 +98,6 @@ async def retry_target_generator( retryable exception. Any error raised by this function will *not* be caught. Non-None values returned by `on_error` will be yielded for downstream consumers. - filter_func (Callable[bool]): A function that filters the stream. - It receives the next item, the index of the item in the stream, - and the retry number. It should return a bool indicating whether - the item should be yielded or not. - `consistency_filter` is a useful example of this, which ensures - that the stream is consistent across retries, and raises an error - if it is not. deadline (float): DEPRECATED use ``timeout`` instead. For backward compatibility, if set it will override the ``timeout`` parameter. 
@@ -140,17 +117,13 @@ async def retry_target_generator( timeout = kwargs.get("deadline", timeout) remaining_timeout_budget = timeout if timeout else None - retry_num = -1 for sleep in sleep_generator: try: subgenerator = target() - retry_num += 1 - stream_idx = -1 sent_in = None while True: - stream_idx += 1 if remaining_timeout_budget <= 0: raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(timeout), @@ -168,20 +141,18 @@ async def retry_target_generator( if remaining_timeout_budget is not None: remaining_timeout_budget -= (datetime_helpers.utcnow() - start_timestamp).total_seconds() - # Check filter_func to see if we should yield - if filter_func is None or filter_func(next_value, stream_idx, retry_num): - ## Yield from Wrapper to caller - try: - # yield last value from subgenerator - # exceptions from `athrow` and `aclose` are injected here - sent_in = yield next_value - except GeneratorExit: - # if wrapper received `aclose`, pass to subgenerator and close - await subgenerator.aclose() - return - except: - # if wrapper received `athrow`, pass to subgenerator - await subgenerator.athrow(*sys.exc_info()) + ## Yield from Wrapper to caller + try: + # yield last value from subgenerator + # exceptions from `athrow` and `aclose` are injected here + sent_in = yield next_value + except GeneratorExit: + # if wrapper received `aclose`, pass to subgenerator and close + await subgenerator.aclose() + return + except: + # if wrapper received `athrow`, pass to subgenerator + await subgenerator.athrow(*sys.exc_info()) return except StopAsyncIteration: # if generator exhausted, return From 7f862d078dfa3f23939de741df654e891f48daec Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 16:57:13 -0700 Subject: [PATCH 038/204] ran blacken --- google/api_core/retry.py | 13 +++++-------- google/api_core/retry_async.py | 23 +++++++++++++---------- tests/asyncio/test_retry_async.py | 9 ++++++--- tests/unit/test_retry.py | 3 ++- 4 files changed, 26 
insertions(+), 22 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 60042b0e..2eba3acb 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -213,9 +213,7 @@ def retry_target_generator( ) if deadline < next_attempt_time: raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded".format( - timeout - ), + "Deadline of {:.1f}s exceeded".format(timeout), last_exc, ) from last_exc _LOGGER.debug( @@ -284,9 +282,7 @@ def retry_target( ) if deadline < next_attempt_time: raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded".format( - timeout - ), + "Deadline of {:.1f}s exceeded".format(timeout), last_exc, ) from last_exc _LOGGER.debug( @@ -296,6 +292,7 @@ def retry_target( raise ValueError("Sleep generator stopped yielding sleep values.") + class Retry(object): """Exponential retry decorator. @@ -419,8 +416,8 @@ def __call__(self, func, on_error=None): a matching retryable generator will be returned. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When target is a generator function, non-None - values returned by `on_error` will be yielded for downstream + *not* be caught. When target is a generator function, non-None + values returned by `on_error` will be yielded for downstream consumers. 
Returns: diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ffc32b3b..74b9197d 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -126,8 +126,7 @@ async def retry_target_generator( while True: if remaining_timeout_budget <= 0: raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout), - None + "Timeout of {:.1f}s exceeded".format(timeout), None ) ## Read from Subgenerator next_value_routine = subgenerator.asend(sent_in) @@ -139,7 +138,9 @@ async def retry_target_generator( start_timestamp = datetime_helpers.utcnow() next_value = await next_value_routine if remaining_timeout_budget is not None: - remaining_timeout_budget -= (datetime_helpers.utcnow() - start_timestamp).total_seconds() + remaining_timeout_budget -= ( + datetime_helpers.utcnow() - start_timestamp + ).total_seconds() ## Yield from Wrapper to caller try: @@ -165,7 +166,9 @@ async def retry_target_generator( last_exc = exc # reduce time budget by time spent before exception if remaining_timeout_budget is not None: - remaining_timeout_budget -= (datetime_helpers.utcnow() - start_timestamp).total_seconds() + remaining_timeout_budget -= ( + datetime_helpers.utcnow() - start_timestamp + ).total_seconds() finally: if subgenerator is not None: await subgenerator.aclose() @@ -181,9 +184,7 @@ async def retry_target_generator( # Chains the raising RetryError with the root cause error, # which helps observability and debugability. raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format( - timeout - ), + "Timeout of {:.1f}s exceeded".format(timeout), last_exc, ) from last_exc else: @@ -279,6 +280,7 @@ async def retry_target( raise ValueError("Sleep generator stopped yielding sleep values.") + class AsyncRetry: """Exponential retry decorator for async coroutines. @@ -300,7 +302,7 @@ class AsyncRetry: target or sleeping between retries is counted towards the timeout. 
on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When target is a generator function, non-None + *not* be caught. When target is a generator function, non-None values returned by `on_error` will be yielded for downstream consumers. @@ -336,8 +338,8 @@ def __call__(self, func, on_error=None): coroutine or async generator function to add retry behavior to. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When `func` is a generator function, non-None - values returned by `on_error` will be yielded for downstream + *not* be caught. When `func` is a generator function, non-None + values returned by `on_error` will be yielded for downstream consumers. @@ -386,6 +388,7 @@ def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): timeout=self._timeout, on_error=on_error, ) + if use_generator: return retry_wrapped_generator else: diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 5ef3beda..94509f92 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -406,7 +406,9 @@ async def test___init___when_retry_is_executed(self, sleep, uniform): target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) - async def _generator_mock(self, num=5, error_on=None, exceptions_seen=None, sleep_time=0): + async def _generator_mock( + self, num=5, error_on=None, exceptions_seen=None, sleep_time=0 + ): try: sent_in = None for i in range(num): @@ -486,7 +488,7 @@ def increase_time(sleep_delay): sleep.side_effect = increase_time with pytest.raises(exceptions.RetryError): - unpacked = [i async for i in generator] + unpacked = [i async for i in generator] assert on_error.call_count == 4 # check the delays @@ -661,7 +663,8 @@ async def test___call___with_is_generator(self, sleep): 
async def test___call___generator_retry_on_error_yield(self, sleep): error_token = "Err" retry_ = retry_async.AsyncRetry( - on_error=lambda x: error_token, predicate=retry_async.if_exception_type(ValueError) + on_error=lambda x: error_token, + predicate=retry_async.if_exception_type(ValueError), ) generator = retry_(self._generator_mock)(error_on=3) assert inspect.isasyncgen(generator) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 5682dab0..e7f4e4ec 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -669,7 +669,8 @@ def test___call___with_is_generator(self, sleep): def test___call___generator_retry_on_error_yield(self, sleep): error_token = "Err" retry_ = retry.Retry( - on_error=lambda x: error_token, predicate=retry.if_exception_type(ValueError) + on_error=lambda x: error_token, + predicate=retry.if_exception_type(ValueError), ) generator = retry_(self._generator_mock)(error_on=3) # error thrown on 3 From 04a4a695b716c0a91a47d617645319f72fc7f8b3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 17:11:37 -0700 Subject: [PATCH 039/204] removed generator auto-detection --- google/api_core/retry.py | 15 ++++----------- google/api_core/retry_async.py | 16 ++++++---------- 2 files changed, 10 insertions(+), 21 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 2eba3acb..8b6313d6 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -61,7 +61,6 @@ def check_if_exists(): import logging import random import time -from inspect import isgeneratorfunction import requests.exceptions @@ -379,10 +378,10 @@ class Retry(object): a retryable exception. Any error raised by this function will *not* be caught. When target is a generator function, non-None values returned 1by `on_error` will be yielded for downstream consumers. 
- is_generator (Optional[bool]): Indicates whether the input function + is_generator (bool): Indicates whether the input function should be treated as a generator function. If True, retries will `yield from` wrapped function. If false, retries will call wrapped - function directly. If None, function will be auto-detected. + function directly. Defaults to False. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ @@ -395,7 +394,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_DEADLINE, on_error=None, - is_generator=None, + is_generator=False, **kwargs ): self._predicate = predicate @@ -434,13 +433,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - # if the target is a generator function, make sure return is also a generator function - use_generator = ( - self._is_generator - if self._is_generator is not None - else isgeneratorfunction(func) - ) - retry_func = retry_target_generator if use_generator else retry_target + retry_func = retry_target_generator if self._is_generator else retry_target return retry_func( target, self._predicate, diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 74b9197d..44ed97d8 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -55,7 +55,6 @@ async def check_if_exists(): import datetime import functools import logging -from inspect import isasyncgenfunction import sys from google.api_core import datetime_helpers @@ -305,6 +304,10 @@ class AsyncRetry: *not* be caught. When target is a generator function, non-None values returned by `on_error` will be yielded for downstream consumers. + is_generator (bool): Indicates whether the input function + should be treated as a generator function. If True, retries will + `yield from` wrapped function. 
If false, retries will call wrapped + function directly. Defaults to False. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. @@ -318,7 +321,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_TIMEOUT, on_error=None, - is_generator=None, + is_generator=False, **kwargs ): self._predicate = predicate @@ -351,13 +354,6 @@ def __call__(self, func, on_error=None): if self._on_error is not None: on_error = self._on_error - # if the target is a generator function, make sure return is also a generator function - use_generator = ( - self._is_generator - if self._is_generator is not None - else isasyncgenfunction(func) - ) - @functools.wraps(func) async def retry_wrapped_func(*args, **kwargs): """A wrapper that calls target function with retry.""" @@ -389,7 +385,7 @@ def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): on_error=on_error, ) - if use_generator: + if self._is_generator: return retry_wrapped_generator else: return retry_wrapped_func From d20cf0826d30bf22b2c0d2d310e9ad2a5da5c7fe Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 17:40:04 -0700 Subject: [PATCH 040/204] fixed tests and lint --- google/api_core/retry_async.py | 3 +- tests/asyncio/test_retry_async.py | 111 ++++++++++++------------------ tests/unit/test_retry.py | 17 +++-- 3 files changed, 59 insertions(+), 72 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 44ed97d8..46c014d0 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -150,7 +150,8 @@ async def retry_target_generator( # if wrapper received `aclose`, pass to subgenerator and close await subgenerator.aclose() return - except: + # bare except used to delegate all exceptions to subgenerator + except: # noqa: E722 # if wrapper received `athrow`, pass to subgenerator await subgenerator.athrow(*sys.exc_info()) return diff --git a/tests/asyncio/test_retry_async.py 
b/tests/asyncio/test_retry_async.py index 94509f92..8d4a2c7b 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -15,7 +15,6 @@ import datetime import re import inspect -import functools import asyncio import mock @@ -426,7 +425,9 @@ async def _generator_mock( @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_success(self, sleep): - retry_ = retry_async.AsyncRetry() + from types import AsyncGeneratorType + + retry_ = retry_async.AsyncRetry(is_generator=True) decorated = retry_(self._generator_mock) @@ -434,7 +435,8 @@ async def test___call___generator_success(self, sleep): generator = decorated(num) # check types assert inspect.isasyncgen(generator) - assert type(decorated(num)) == type(self._generator_mock(num)) + assert isinstance(decorated(num), AsyncGeneratorType) + assert isinstance(self._generator_mock(num), AsyncGeneratorType) # check yield contents unpacked = [i async for i in generator] assert len(unpacked) == num @@ -448,13 +450,15 @@ async def test___call___generator_success(self, sleep): async def test___call___generator_retry(self, sleep): on_error = mock.Mock(return_value=None) retry_ = retry_async.AsyncRetry( - on_error=on_error, predicate=retry_async.if_exception_type(ValueError) + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + is_generator=True, ) generator = retry_(self._generator_mock)(error_on=3) assert inspect.isasyncgen(generator) # error thrown on 3 # generator should contain 0, 1, 2 looping - unpacked = [await anext(generator) for i in range(10)] + unpacked = [await generator.__anext__() for i in range(10)] assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] assert on_error.call_count == 3 @@ -469,6 +473,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): maximum=1024.0, multiplier=2.0, deadline=9.9, + is_generator=True, ) utcnow = datetime.datetime.utcnow() @@ -488,7 +493,7 @@ def 
increase_time(sleep_delay): sleep.side_effect = increase_time with pytest.raises(exceptions.RetryError): - unpacked = [i async for i in generator] + [i async for i in generator] assert on_error.call_count == 4 # check the delays @@ -505,6 +510,7 @@ async def test___call___generator_timeout_cancellations(self): retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), deadline=0.2, + is_generator=True, ) utcnow = datetime.datetime.utcnow() utcnow_patcher = mock.patch( @@ -513,19 +519,19 @@ async def test___call___generator_timeout_cancellations(self): # ensure generator times out when awaiting past deadline with pytest.raises(exceptions.RetryError): infinite_gen = retry_(self._generator_mock, on_error)(sleep_time=60) - await anext(infinite_gen) + await infinite_gen.__anext__() # ensure time between yields isn't counted with utcnow_patcher as patched_utcnow: generator = retry_(self._generator_mock)(sleep_time=0.05) - assert await anext(generator) == 0 + assert await generator.__anext__() == 0 patched_utcnow.return_value += datetime.timedelta(20) - assert await anext(generator) == 1 + assert await generator.__anext__() == 1 # ensure timeout budget is tracked generator = retry_(self._generator_mock)(sleep_time=0.07) - assert await anext(generator) == 0 - assert await anext(generator) == 1 + assert await generator.__anext__() == 0 + assert await generator.__anext__() == 1 with pytest.raises(exceptions.RetryError): - await anext(generator) + await generator.__anext__() @pytest.mark.asyncio async def test___call___generator_await_cancel_retryable(self): @@ -533,34 +539,32 @@ async def test___call___generator_await_cancel_retryable(self): cancel calls should be supported as retryable errors """ # test without cancel as retryable - timeout_value = 0.1 - retry_ = retry_async.AsyncRetry() + retry_ = retry_async.AsyncRetry(is_generator=True) utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - 
"google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) generator = retry_(self._generator_mock)(sleep_time=5) - await anext(generator) == 0 - task = asyncio.create_task(anext(generator)) + await generator.__anext__() == 0 + task = asyncio.create_task(generator.__anext__()) await asyncio.sleep(0.1) task.cancel() with pytest.raises(asyncio.CancelledError): await task with pytest.raises(StopAsyncIteration): - await anext(generator) + await generator.__anext__() # test with cancel as retryable retry_cancel_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(asyncio.CancelledError), + is_generator=True, ) generator = retry_cancel_(self._generator_mock)(sleep_time=0.2) - await anext(generator) == 0 - await anext(generator) == 1 - task = asyncio.create_task(anext(generator)) + await generator.__anext__() == 0 + await generator.__anext__() == 1 + task = asyncio.create_task(generator.__anext__()) await asyncio.sleep(0.05) task.cancel() await task assert task.result() == 0 - await anext(generator) == 1 + await generator.__anext__() == 1 @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -568,12 +572,12 @@ async def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ - retry_ = retry_async.AsyncRetry() + retry_ = retry_async.AsyncRetry(is_generator=True) decorated = retry_(self._generator_mock) generator = decorated(10) - result = await anext(generator) + result = await generator.__anext__() assert result == 0 in_messages = ["test_1", "hello", "world"] out_messages = [] @@ -581,82 +585,54 @@ async def test___call___with_generator_send(self, sleep): recv = await generator.asend(msg) out_messages.append(recv) assert in_messages == out_messages - assert await anext(generator) == 4 - assert await anext(generator) == 5 + assert await generator.__anext__() == 4 + assert await generator.__anext__() == 5 
@mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_close(self, sleep): - retry_ = retry_async.AsyncRetry() + retry_ = retry_async.AsyncRetry(is_generator=True) decorated = retry_(self._generator_mock) exception_list = [] generator = decorated(10, exceptions_seen=exception_list) for i in range(2): - await anext(generator) + await generator.__anext__() await generator.aclose() assert isinstance(exception_list[0], GeneratorExit) - assert generator.ag_running == False + assert generator.ag_running is False with pytest.raises(StopAsyncIteration): # calling next on closed generator should raise error - await anext(generator) + await generator.__anext__() @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_throw(self, sleep): retry_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(ValueError) + predicate=retry_async.if_exception_type(ValueError), + is_generator=True, ) decorated = retry_(self._generator_mock) exception_list = [] generator = decorated(10, exceptions_seen=exception_list) for i in range(2): - await anext(generator) + await generator.__anext__() with pytest.raises(BufferError): await generator.athrow(BufferError("test")) assert isinstance(exception_list[0], BufferError) with pytest.raises(StopAsyncIteration): # calling next on closed generator should raise error - await anext(generator) + await generator.__anext__() # should retry if throw retryable exception exception_list = [] generator = decorated(10, exceptions_seen=exception_list) for i in range(2): - await anext(generator) + await generator.__anext__() throw_val = await generator.athrow(ValueError("test")) assert throw_val == 0 assert isinstance(exception_list[0], ValueError) # calling next on closed generator should not raise error - assert await anext(generator) == 1 - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def 
test___call___with_is_generator(self, sleep): - gen_retry_ = retry_async.AsyncRetry( - is_generator=True, predicate=retry_async.if_exception_type(ValueError) - ) - not_gen_retry_ = retry_async.AsyncRetry( - is_generator=False, predicate=retry_async.if_exception_type(ValueError) - ) - auto_retry_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(ValueError) - ) - # force generator to act as non-generator - with pytest.raises(TypeError): - # error will be thrown because gen is coroutine - gen = not_gen_retry_(self._generator_mock)(10, error_on=3) - unpacked = [await anext(gen) for i in range(10)] - # wrapped generators won't be detected as generator functions - wrapped = functools.partial(self._generator_mock, 10, error_on=6) - assert not inspect.isasyncgenfunction(wrapped) - with pytest.raises(TypeError): - # error will be thrown because gen is coroutine - gen = auto_retry_(wrapped)() - unpacked = [next(gen) for i in range(10)] - # force non-detected to be accepted as generator - gen = gen_retry_(wrapped)() - unpacked = [await anext(gen) for i in range(10)] - assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] + assert await generator.__anext__() == 1 @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -665,10 +641,11 @@ async def test___call___generator_retry_on_error_yield(self, sleep): retry_ = retry_async.AsyncRetry( on_error=lambda x: error_token, predicate=retry_async.if_exception_type(ValueError), + is_generator=True, ) generator = retry_(self._generator_mock)(error_on=3) assert inspect.isasyncgen(generator) # error thrown on 3 # generator should contain 0, 1, 2 looping - unpacked = [await anext(generator) for i in range(10)] + unpacked = [await generator.__anext__() for i in range(10)] assert unpacked == [0, 1, 2, error_token, 0, 1, 2, error_token, 0, 1] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index e7f4e4ec..37f7417e 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -488,6 +488,8 
@@ def _generator_mock( @mock.patch("time.sleep", autospec=True) def test___call___generator_success(self, sleep): + import types + retry_ = retry.Retry() decorated = retry_(self._generator_mock) @@ -496,7 +498,8 @@ def test___call___generator_success(self, sleep): result = decorated(num) # check types assert inspect.isgenerator(result) - assert type(decorated(num)) == type(self._generator_mock(num)) + assert isinstance(decorated(num), types.GeneratorType) + assert isinstance(self._generator_mock(num), types.GeneratorType) # check yield contents unpacked = [i for i in result] assert len(unpacked) == num @@ -508,7 +511,9 @@ def test___call___generator_success(self, sleep): def test___call___generator_retry(self, sleep): on_error = mock.Mock(return_value=None) retry_ = retry.Retry( - on_error=on_error, predicate=retry.if_exception_type(ValueError) + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + is_generator=True, ) result = retry_(self._generator_mock)(error_on=3) assert inspect.isgenerator(result) @@ -528,6 +533,7 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): maximum=1024.0, multiplier=2.0, deadline=30.9, + is_generator=True, ) utcnow = datetime.datetime.utcnow() @@ -545,7 +551,7 @@ def increase_time(sleep_delay): sleep.side_effect = increase_time with pytest.raises(exceptions.RetryError): - unpacked = [i for i in generator] + [i for i in generator] assert on_error.call_count == 5 # check the delays @@ -614,7 +620,9 @@ def test___call___with_generator_close(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_generator_throw(self, sleep): - retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) + retry_ = retry.Retry( + predicate=retry.if_exception_type(ValueError), is_generator=True + ) decorated = retry_(self._generator_mock) exception_list = [] @@ -671,6 +679,7 @@ def test___call___generator_retry_on_error_yield(self, sleep): retry_ = retry.Retry( on_error=lambda x: 
error_token, predicate=retry.if_exception_type(ValueError), + is_generator=True, ) generator = retry_(self._generator_mock)(error_on=3) # error thrown on 3 From 183c221c3ceb33da9ea36a16d80c1c72db092ac2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 17:52:09 -0700 Subject: [PATCH 041/204] changed comments --- google/api_core/retry.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 8b6313d6..b87796c8 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -411,17 +411,14 @@ def __call__(self, func, on_error=None): Args: func (Callable): The callable to add retry behavior to. - If a generator function is passed in (Callable[..., Generator]), - a matching retryable generator will be returned. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When target is a generator function, non-None - values returned by `on_error` will be yielded for downstream - consumers. - + *not* be caught. + If `is_generator` is set, non-None values returned by + `on_error` will be yielded for downstream consumers. Returns: - Callable: A callable that will invoke or yield from ``func`` with retry - behavior. + Callable: A callable that will invoke ``func`` with retry + behavior``. 
""" if self._on_error is not None: on_error = self._on_error From d2217e4cef3561ef079e0800a1cac538f9b2b91e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 6 Apr 2023 17:54:38 -0700 Subject: [PATCH 042/204] fixed 3.11 failed test --- tests/unit/test_retry.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 37f7417e..4f7f76c2 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -661,13 +661,11 @@ def test___call___with_is_generator(self, sleep): # generator should not retry if error is thrown on yield gen = not_gen_retry_(self._generator_mock)(10, error_on=3) unpacked = [next(gen) for i in range(10)] - # wrapped generators won't be detected as generator functions wrapped = functools.partial(self._generator_mock, 10, error_on=6) - assert not inspect.isgeneratorfunction(wrapped) with pytest.raises(ValueError): # generator should not retry if error is thrown on yield gen = auto_retry_(wrapped)() - unpacked = [next(gen) for i in range(10)] + [next(gen) for i in range(10)] # force non-detected to be accepted as generator gen = gen_retry_(wrapped)() unpacked = [next(gen) for i in range(10)] From d4a9d308182a2bf2b39e8d37c47c19591773e5c8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 7 Apr 2023 10:36:29 -0700 Subject: [PATCH 043/204] added comments --- google/api_core/retry.py | 3 ++- google/api_core/retry_async.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index b87796c8..5e7bba0e 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -188,11 +188,12 @@ def retry_target_generator( last_exc = None subgenerator = None for sleep in sleep_generator: + # Start a new retry loop try: # create and yeild from a new instance of the generator from input generator function subgenerator = target() return (yield from subgenerator) - + # handle exceptions raised by the 
subgenerator except Exception as exc: if not predicate(exc): raise diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 46c014d0..ea280536 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -118,12 +118,17 @@ async def retry_target_generator( remaining_timeout_budget = timeout if timeout else None for sleep in sleep_generator: + # Start a new retry loop try: subgenerator = target() sent_in = None while True: - if remaining_timeout_budget <= 0: + # Check for expiration before starting + if ( + remaining_timeout_budget is not None + and remaining_timeout_budget <= 0 + ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(timeout), None ) @@ -136,6 +141,7 @@ async def retry_target_generator( ) start_timestamp = datetime_helpers.utcnow() next_value = await next_value_routine + if remaining_timeout_budget is not None: remaining_timeout_budget -= ( datetime_helpers.utcnow() - start_timestamp @@ -150,16 +156,16 @@ async def retry_target_generator( # if wrapper received `aclose`, pass to subgenerator and close await subgenerator.aclose() return - # bare except used to delegate all exceptions to subgenerator except: # noqa: E722 - # if wrapper received `athrow`, pass to subgenerator + # bare except catches any exception passed to `athrow` + # delegate error handling to subgenerator await subgenerator.athrow(*sys.exc_info()) return except StopAsyncIteration: # if generator exhausted, return return # pylint: disable=broad-except - # This function explicitly must deal with broad exceptions. 
+ # This function handles exceptions thrown by subgenerator except (Exception, asyncio.CancelledError) as exc: if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): raise From 06d45ccb901c9627e74d35ebd6f8112d25d66053 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:09:53 -0700 Subject: [PATCH 044/204] made streaming retries into a custom generator object --- google/api_core/retry.py | 147 ++++++++++--------- google/api_core/retry_async.py | 228 ++++++++++++++---------------- tests/asyncio/test_retry_async.py | 27 +--- tests/unit/test_retry.py | 21 +-- 4 files changed, 181 insertions(+), 242 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 5e7bba0e..d4c07be6 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -61,6 +61,7 @@ def check_if_exists(): import logging import random import time +from collections.abc import Generator import requests.exceptions @@ -145,84 +146,78 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT delay = min(delay * multiplier, maximum) -def retry_target_generator( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs -): - """Wrap a generator object and retry if it fails. - - This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. - - Args: - target(Callable[None, Generator]): A generator function to yield from. - This must be a nullary function - apply arguments with `functools.partial`. - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator (Iterable[float]): An infinite iterator that determines - how long to sleep between retries. - timeout (float): How long to keep retrying the target. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. 
Non-None values returned by `on_error` will be - yielded for downstream consumers. Any error raised by this function - will *not* be caught. - deadline (float): DEPRECATED: use ``timeout`` instead. For backward - compatibility, if specified it will override ``timeout`` parameter. - - Returns: - Generator: returns a generator that wraps the target in retry logic. - - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. - """ - - timeout = kwargs.get("deadline", timeout) - - if timeout is not None: - deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout) - else: - deadline = None +class RetryableGenerator(Generator): + + def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): + self.subgenerator_fn = target + self.subgenerator = self.subgenerator_fn() + self.predicate = predicate + self.sleep_generator = sleep_generator + self.on_error = on_error + self.timeout = timeout + if self.timeout is not None: + self.deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=self.timeout) + else: + self.deadline = None + + def __iter__(self): + return self + + def _handle_exception(self, exc): + if not self.predicate(exc): + raise exc + else: + if self.on_error: + self.on_error(exc) + try: + next_sleep = next(self.sleep_generator) + except StopIteration: + raise ValueError('Sleep generator stopped yielding sleep values') + if self.deadline is not None: + next_attempt = datetime_helpers.utcnow() + datetime.timedelta(seconds=next_sleep) + if self.deadline < next_attempt: + raise exceptions.RetryError(f"Deadline of {self.timeout:.1f} seconds exceeded", exc) from exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) + ) + time.sleep(next_sleep) + self.subgenerator = self.subgenerator_fn() - last_exc = None - subgenerator = None - for 
sleep in sleep_generator: - # Start a new retry loop + def __next__(self): try: - # create and yeild from a new instance of the generator from input generator function - subgenerator = target() - return (yield from subgenerator) - # handle exceptions raised by the subgenerator + return next(self.subgenerator) except Exception as exc: - if not predicate(exc): - raise - last_exc = exc - finally: - if subgenerator is not None: - subgenerator.close() - - if on_error is not None: - error_result = on_error(last_exc) - if error_result is not None: - yield error_result - - if deadline is not None: - next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep - ) - if deadline < next_attempt_time: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded".format(timeout), - last_exc, - ) from last_exc - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - time.sleep(sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") - + self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return self.__next__() + + def close(self): + if getattr(self.subgenerator, "close", None): + return self.subgenerator.close() + else: + raise NotImplementedError("close() not implemented for {}".format(self.subgenerator)) + + def send(self, value): + if getattr(self.subgenerator, "send", None): + try: + return self.subgenerator.send(value) + except Exception as exc: + self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return self.send(value) + else: + raise NotImplementedError("send() not implemented for {}".format(self.subgenerator)) + + def throw(self, typ, val=None, tb=None): + if getattr(self.subgenerator, "throw", None): + try: + return self.subgenerator.throw(typ, val, tb) + except Exception as exc: + self._handle_exception(exc) + # if retryable exception was handled, return next from new subgenerator + return 
self.__next__() + else: + raise NotImplementedError("throw() not implemented for {}".format(self.subgenerator)) def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs @@ -431,7 +426,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = retry_target_generator if self._is_generator else retry_target + retry_func = RetryableGenerator if self._is_generator else retry_target return retry_func( target, self._predicate, diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index ea280536..cc11264f 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -62,6 +62,7 @@ async def check_if_exists(): from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 from google.api_core.retry import if_transient_error +from google.api_core.retry import RetryableGenerator _LOGGER = logging.getLogger(__name__) @@ -71,137 +72,114 @@ async def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds +from collections.abc import AsyncGenerator -async def retry_target_generator( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs -): - """Wrap an Asyncrhonous Generator and retstart stream on errors - - This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. - - Args: - target(Callable[None, AsynchronousGenerator]): An asynchronous - generator function to yield from. This must be a nullary - function - apply arguments with `functools.partial`. - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. 
- sleep_generator (Iterable[float]): An infinite iterator that determines - how long to sleep between retries. - timeout (float): How long to keep retrying the target, in seconds. - Because generator execution isn't continuous, only time spent - waiting on the target generator or sleeping between retries - is counted towards the timeout. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. Non-None values returned by `on_error` will be yielded - for downstream consumers. - - deadline (float): DEPRECATED use ``timeout`` instead. For backward - compatibility, if set it will override the ``timeout`` parameter. - Returns: - AsynchronousGenerator: This function spawns new asynchronous generator - instances when called. - - Generator Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. 
- StopAsyncIteration: If the generator is exhausted - """ +class AsyncRetryableGenerator(AsyncGenerator): - last_exc = None - subgenerator = None + def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): + self.subgenerator_fn = target + self.subgenerator = self.subgenerator_fn() + self.predicate = predicate + self.sleep_generator = sleep_generator + self.on_error = on_error + self.timeout = timeout + self.remaining_timeout_budget = timeout if timeout else None - timeout = kwargs.get("deadline", timeout) - remaining_timeout_budget = timeout if timeout else None + def __aiter__(self): + return self - for sleep in sleep_generator: - # Start a new retry loop - try: - subgenerator = target() - - sent_in = None - while True: - # Check for expiration before starting - if ( - remaining_timeout_budget is not None - and remaining_timeout_budget <= 0 - ): + async def _handle_exception(self, exc): + if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + raise exc + else: + if self.on_error: + self.on_error(exc) + try: + next_sleep = next(self.sleep_generator) + except StopIteration: + raise ValueError('Sleep generator stopped yielding sleep values') + + if self.remaining_timeout_budget is not None: + if self.remaining_timeout_budget <= next_sleep: raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout), None - ) - ## Read from Subgenerator - next_value_routine = subgenerator.asend(sent_in) - if timeout is not None: - next_value_routine = asyncio.wait_for( - subgenerator.asend(sent_in), - timeout=remaining_timeout_budget, - ) - start_timestamp = datetime_helpers.utcnow() - next_value = await next_value_routine - - if remaining_timeout_budget is not None: - remaining_timeout_budget -= ( - datetime_helpers.utcnow() - start_timestamp - ).total_seconds() - - ## Yield from Wrapper to caller - try: - # yield last value from subgenerator - # exceptions from `athrow` and `aclose` are injected here - sent_in = yield 
next_value - except GeneratorExit: - # if wrapper received `aclose`, pass to subgenerator and close - await subgenerator.aclose() - return - except: # noqa: E722 - # bare except catches any exception passed to `athrow` - # delegate error handling to subgenerator - await subgenerator.athrow(*sys.exc_info()) - return - except StopAsyncIteration: - # if generator exhausted, return - return - # pylint: disable=broad-except - # This function handles exceptions thrown by subgenerator + "Timeout of {:.1f}s exceeded".format(self.timeout), + exc, + ) from exc + else: + self.remaining_timeout_budget -= next_sleep + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) + ) + await asyncio.sleep(next_sleep) + self.subgenerator = self.subgenerator_fn() + + def _subtract_time_from_budget(self, start_timestamp): + if self.remaining_timeout_budget is not None: + self.remaining_timeout_budget -= ( + datetime_helpers.utcnow() - start_timestamp + ).total_seconds() + + async def __anext__(self): + if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + None, + ) + try: + start_timestamp = datetime_helpers.utcnow() + next_val_routine = asyncio.wait_for( + self.subgenerator.__anext__(), + self.remaining_timeout_budget + ) + next_val = await next_val_routine + self._subtract_time_from_budget(start_timestamp) + return next_val except (Exception, asyncio.CancelledError) as exc: - if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise - last_exc = exc - # reduce time budget by time spent before exception - if remaining_timeout_budget is not None: - remaining_timeout_budget -= ( - datetime_helpers.utcnow() - start_timestamp - ).total_seconds() - finally: - if subgenerator is not None: - await subgenerator.aclose() - - if on_error is not None: - error_result = on_error(last_exc) - if error_result is not None: - yield 
error_result - - # sleep and adjust timeout budget - if remaining_timeout_budget is not None: - if remaining_timeout_budget <= sleep: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout), - last_exc, - ) from last_exc - else: - remaining_timeout_budget -= sleep - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - await asyncio.sleep(sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") + self._subtract_time_from_budget(start_timestamp) + await self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return await self.__anext__() + + async def aclose(self): + if getattr(self.subgenerator, "aclose", None): + return await self.subgenerator.aclose() + else: + raise NotImplementedError("aclose is not implemented for retried stream") + async def asend(self, value): + if getattr(self.subgenerator, "asend", None): + if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + None, + ) + try: + start_timestamp = datetime_helpers.utcnow() + next_val_routine = asyncio.wait_for( + self.subgenerator.asend(value), + self.remaining_timeout_budget + ) + next_val = await next_val_routine + self._subtract_time_from_budget(start_timestamp) + return next_val + except (Exception, asyncio.CancelledError) as exc: + self._subtract_time_from_budget(start_timestamp) + await self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return await self.__asend__(value) + else: + raise NotImplementedError("asend is not implemented for retried stream") + + async def athrow(self, typ, val=None, tb=None): + if getattr(self.subgenerator, "athrow", None): + try: + return await self.subgenerator.athrow(typ, 
val, tb) + except Exception as exc: + await self._handle_exception(exc) + # if retryable exception was handled, return next from new subgenerator + return await self.__anext__() + else: + raise NotImplementedError("athrow is not implemented for retried stream") async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs @@ -384,7 +362,7 @@ def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): self._initial, self._maximum, multiplier=self._multiplier ) # if the target is a generator function, make sure return is also a generator function - return retry_target_generator( + return AsyncRetryableGenerator( target, self._predicate, sleep_generator, diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 8d4a2c7b..9c765d22 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -14,7 +14,6 @@ import datetime import re -import inspect import asyncio import mock @@ -425,7 +424,7 @@ async def _generator_mock( @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_success(self, sleep): - from types import AsyncGeneratorType + from collections.abc import AsyncGenerator retry_ = retry_async.AsyncRetry(is_generator=True) @@ -434,9 +433,8 @@ async def test___call___generator_success(self, sleep): num = 10 generator = decorated(num) # check types - assert inspect.isasyncgen(generator) - assert isinstance(decorated(num), AsyncGeneratorType) - assert isinstance(self._generator_mock(num), AsyncGeneratorType) + assert isinstance(decorated(num), AsyncGenerator) + assert isinstance(self._generator_mock(num), AsyncGenerator) # check yield contents unpacked = [i async for i in generator] assert len(unpacked) == num @@ -455,7 +453,6 @@ async def test___call___generator_retry(self, sleep): is_generator=True, ) generator = retry_(self._generator_mock)(error_on=3) - assert inspect.isasyncgen(generator) # error thrown on 3 # generator 
should contain 0, 1, 2 looping unpacked = [await generator.__anext__() for i in range(10)] @@ -542,7 +539,7 @@ async def test___call___generator_await_cancel_retryable(self): retry_ = retry_async.AsyncRetry(is_generator=True) utcnow = datetime.datetime.utcnow() mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) - generator = retry_(self._generator_mock)(sleep_time=5) + generator = retry_(self._generator_mock)(sleep_time=0.2) await generator.__anext__() == 0 task = asyncio.create_task(generator.__anext__()) await asyncio.sleep(0.1) @@ -600,7 +597,6 @@ async def test___call___with_generator_close(self, sleep): await generator.aclose() assert isinstance(exception_list[0], GeneratorExit) - assert generator.ag_running is False with pytest.raises(StopAsyncIteration): # calling next on closed generator should raise error await generator.__anext__() @@ -634,18 +630,3 @@ async def test___call___with_generator_throw(self, sleep): # calling next on closed generator should not raise error assert await generator.__anext__() == 1 - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___generator_retry_on_error_yield(self, sleep): - error_token = "Err" - retry_ = retry_async.AsyncRetry( - on_error=lambda x: error_token, - predicate=retry_async.if_exception_type(ValueError), - is_generator=True, - ) - generator = retry_(self._generator_mock)(error_on=3) - assert inspect.isasyncgen(generator) - # error thrown on 3 - # generator should contain 0, 1, 2 looping - unpacked = [await generator.__anext__() for i in range(10)] - assert unpacked == [0, 1, 2, error_token, 0, 1, 2, error_token, 0, 1] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 4f7f76c2..2850f788 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -1,4 +1,3 @@ -# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the 
License. @@ -15,7 +14,6 @@ import datetime import itertools import re -import inspect import functools import mock @@ -489,6 +487,7 @@ def _generator_mock( @mock.patch("time.sleep", autospec=True) def test___call___generator_success(self, sleep): import types + import collections retry_ = retry.Retry() @@ -497,8 +496,9 @@ def test___call___generator_success(self, sleep): num = 10 result = decorated(num) # check types - assert inspect.isgenerator(result) + assert isinstance(decorated(num), collections.abc.Iterable) assert isinstance(decorated(num), types.GeneratorType) + assert isinstance(self._generator_mock(num), collections.abc.Iterable) assert isinstance(self._generator_mock(num), types.GeneratorType) # check yield contents unpacked = [i for i in result] @@ -516,7 +516,6 @@ def test___call___generator_retry(self, sleep): is_generator=True, ) result = retry_(self._generator_mock)(error_on=3) - assert inspect.isgenerator(result) # error thrown on 3 # generator should contain 0, 1, 2 looping unpacked = [next(result) for i in range(10)] @@ -613,7 +612,6 @@ def test___call___with_generator_close(self, sleep): next(generator) generator.close() assert isinstance(exception_list[0], GeneratorExit) - assert inspect.getgeneratorstate(generator) == "GEN_CLOSED" with pytest.raises(StopIteration): # calling next on closed generator should raise error next(generator) @@ -632,7 +630,6 @@ def test___call___with_generator_throw(self, sleep): with pytest.raises(BufferError): generator.throw(BufferError("test")) assert isinstance(exception_list[0], BufferError) - assert inspect.getgeneratorstate(generator) == "GEN_CLOSED" with pytest.raises(StopIteration): # calling next on closed generator should raise error next(generator) @@ -671,15 +668,3 @@ def test___call___with_is_generator(self, sleep): unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] - @mock.patch("time.sleep", autospec=True) - def 
test___call___generator_retry_on_error_yield(self, sleep): - error_token = "Err" - retry_ = retry.Retry( - on_error=lambda x: error_token, - predicate=retry.if_exception_type(ValueError), - is_generator=True, - ) - generator = retry_(self._generator_mock)(error_on=3) - # error thrown on 3 - unpacked = [next(generator) for i in range(10)] - assert unpacked == [0, 1, 2, error_token, 0, 1, 2, error_token, 0, 1] From de41a148047634fdff08d8800c8aaae84f981127 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:42:44 -0700 Subject: [PATCH 045/204] added tests for iterators --- google/api_core/retry.py | 6 ++--- google/api_core/retry_async.py | 28 ++++++++++++++++------- tests/asyncio/test_retry_async.py | 38 +++++++++++++++++++++++++++++++ tests/unit/test_retry.py | 27 +++++++++++++++++++++- 4 files changed, 87 insertions(+), 12 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index d4c07be6..cb75d6f5 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -195,7 +195,7 @@ def close(self): if getattr(self.subgenerator, "close", None): return self.subgenerator.close() else: - raise NotImplementedError("close() not implemented for {}".format(self.subgenerator)) + raise AttributeError("close() not implemented for {}".format(self.subgenerator)) def send(self, value): if getattr(self.subgenerator, "send", None): @@ -206,7 +206,7 @@ def send(self, value): # if retryable exception was handled, try again with new subgenerator return self.send(value) else: - raise NotImplementedError("send() not implemented for {}".format(self.subgenerator)) + raise AttributeError("send() not implemented for {}".format(self.subgenerator)) def throw(self, typ, val=None, tb=None): if getattr(self.subgenerator, "throw", None): @@ -217,7 +217,7 @@ def throw(self, typ, val=None, tb=None): # if retryable exception was handled, return next from new subgenerator return self.__next__() else: - raise NotImplementedError("throw() not implemented 
for {}".format(self.subgenerator)) + raise AttributeError("throw() not implemented for {}".format(self.subgenerator)) def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index cc11264f..48c888c2 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -55,14 +55,15 @@ async def check_if_exists(): import datetime import functools import logging -import sys +import inspect + +from collections.abc import AsyncGenerator from google.api_core import datetime_helpers from google.api_core import exceptions from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 from google.api_core.retry import if_transient_error -from google.api_core.retry import RetryableGenerator _LOGGER = logging.getLogger(__name__) @@ -72,19 +73,25 @@ async def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds -from collections.abc import AsyncGenerator class AsyncRetryableGenerator(AsyncGenerator): def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): self.subgenerator_fn = target - self.subgenerator = self.subgenerator_fn() + self.subgenerator = None self.predicate = predicate self.sleep_generator = sleep_generator self.on_error = on_error self.timeout = timeout self.remaining_timeout_budget = timeout if timeout else None + async def _ensure_subgenerator(self): + if not self.subgenerator: + if inspect.iscoroutinefunction(self.subgenerator_fn): + self.subgenerator = await self.subgenerator_fn() + else: + self.subgenerator = self.subgenerator_fn() + def __aiter__(self): return self @@ -111,7 +118,8 @@ async def _handle_exception(self, exc): "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) ) await asyncio.sleep(next_sleep) - self.subgenerator = self.subgenerator_fn() + self.subgenerator = None 
+ await self._ensure_subgenerator() def _subtract_time_from_budget(self, start_timestamp): if self.remaining_timeout_budget is not None: @@ -120,6 +128,7 @@ def _subtract_time_from_budget(self, start_timestamp): ).total_seconds() async def __anext__(self): + await self._ensure_subgenerator() if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), @@ -141,12 +150,14 @@ async def __anext__(self): return await self.__anext__() async def aclose(self): + await self._ensure_subgenerator() if getattr(self.subgenerator, "aclose", None): return await self.subgenerator.aclose() else: - raise NotImplementedError("aclose is not implemented for retried stream") + raise AttributeError("aclose is not implemented for retried stream") async def asend(self, value): + await self._ensure_subgenerator() if getattr(self.subgenerator, "asend", None): if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: raise exceptions.RetryError( @@ -168,9 +179,10 @@ async def asend(self, value): # if retryable exception was handled, try again with new subgenerator return await self.__asend__(value) else: - raise NotImplementedError("asend is not implemented for retried stream") + raise AttributeError("asend is not implemented for retried stream") async def athrow(self, typ, val=None, tb=None): + await self._ensure_subgenerator() if getattr(self.subgenerator, "athrow", None): try: return await self.subgenerator.athrow(typ, val, tb) @@ -179,7 +191,7 @@ async def athrow(self, typ, val=None, tb=None): # if retryable exception was handled, return next from new subgenerator return await self.__anext__() else: - raise NotImplementedError("athrow is not implemented for retried stream") + raise AttributeError("athrow is not implemented for retried stream") async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs diff --git 
a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 9c765d22..2c0826d3 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -630,3 +630,41 @@ async def test___call___with_generator_throw(self, sleep): # calling next on closed generator should not raise error assert await generator.__anext__() == 1 + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_send_close_throw(self, sleep): + """ + Send should be passed through retry into target generator + """ + retry_ = retry_async.AsyncRetry(is_generator=True) + + async def iterable_fn(n): + class CustomIterator: + def __init__(self, n): + self.n = n + self.i = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.i == self.n: + raise StopAsyncIteration + self.i += 1 + return self.i - 1 + return CustomIterator(n) + + decorated = retry_(iterable_fn) + + retryable = decorated(10) + result = await retryable.__anext__() + assert result == 0 + with pytest.raises(AttributeError): + await retryable.asend("test") + assert await retryable.__anext__() == 1 + with pytest.raises(AttributeError): + await retryable.aclose() + assert await retryable.__anext__() == 2 + with pytest.raises(AttributeError): + await retryable.athrow(ValueError("test")) + assert await retryable.__anext__() == 3 diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 2850f788..f8d0fbe6 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -565,7 +565,7 @@ def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ - retry_ = retry.Retry() + retry_ = retry.Retry(is_generator=True) decorated = retry_(self._generator_mock) @@ -581,6 +581,31 @@ def test___call___with_generator_send(self, sleep): assert next(generator) == 4 assert next(generator) == 5 + @mock.patch("time.sleep", autospec=True) + def 
test___call___with_iterable_send_close_throw(self, sleep): + """ + Send, Throw, and Close should raise AttributeErrors + """ + retry_ = retry.Retry(is_generator=True) + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + retryable = decorated(10) + result = next(retryable) + assert result == 0 + with pytest.raises(AttributeError): + retryable.send("test") + assert next(retryable) == 1 + with pytest.raises(AttributeError): + retryable.close() + assert next(retryable) == 2 + with pytest.raises(AttributeError): + retryable.throw(ValueError) + assert next(retryable) == 3 + @mock.patch("time.sleep", autospec=True) def test___call___with_generator_return(self, sleep): """ From dcb3766d09ff0e99c80b8fde99af0e6811d3e8eb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:44:18 -0700 Subject: [PATCH 046/204] added test for non-awaitable target --- tests/asyncio/test_retry_async.py | 45 +++++++++++++++++++++++++++++-- 1 file changed, 43 insertions(+), 2 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 2c0826d3..940e5265 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -632,9 +632,10 @@ async def test___call___with_generator_throw(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___with_iterable_send_close_throw(self, sleep): + async def test___call___with_iterable_coroutine_send_close_throw(self, sleep): """ - Send should be passed through retry into target generator + Send, Throw, and Close should raise AttributeErrors when target is a coroutine that + produces an iterable """ retry_ = retry_async.AsyncRetry(is_generator=True) @@ -668,3 +669,43 @@ async def __anext__(self): with pytest.raises(AttributeError): await retryable.athrow(ValueError("test")) assert await retryable.__anext__() == 3 + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def 
test___call___with_iterable_send_close_throw(self, sleep): + """ + Send, Throw, and Close should raise AttributeErrors when target is a + function that produces an iterable + """ + retry_ = retry_async.AsyncRetry(is_generator=True) + + def iterable_fn(n): + class CustomIterator: + def __init__(self, n): + self.n = n + self.i = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.i == self.n: + raise StopAsyncIteration + self.i += 1 + return self.i - 1 + return CustomIterator(n) + + decorated = retry_(iterable_fn) + + retryable = decorated(10) + result = await retryable.__anext__() + assert result == 0 + with pytest.raises(AttributeError): + await retryable.asend("test") + assert await retryable.__anext__() == 1 + with pytest.raises(AttributeError): + await retryable.aclose() + assert await retryable.__anext__() == 2 + with pytest.raises(AttributeError): + await retryable.athrow(ValueError("test")) + assert await retryable.__anext__() == 3 From dd368e43a373f2e184e4c29ebdd1ab503d92df07 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:52:08 -0700 Subject: [PATCH 047/204] changed is_generator to is_stream --- google/api_core/retry.py | 10 +++++----- google/api_core/retry_async.py | 20 ++++++++++---------- tests/asyncio/test_retry_async.py | 22 +++++++++++----------- tests/unit/test_retry.py | 16 ++++++++-------- 4 files changed, 34 insertions(+), 34 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index cb75d6f5..055faddd 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -374,7 +374,7 @@ class Retry(object): a retryable exception. Any error raised by this function will *not* be caught. When target is a generator function, non-None values returned 1by `on_error` will be yielded for downstream consumers. - is_generator (bool): Indicates whether the input function + is_stream (bool): Indicates whether the input function should be treated as a generator function. 
If True, retries will `yield from` wrapped function. If false, retries will call wrapped function directly. Defaults to False. @@ -390,7 +390,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_DEADLINE, on_error=None, - is_generator=False, + is_stream=False, **kwargs ): self._predicate = predicate @@ -400,7 +400,7 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error - self._is_generator = is_generator + self._is_stream = is_stream def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. @@ -410,7 +410,7 @@ def __call__(self, func, on_error=None): on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - If `is_generator` is set, non-None values returned by + If `is_stream` is set, non-None values returned by `on_error` will be yielded for downstream consumers. Returns: Callable: A callable that will invoke ``func`` with retry @@ -426,7 +426,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = RetryableGenerator if self._is_generator else retry_target + retry_func = RetryableGenerator if self._is_stream else retry_target return retry_func( target, self._predicate, diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 48c888c2..6cf472d9 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -301,10 +301,10 @@ class AsyncRetry: *not* be caught. When target is a generator function, non-None values returned by `on_error` will be yielded for downstream consumers. - is_generator (bool): Indicates whether the input function - should be treated as a generator function. If True, retries will - `yield from` wrapped function. If false, retries will call wrapped - function directly. Defaults to False. 
+ is_stream (bool): Indicates whether the input function + should be treated as an iterable function. If True, + the retries will be `yield from` wrapped function. If false, + retries will call wrapped function directly. Defaults to False. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. @@ -318,7 +318,7 @@ def __init__( multiplier=_DEFAULT_DELAY_MULTIPLIER, timeout=_DEFAULT_TIMEOUT, on_error=None, - is_generator=False, + is_stream=False, **kwargs ): self._predicate = predicate @@ -328,7 +328,7 @@ def __init__( self._timeout = kwargs.get("deadline", timeout) self._deadline = self._timeout self._on_error = on_error - self._is_generator = is_generator + self._is_stream = is_stream def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. @@ -367,13 +367,13 @@ async def retry_wrapped_func(*args, **kwargs): ) @functools.wraps(func) - def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): + def retry_wrapped_stream(*args, deadline_dt=None, **kwargs): """A wrapper that yields through target generator with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - # if the target is a generator function, make sure return is also a generator function + # if the target is a generator or iterable function, make sure return is also a generator function return AsyncRetryableGenerator( target, self._predicate, @@ -382,8 +382,8 @@ def retry_wrapped_generator(*args, deadline_dt=None, **kwargs): on_error=on_error, ) - if self._is_generator: - return retry_wrapped_generator + if self._is_stream: + return retry_wrapped_stream else: return retry_wrapped_func diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 940e5265..d1ca41f5 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -426,7 +426,7 @@ async def 
_generator_mock( async def test___call___generator_success(self, sleep): from collections.abc import AsyncGenerator - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(self._generator_mock) @@ -450,7 +450,7 @@ async def test___call___generator_retry(self, sleep): retry_ = retry_async.AsyncRetry( on_error=on_error, predicate=retry_async.if_exception_type(ValueError), - is_generator=True, + is_stream=True, ) generator = retry_(self._generator_mock)(error_on=3) # error thrown on 3 @@ -470,7 +470,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): maximum=1024.0, multiplier=2.0, deadline=9.9, - is_generator=True, + is_stream=True, ) utcnow = datetime.datetime.utcnow() @@ -507,7 +507,7 @@ async def test___call___generator_timeout_cancellations(self): retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), deadline=0.2, - is_generator=True, + is_stream=True, ) utcnow = datetime.datetime.utcnow() utcnow_patcher = mock.patch( @@ -536,7 +536,7 @@ async def test___call___generator_await_cancel_retryable(self): cancel calls should be supported as retryable errors """ # test without cancel as retryable - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry(is_stream=True) utcnow = datetime.datetime.utcnow() mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) generator = retry_(self._generator_mock)(sleep_time=0.2) @@ -551,7 +551,7 @@ async def test___call___generator_await_cancel_retryable(self): # test with cancel as retryable retry_cancel_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(asyncio.CancelledError), - is_generator=True, + is_stream=True, ) generator = retry_cancel_(self._generator_mock)(sleep_time=0.2) await generator.__anext__() == 0 @@ -569,7 +569,7 @@ async def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into 
target generator """ - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(self._generator_mock) @@ -588,7 +588,7 @@ async def test___call___with_generator_send(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_close(self, sleep): - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(self._generator_mock) exception_list = [] generator = decorated(10, exceptions_seen=exception_list) @@ -606,7 +606,7 @@ async def test___call___with_generator_close(self, sleep): async def test___call___with_generator_throw(self, sleep): retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), - is_generator=True, + is_stream=True, ) decorated = retry_(self._generator_mock) exception_list = [] @@ -637,7 +637,7 @@ async def test___call___with_iterable_coroutine_send_close_throw(self, sleep): Send, Throw, and Close should raise AttributeErrors when target is a coroutine that produces an iterable """ - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry(is_stream=True) async def iterable_fn(n): class CustomIterator: @@ -677,7 +677,7 @@ async def test___call___with_iterable_send_close_throw(self, sleep): Send, Throw, and Close should raise AttributeErrors when target is a function that produces an iterable """ - retry_ = retry_async.AsyncRetry(is_generator=True) + retry_ = retry_async.AsyncRetry(is_stream=True) def iterable_fn(n): class CustomIterator: diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index f8d0fbe6..94d70d58 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -513,7 +513,7 @@ def test___call___generator_retry(self, sleep): retry_ = retry.Retry( on_error=on_error, predicate=retry.if_exception_type(ValueError), - is_generator=True, + is_stream=True, ) result = 
retry_(self._generator_mock)(error_on=3) # error thrown on 3 @@ -532,7 +532,7 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): maximum=1024.0, multiplier=2.0, deadline=30.9, - is_generator=True, + is_stream=True, ) utcnow = datetime.datetime.utcnow() @@ -565,7 +565,7 @@ def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ - retry_ = retry.Retry(is_generator=True) + retry_ = retry.Retry(is_stream=True) decorated = retry_(self._generator_mock) @@ -586,7 +586,7 @@ def test___call___with_iterable_send_close_throw(self, sleep): """ Send, Throw, and Close should raise AttributeErrors """ - retry_ = retry.Retry(is_generator=True) + retry_ = retry.Retry(is_stream=True) def iterable_fn(n): return iter(range(n)) @@ -644,7 +644,7 @@ def test___call___with_generator_close(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_generator_throw(self, sleep): retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), is_generator=True + predicate=retry.if_exception_type(ValueError), is_stream=True ) decorated = retry_(self._generator_mock) @@ -670,12 +670,12 @@ def test___call___with_generator_throw(self, sleep): assert next(generator) == 1 @mock.patch("time.sleep", autospec=True) - def test___call___with_is_generator(self, sleep): + def test___call___with_is_stream(self, sleep): gen_retry_ = retry.Retry( - is_generator=True, predicate=retry.if_exception_type(ValueError) + is_stream=True, predicate=retry.if_exception_type(ValueError) ) not_gen_retry_ = retry.Retry( - is_generator=False, predicate=retry.if_exception_type(ValueError) + is_stream=False, predicate=retry.if_exception_type(ValueError) ) auto_retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) # force generator to act as non-generator From 452b9bbd9dfed347d08aa09c5e73d3dc2e971e38 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 13 Apr 2023 23:54:18 +0000 Subject: [PATCH 048/204] 
=?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/api_core/retry.py | 30 +++++++++++++++++++++--------- google/api_core/retry_async.py | 20 ++++++++++++-------- tests/asyncio/test_retry_async.py | 2 ++ tests/unit/test_retry.py | 1 - 4 files changed, 35 insertions(+), 18 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 055faddd..e3eaffde 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -147,7 +147,6 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT class RetryableGenerator(Generator): - def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): self.subgenerator_fn = target self.subgenerator = self.subgenerator_fn() @@ -156,7 +155,9 @@ def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=No self.on_error = on_error self.timeout = timeout if self.timeout is not None: - self.deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=self.timeout) + self.deadline = datetime_helpers.utcnow() + datetime.timedelta( + seconds=self.timeout + ) else: self.deadline = None @@ -172,11 +173,15 @@ def _handle_exception(self, exc): try: next_sleep = next(self.sleep_generator) except StopIteration: - raise ValueError('Sleep generator stopped yielding sleep values') + raise ValueError("Sleep generator stopped yielding sleep values") if self.deadline is not None: - next_attempt = datetime_helpers.utcnow() + datetime.timedelta(seconds=next_sleep) + next_attempt = datetime_helpers.utcnow() + datetime.timedelta( + seconds=next_sleep + ) if self.deadline < next_attempt: - raise exceptions.RetryError(f"Deadline of {self.timeout:.1f} seconds exceeded", exc) from exc + raise exceptions.RetryError( + f"Deadline of 
{self.timeout:.1f} seconds exceeded", exc + ) from exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) ) @@ -195,7 +200,9 @@ def close(self): if getattr(self.subgenerator, "close", None): return self.subgenerator.close() else: - raise AttributeError("close() not implemented for {}".format(self.subgenerator)) + raise AttributeError( + "close() not implemented for {}".format(self.subgenerator) + ) def send(self, value): if getattr(self.subgenerator, "send", None): @@ -206,7 +213,9 @@ def send(self, value): # if retryable exception was handled, try again with new subgenerator return self.send(value) else: - raise AttributeError("send() not implemented for {}".format(self.subgenerator)) + raise AttributeError( + "send() not implemented for {}".format(self.subgenerator) + ) def throw(self, typ, val=None, tb=None): if getattr(self.subgenerator, "throw", None): @@ -217,7 +226,10 @@ def throw(self, typ, val=None, tb=None): # if retryable exception was handled, return next from new subgenerator return self.__next__() else: - raise AttributeError("throw() not implemented for {}".format(self.subgenerator)) + raise AttributeError( + "throw() not implemented for {}".format(self.subgenerator) + ) + def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs @@ -391,7 +403,7 @@ def __init__( timeout=_DEFAULT_DEADLINE, on_error=None, is_stream=False, - **kwargs + **kwargs, ): self._predicate = predicate self._initial = initial diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 6cf472d9..d93c056a 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -75,7 +75,6 @@ async def check_if_exists(): class AsyncRetryableGenerator(AsyncGenerator): - def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): self.subgenerator_fn = target self.subgenerator = None @@ -104,7 +103,7 @@ async def _handle_exception(self, exc): try: next_sleep = 
next(self.sleep_generator) except StopIteration: - raise ValueError('Sleep generator stopped yielding sleep values') + raise ValueError("Sleep generator stopped yielding sleep values") if self.remaining_timeout_budget is not None: if self.remaining_timeout_budget <= next_sleep: @@ -129,7 +128,10 @@ def _subtract_time_from_budget(self, start_timestamp): async def __anext__(self): await self._ensure_subgenerator() - if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + if ( + self.remaining_timeout_budget is not None + and self.remaining_timeout_budget <= 0 + ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), None, @@ -137,8 +139,7 @@ async def __anext__(self): try: start_timestamp = datetime_helpers.utcnow() next_val_routine = asyncio.wait_for( - self.subgenerator.__anext__(), - self.remaining_timeout_budget + self.subgenerator.__anext__(), self.remaining_timeout_budget ) next_val = await next_val_routine self._subtract_time_from_budget(start_timestamp) @@ -159,7 +160,10 @@ async def aclose(self): async def asend(self, value): await self._ensure_subgenerator() if getattr(self.subgenerator, "asend", None): - if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + if ( + self.remaining_timeout_budget is not None + and self.remaining_timeout_budget <= 0 + ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), None, @@ -167,8 +171,7 @@ async def asend(self, value): try: start_timestamp = datetime_helpers.utcnow() next_val_routine = asyncio.wait_for( - self.subgenerator.asend(value), - self.remaining_timeout_budget + self.subgenerator.asend(value), self.remaining_timeout_budget ) next_val = await next_val_routine self._subtract_time_from_budget(start_timestamp) @@ -193,6 +196,7 @@ async def athrow(self, typ, val=None, tb=None): else: raise AttributeError("athrow is not implemented for retried stream") + async def retry_target( target, predicate, 
sleep_generator, timeout=None, on_error=None, **kwargs ): diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index d1ca41f5..62091af4 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -653,6 +653,7 @@ async def __anext__(self): raise StopAsyncIteration self.i += 1 return self.i - 1 + return CustomIterator(n) decorated = retry_(iterable_fn) @@ -693,6 +694,7 @@ async def __anext__(self): raise StopAsyncIteration self.i += 1 return self.i - 1 + return CustomIterator(n) decorated = retry_(iterable_fn) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 94d70d58..b86b7715 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -692,4 +692,3 @@ def test___call___with_is_stream(self, sleep): gen = gen_retry_(wrapped)() unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] - From 6879418fb7ad76c6ed195df3f816f3440f8be0d8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:56:22 -0700 Subject: [PATCH 049/204] changed docstrings --- google/api_core/retry.py | 5 ++--- google/api_core/retry_async.py | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index e3eaffde..55c91668 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -384,10 +384,9 @@ class Retry(object): timeout (float): How long to keep retrying, in seconds. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When target is a generator function, non-None values - returned 1by `on_error` will be yielded for downstream consumers. + *not* be caught. is_stream (bool): Indicates whether the input function - should be treated as a generator function. If True, retries will + should be treated as an iterable function. If True, retries will `yield from` wrapped function. 
If false, retries will call wrapped function directly. Defaults to False. deadline (float): DEPRECATED: use `timeout` instead. For backward diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index d93c056a..c3d43a27 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -302,9 +302,7 @@ class AsyncRetry: target or sleeping between retries is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When target is a generator function, non-None - values returned by `on_error` will be yielded for downstream - consumers. + *not* be caught. is_stream (bool): Indicates whether the input function should be treated as an iterable function. If True, the retries will be `yield from` wrapped function. If false, From 847509f791933edfe12d0b9806ed239ee12731ae Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:57:59 -0700 Subject: [PATCH 050/204] removed back-ticks --- google/api_core/retry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 55c91668..30111059 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -425,7 +425,7 @@ def __call__(self, func, on_error=None): `on_error` will be yielded for downstream consumers. Returns: Callable: A callable that will invoke ``func`` with retry - behavior``. + behavior. 
""" if self._on_error is not None: on_error = self._on_error From b5e3796674e117056b165a8cf8f52078ff015053 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 16:58:49 -0700 Subject: [PATCH 051/204] removed outdated comment --- google/api_core/retry.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 30111059..64b8b6bf 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -421,8 +421,6 @@ def __call__(self, func, on_error=None): on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - If `is_stream` is set, non-None values returned by - `on_error` will be yielded for downstream consumers. Returns: Callable: A callable that will invoke ``func`` with retry behavior. From 7a7d9ac3cc9f366ac0420b6342129895d655988b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 17:04:35 -0700 Subject: [PATCH 052/204] changed comments --- google/api_core/retry_async.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index c3d43a27..10e8fad9 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -298,7 +298,7 @@ class AsyncRetry: maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (float): How long to keep retrying in seconds. - When the target is a generator, only time spent waiting on the + When ``is_stream``, only time spent waiting on the target or sleeping between retries is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. 
Any error raised by this function will @@ -346,9 +346,9 @@ def __call__(self, func, on_error=None): Returns: - Union[Coroutine, AsynchronousGenerator]: One of: - - A couroutine that will invoke ``func`` if ``func`` is a coroutine function - - An AsynchronousGenerator that yields from ``func`` if ``func`` is an AsynchronousGenerator function. + Coroutine | AsynchronousGenerator: One of: + - An AsynchronousGenerator that yields from ``func`` if ``is_stream`` + - A couroutine that will invoke ``func`` if ``func`` if not ``is_stream`` """ if self._on_error is not None: on_error = self._on_error From 661989537b422dcff2c191667c28a1bb943ac89b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 17:47:39 -0700 Subject: [PATCH 053/204] moved streaming retries to new files --- google/api_core/retry.py | 87 +------------ google/api_core/retry_async.py | 128 +------------------ google/api_core/retry_streaming.py | 116 +++++++++++++++++ google/api_core/retry_streaming_async.py | 151 +++++++++++++++++++++++ 4 files changed, 270 insertions(+), 212 deletions(-) create mode 100644 google/api_core/retry_streaming.py create mode 100644 google/api_core/retry_streaming_async.py diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 64b8b6bf..3c265933 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -61,12 +61,12 @@ def check_if_exists(): import logging import random import time -from collections.abc import Generator import requests.exceptions from google.api_core import datetime_helpers from google.api_core import exceptions +from google.api_core.retry_streaming import RetryableGenerator from google.auth import exceptions as auth_exceptions _LOGGER = logging.getLogger(__name__) @@ -146,91 +146,6 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT delay = min(delay * multiplier, maximum) -class RetryableGenerator(Generator): - def __init__(self, target, predicate, sleep_generator, timeout=None, 
on_error=None): - self.subgenerator_fn = target - self.subgenerator = self.subgenerator_fn() - self.predicate = predicate - self.sleep_generator = sleep_generator - self.on_error = on_error - self.timeout = timeout - if self.timeout is not None: - self.deadline = datetime_helpers.utcnow() + datetime.timedelta( - seconds=self.timeout - ) - else: - self.deadline = None - - def __iter__(self): - return self - - def _handle_exception(self, exc): - if not self.predicate(exc): - raise exc - else: - if self.on_error: - self.on_error(exc) - try: - next_sleep = next(self.sleep_generator) - except StopIteration: - raise ValueError("Sleep generator stopped yielding sleep values") - if self.deadline is not None: - next_attempt = datetime_helpers.utcnow() + datetime.timedelta( - seconds=next_sleep - ) - if self.deadline < next_attempt: - raise exceptions.RetryError( - f"Deadline of {self.timeout:.1f} seconds exceeded", exc - ) from exc - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) - ) - time.sleep(next_sleep) - self.subgenerator = self.subgenerator_fn() - - def __next__(self): - try: - return next(self.subgenerator) - except Exception as exc: - self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator - return self.__next__() - - def close(self): - if getattr(self.subgenerator, "close", None): - return self.subgenerator.close() - else: - raise AttributeError( - "close() not implemented for {}".format(self.subgenerator) - ) - - def send(self, value): - if getattr(self.subgenerator, "send", None): - try: - return self.subgenerator.send(value) - except Exception as exc: - self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator - return self.send(value) - else: - raise AttributeError( - "send() not implemented for {}".format(self.subgenerator) - ) - - def throw(self, typ, val=None, tb=None): - if getattr(self.subgenerator, "throw", None): - try: - return 
self.subgenerator.throw(typ, val, tb) - except Exception as exc: - self._handle_exception(exc) - # if retryable exception was handled, return next from new subgenerator - return self.__next__() - else: - raise AttributeError( - "throw() not implemented for {}".format(self.subgenerator) - ) - - def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 10e8fad9..6eaf290b 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -55,15 +55,14 @@ async def check_if_exists(): import datetime import functools import logging -import inspect -from collections.abc import AsyncGenerator from google.api_core import datetime_helpers from google.api_core import exceptions from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 from google.api_core.retry import if_transient_error +from google.api_core.retry_streaming_async import AsyncRetryableGenerator _LOGGER = logging.getLogger(__name__) @@ -74,129 +73,6 @@ async def check_if_exists(): _DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds -class AsyncRetryableGenerator(AsyncGenerator): - def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): - self.subgenerator_fn = target - self.subgenerator = None - self.predicate = predicate - self.sleep_generator = sleep_generator - self.on_error = on_error - self.timeout = timeout - self.remaining_timeout_budget = timeout if timeout else None - - async def _ensure_subgenerator(self): - if not self.subgenerator: - if inspect.iscoroutinefunction(self.subgenerator_fn): - self.subgenerator = await self.subgenerator_fn() - else: - self.subgenerator = self.subgenerator_fn() - - def __aiter__(self): - return self - - async def _handle_exception(self, exc): - if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise exc - else: - if 
self.on_error: - self.on_error(exc) - try: - next_sleep = next(self.sleep_generator) - except StopIteration: - raise ValueError("Sleep generator stopped yielding sleep values") - - if self.remaining_timeout_budget is not None: - if self.remaining_timeout_budget <= next_sleep: - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - exc, - ) from exc - else: - self.remaining_timeout_budget -= next_sleep - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) - ) - await asyncio.sleep(next_sleep) - self.subgenerator = None - await self._ensure_subgenerator() - - def _subtract_time_from_budget(self, start_timestamp): - if self.remaining_timeout_budget is not None: - self.remaining_timeout_budget -= ( - datetime_helpers.utcnow() - start_timestamp - ).total_seconds() - - async def __anext__(self): - await self._ensure_subgenerator() - if ( - self.remaining_timeout_budget is not None - and self.remaining_timeout_budget <= 0 - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - None, - ) - try: - start_timestamp = datetime_helpers.utcnow() - next_val_routine = asyncio.wait_for( - self.subgenerator.__anext__(), self.remaining_timeout_budget - ) - next_val = await next_val_routine - self._subtract_time_from_budget(start_timestamp) - return next_val - except (Exception, asyncio.CancelledError) as exc: - self._subtract_time_from_budget(start_timestamp) - await self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator - return await self.__anext__() - - async def aclose(self): - await self._ensure_subgenerator() - if getattr(self.subgenerator, "aclose", None): - return await self.subgenerator.aclose() - else: - raise AttributeError("aclose is not implemented for retried stream") - - async def asend(self, value): - await self._ensure_subgenerator() - if getattr(self.subgenerator, "asend", None): - if ( - self.remaining_timeout_budget is not 
None - and self.remaining_timeout_budget <= 0 - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - None, - ) - try: - start_timestamp = datetime_helpers.utcnow() - next_val_routine = asyncio.wait_for( - self.subgenerator.asend(value), self.remaining_timeout_budget - ) - next_val = await next_val_routine - self._subtract_time_from_budget(start_timestamp) - return next_val - except (Exception, asyncio.CancelledError) as exc: - self._subtract_time_from_budget(start_timestamp) - await self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator - return await self.__asend__(value) - else: - raise AttributeError("asend is not implemented for retried stream") - - async def athrow(self, typ, val=None, tb=None): - await self._ensure_subgenerator() - if getattr(self.subgenerator, "athrow", None): - try: - return await self.subgenerator.athrow(typ, val, tb) - except Exception as exc: - await self._handle_exception(exc) - # if retryable exception was handled, return next from new subgenerator - return await self.__anext__() - else: - raise AttributeError("athrow is not implemented for retried stream") - - async def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): @@ -346,7 +222,7 @@ def __call__(self, func, on_error=None): Returns: - Coroutine | AsynchronousGenerator: One of: + Coroutine | AsynchronousGenerator: - An AsynchronousGenerator that yields from ``func`` if ``is_stream`` - A couroutine that will invoke ``func`` if ``func`` if not ``is_stream`` """ diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py new file mode 100644 index 00000000..27f39452 --- /dev/null +++ b/google/api_core/retry_streaming.py @@ -0,0 +1,116 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for retries for streaming APIs.""" + +import datetime +import logging +import time + +from collections.abc import Generator + +from google.api_core import datetime_helpers +from google.api_core import exceptions + +_LOGGER = logging.getLogger(__name__) + + +class RetryableGenerator(Generator): + """ + Helper class for retrying Iterator and Generator-based + streaming APIs. + """ + + def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): + self.subgenerator_fn = target + self.subgenerator = self.subgenerator_fn() + self.predicate = predicate + self.sleep_generator = sleep_generator + self.on_error = on_error + self.timeout = timeout + if self.timeout is not None: + self.deadline = datetime_helpers.utcnow() + datetime.timedelta( + seconds=self.timeout + ) + else: + self.deadline = None + + def __iter__(self): + return self + + def _handle_exception(self, exc): + if not self.predicate(exc): + raise exc + else: + if self.on_error: + self.on_error(exc) + try: + next_sleep = next(self.sleep_generator) + except StopIteration: + raise ValueError("Sleep generator stopped yielding sleep values") + if self.deadline is not None: + next_attempt = datetime_helpers.utcnow() + datetime.timedelta( + seconds=next_sleep + ) + if self.deadline < next_attempt: + raise exceptions.RetryError( + f"Deadline of {self.timeout:.1f} seconds exceeded", exc + ) from exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) + ) + time.sleep(next_sleep) + self.subgenerator = self.subgenerator_fn() + + def 
__next__(self): + try: + return next(self.subgenerator) + except Exception as exc: + self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return self.__next__() + + def close(self): + if getattr(self.subgenerator, "close", None): + return self.subgenerator.close() + else: + raise AttributeError( + "close() not implemented for {}".format(self.subgenerator) + ) + + def send(self, value): + if getattr(self.subgenerator, "send", None): + try: + return self.subgenerator.send(value) + except Exception as exc: + self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return self.send(value) + else: + raise AttributeError( + "send() not implemented for {}".format(self.subgenerator) + ) + + def throw(self, typ, val=None, tb=None): + if getattr(self.subgenerator, "throw", None): + try: + return self.subgenerator.throw(typ, val, tb) + except Exception as exc: + self._handle_exception(exc) + # if retryable exception was handled, return next from new subgenerator + return self.__next__() + else: + raise AttributeError( + "throw() not implemented for {}".format(self.subgenerator) + ) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py new file mode 100644 index 00000000..b2d440c2 --- /dev/null +++ b/google/api_core/retry_streaming_async.py @@ -0,0 +1,151 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Helpers for retries for async streaming APIs.""" + +import asyncio +import inspect +import logging + +from collections.abc import AsyncGenerator + +from google.api_core import datetime_helpers +from google.api_core import exceptions + +_LOGGER = logging.getLogger(__name__) + + +class AsyncRetryableGenerator(AsyncGenerator): + """ + Helper class for retrying AsyncIterator and AsyncGenerator-based + streaming APIs. + """ + + def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): + self.subgenerator_fn = target + self.subgenerator = None + self.predicate = predicate + self.sleep_generator = sleep_generator + self.on_error = on_error + self.timeout = timeout + self.remaining_timeout_budget = timeout if timeout else None + + async def _ensure_subgenerator(self): + if not self.subgenerator: + if inspect.iscoroutinefunction(self.subgenerator_fn): + self.subgenerator = await self.subgenerator_fn() + else: + self.subgenerator = self.subgenerator_fn() + + def __aiter__(self): + return self + + async def _handle_exception(self, exc): + if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): + raise exc + else: + if self.on_error: + self.on_error(exc) + try: + next_sleep = next(self.sleep_generator) + except StopIteration: + raise ValueError('Sleep generator stopped yielding sleep values') + + if self.remaining_timeout_budget is not None: + if self.remaining_timeout_budget <= next_sleep: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + exc, + ) from exc + else: + self.remaining_timeout_budget -= next_sleep + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) + ) + await asyncio.sleep(next_sleep) + self.subgenerator = None + await self._ensure_subgenerator() + + def _subtract_time_from_budget(self, start_timestamp): + if self.remaining_timeout_budget is not None: + self.remaining_timeout_budget -= ( + datetime_helpers.utcnow() - start_timestamp + 
).total_seconds() + + async def __anext__(self): + await self._ensure_subgenerator() + if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + None, + ) + try: + start_timestamp = datetime_helpers.utcnow() + next_val_routine = asyncio.wait_for( + self.subgenerator.__anext__(), + self.remaining_timeout_budget + ) + next_val = await next_val_routine + self._subtract_time_from_budget(start_timestamp) + return next_val + except (Exception, asyncio.CancelledError) as exc: + self._subtract_time_from_budget(start_timestamp) + await self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return await self.__anext__() + + async def aclose(self): + await self._ensure_subgenerator() + if getattr(self.subgenerator, "aclose", None): + return await self.subgenerator.aclose() + else: + raise AttributeError("aclose is not implemented for retried stream") + + async def asend(self, value): + await self._ensure_subgenerator() + if getattr(self.subgenerator, "asend", None): + if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + None, + ) + try: + start_timestamp = datetime_helpers.utcnow() + next_val_routine = asyncio.wait_for( + self.subgenerator.asend(value), + self.remaining_timeout_budget + ) + next_val = await next_val_routine + self._subtract_time_from_budget(start_timestamp) + return next_val + except (Exception, asyncio.CancelledError) as exc: + self._subtract_time_from_budget(start_timestamp) + await self._handle_exception(exc) + # if retryable exception was handled, try again with new subgenerator + return await self.__asend__(value) + else: + raise AttributeError("asend is not implemented for retried stream") + + async def athrow(self, typ, val=None, tb=None): + await self._ensure_subgenerator() + 
if getattr(self.subgenerator, "athrow", None): + try: + return await self.subgenerator.athrow(typ, val, tb) + except Exception as exc: + await self._handle_exception(exc) + # if retryable exception was handled, return next from new subgenerator + return await self.__anext__() + else: + raise AttributeError("athrow is not implemented for retried stream") + From 27fc9302228aa5269f6cb08ce401f89a0874ae38 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 17:50:15 -0700 Subject: [PATCH 054/204] reverted some style changes --- google/api_core/retry.py | 12 +++++++++--- google/api_core/retry_async.py | 2 +- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 3c265933..707dc30b 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -186,6 +186,7 @@ def retry_target( deadline = None last_exc = None + for sleep in sleep_generator: try: return target() @@ -196,17 +197,21 @@ def retry_target( if not predicate(exc): raise last_exc = exc - if on_error is not None: - on_error(last_exc) + if on_error is not None: + on_error(exc) + if deadline is not None: next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( seconds=sleep ) if deadline < next_attempt_time: raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded".format(timeout), + "Deadline of {:.1f}s exceeded while calling target function".format( + timeout + ), last_exc, ) from last_exc + _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) ) @@ -336,6 +341,7 @@ def __call__(self, func, on_error=None): on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + Returns: Callable: A callable that will invoke ``func`` with retry behavior. 
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 6eaf290b..d3f6494a 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -131,7 +131,7 @@ async def retry_target( raise last_exc = exc if on_error is not None: - on_error(last_exc) + on_error(exc) now = datetime_helpers.utcnow() From d6a23ea0a7bc532f35b973ccfca5ceb002de099c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 17:53:01 -0700 Subject: [PATCH 055/204] changed comments --- google/api_core/retry_async.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index d3f6494a..320ea892 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -56,7 +56,6 @@ async def check_if_exists(): import functools import logging - from google.api_core import datetime_helpers from google.api_core import exceptions from google.api_core.retry import exponential_sleep_generator @@ -216,13 +215,10 @@ def __call__(self, func, on_error=None): coroutine or async generator function to add retry behavior to. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will - *not* be caught. When `func` is a generator function, non-None - values returned by `on_error` will be yielded for downstream - consumers. - + *not* be caught. 
Returns: - Coroutine | AsynchronousGenerator: + Union[Coroutine, AsynchronousGenerator]: - An AsynchronousGenerator that yields from ``func`` if ``is_stream`` - A couroutine that will invoke ``func`` if ``func`` if not ``is_stream`` """ From 90ef83480fa390c24ccea5a2796fc2744ffad23f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 18:24:14 -0700 Subject: [PATCH 056/204] added comments --- google/api_core/retry_streaming.py | 101 ++++++++++++--- google/api_core/retry_streaming_async.py | 155 ++++++++++++++++++----- 2 files changed, 205 insertions(+), 51 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 27f39452..e674a1f3 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,6 +14,8 @@ """Helpers for retries for streaming APIs.""" +from typing import Callable, Optional, Iterable + import datetime import logging import time @@ -32,9 +34,31 @@ class RetryableGenerator(Generator): streaming APIs. """ - def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): - self.subgenerator_fn = target - self.subgenerator = self.subgenerator_fn() + def __init__( + self, + target: Callable[[], Iterable], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: Optional[float] = None, + on_error: Optional[Callable[[Exception], None]] = None, + ): + """ + Args: + target: The function to call to produce iterables for each retry. + This must be a nullary function - apply arguments with + `functools.partial`. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + on_error: A function to call while processing a + retryable exception. 
Any error raised by this function will *not* + be caught. + """ + self.target_fn = target + self.active_target: Iterable = self.target_fn() self.predicate = predicate self.sleep_generator = sleep_generator self.on_error = on_error @@ -47,18 +71,28 @@ def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=No self.deadline = None def __iter__(self): + """ + Implement the iterator protocol. + """ return self def _handle_exception(self, exc): + """ + When an exception is raised while iterating over the active_target, + check if it is retryable. If so, create a new active_target and + continue iterating. If not, raise the exception. + """ if not self.predicate(exc): raise exc else: + # run on_error callback if provided if self.on_error: self.on_error(exc) try: next_sleep = next(self.sleep_generator) except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") + # if deadline is exceeded, raise RetryError if self.deadline is not None: next_attempt = datetime_helpers.utcnow() + datetime.timedelta( seconds=next_sleep @@ -67,50 +101,83 @@ def _handle_exception(self, exc): raise exceptions.RetryError( f"Deadline of {self.timeout:.1f} seconds exceeded", exc ) from exc + # sleep before retrying _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) ) time.sleep(next_sleep) - self.subgenerator = self.subgenerator_fn() + self.active_target = self.target_fn() def __next__(self): + """ + Implement the iterator protocol. + """ try: - return next(self.subgenerator) + return next(self.active_target) except Exception as exc: self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator + # if retryable exception was handled, try again with new active_target return self.__next__() def close(self): - if getattr(self.subgenerator, "close", None): - return self.subgenerator.close() + """ + Close the active_target if supported. (e.g. 
target is a generator) + + Raises: + - AttributeError if the active_target does not have a close() method + """ + if getattr(self.active_target, "close", None): + return self.active_target.close() else: raise AttributeError( - "close() not implemented for {}".format(self.subgenerator) + "close() not implemented for {}".format(self.active_target) ) def send(self, value): - if getattr(self.subgenerator, "send", None): + """ + Call send on the active_target if supported. (e.g. target is a generator) + + If an exception is raised, a retry may be attempted before returning + a result. + + Returns: + - the result of calling send() on the active_target + + Raises: + - AttributeError if the active_target does not have a send() method + """ + if getattr(self.active_target, "send", None): try: - return self.subgenerator.send(value) + return self.active_target.send(value) except Exception as exc: self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator + # if retryable exception was handled, try again with new active_target return self.send(value) else: raise AttributeError( - "send() not implemented for {}".format(self.subgenerator) + "send() not implemented for {}".format(self.active_target) ) def throw(self, typ, val=None, tb=None): - if getattr(self.subgenerator, "throw", None): + """ + Call throw on the active_target if supported. (e.g. target is a generator) + + If an exception is raised, a retry may be attempted before returning + a result. 
+ + Returns: + - the result of calling throw() on the active_target + Raises: + - AttributeError if the active_target does not have a throw() method + """ + if getattr(self.active_target, "throw", None): try: - return self.subgenerator.throw(typ, val, tb) + return self.active_target.throw(typ, val, tb) except Exception as exc: self._handle_exception(exc) - # if retryable exception was handled, return next from new subgenerator + # if retryable exception was handled, return next from new active_target return self.__next__() else: raise AttributeError( - "throw() not implemented for {}".format(self.subgenerator) + "throw() not implemented for {}".format(self.active_target) ) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index b2d440c2..ca151c44 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -14,6 +14,8 @@ """Helpers for retries for async streaming APIs.""" +from typing import Callable, Optional, Iterable, AsyncIterable, Awaitable, Union + import asyncio import inspect import logging @@ -32,36 +34,71 @@ class AsyncRetryableGenerator(AsyncGenerator): streaming APIs. """ - def __init__(self, target, predicate, sleep_generator, timeout=None, on_error=None): - self.subgenerator_fn = target - self.subgenerator = None + def __init__( + self, + target: Union[ + Callable[[], AsyncIterable], Callable[[], Awaitable[AsyncIterable]] + ], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: Optional[float] = None, + on_error: Optional[Callable[[Exception], None]] = None, + ): + """ + Args: + target: The function to call to produce iterables for each retry. + This must be a nullary function - apply arguments with + `functools.partial`. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. 
+ sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + on_error: A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. + """ + self.target_fn = target + # active target must be populated in an async context + self.active_target: Optional[AsyncIterable] = None self.predicate = predicate self.sleep_generator = sleep_generator self.on_error = on_error self.timeout = timeout self.remaining_timeout_budget = timeout if timeout else None - async def _ensure_subgenerator(self): - if not self.subgenerator: - if inspect.iscoroutinefunction(self.subgenerator_fn): - self.subgenerator = await self.subgenerator_fn() + async def _ensure_active_target(self): + """ + Ensure that the active target is populated and ready to be iterated over. + """ + if not self.active_target: + if inspect.iscoroutinefunction(self.target_fn): + self.active_target = await self.target_fn() else: - self.subgenerator = self.subgenerator_fn() + self.active_target = self.target_fn() def __aiter__(self): + """Implement the async iterator protocol.""" return self async def _handle_exception(self, exc): + """ + When an exception is raised while iterating over the active_target, + check if it is retryable. If so, create a new active_target and + continue iterating. If not, raise the exception. 
+ """ if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): raise exc else: + # run on_error callback if provided if self.on_error: self.on_error(exc) try: next_sleep = next(self.sleep_generator) except StopIteration: - raise ValueError('Sleep generator stopped yielding sleep values') - + raise ValueError("Sleep generator stopped yielding sleep values") + # if time budget is exceeded, raise RetryError if self.remaining_timeout_budget is not None: if self.remaining_timeout_budget <= next_sleep: raise exceptions.RetryError( @@ -73,19 +110,34 @@ async def _handle_exception(self, exc): _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) ) + # sleep before retrying await asyncio.sleep(next_sleep) - self.subgenerator = None - await self._ensure_subgenerator() + self.active_target = None + await self._ensure_active_target() def _subtract_time_from_budget(self, start_timestamp): + """ + Subtract the time elapsed since start_timestamp from the remaining + timeout budget. + + Args: + - start_timestamp (datetime): The time at which the last operation + started. + """ if self.remaining_timeout_budget is not None: self.remaining_timeout_budget -= ( datetime_helpers.utcnow() - start_timestamp ).total_seconds() async def __anext__(self): - await self._ensure_subgenerator() - if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + """ + Implement the async iterator protocol. 
+ """ + await self._ensure_active_target() + if ( + self.remaining_timeout_budget is not None + and self.remaining_timeout_budget <= 0 + ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), None, @@ -93,8 +145,7 @@ async def __anext__(self): try: start_timestamp = datetime_helpers.utcnow() next_val_routine = asyncio.wait_for( - self.subgenerator.__anext__(), - self.remaining_timeout_budget + self.active_target.__anext__(), self.remaining_timeout_budget ) next_val = await next_val_routine self._subtract_time_from_budget(start_timestamp) @@ -102,20 +153,44 @@ async def __anext__(self): except (Exception, asyncio.CancelledError) as exc: self._subtract_time_from_budget(start_timestamp) await self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator + # if retryable exception was handled, try again with new active_target return await self.__anext__() async def aclose(self): - await self._ensure_subgenerator() - if getattr(self.subgenerator, "aclose", None): - return await self.subgenerator.aclose() + """ + Close the active_target if supported. (e.g. target is an async generator) + + Raises: + - AttributeError if the active_target does not have a aclose() method + """ + + await self._ensure_active_target() + if getattr(self.active_target, "aclose", None): + return await self.active_target.aclose() else: - raise AttributeError("aclose is not implemented for retried stream") + raise AttributeError( + "aclose() not implemented for {}".format(self.active_target) + ) async def asend(self, value): - await self._ensure_subgenerator() - if getattr(self.subgenerator, "asend", None): - if self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0: + """ + Call asend on the active_target if supported. (e.g. target is an async generator) + + If an exception is raised, a retry may be attempted before returning + a result. 
+ + Returns: + - the result of calling asend() on the active_target + + Raises: + - AttributeError if the active_target does not have a asend() method + """ + await self._ensure_active_target() + if getattr(self.active_target, "asend", None): + if ( + self.remaining_timeout_budget is not None + and self.remaining_timeout_budget <= 0 + ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), None, @@ -123,8 +198,7 @@ async def asend(self, value): try: start_timestamp = datetime_helpers.utcnow() next_val_routine = asyncio.wait_for( - self.subgenerator.asend(value), - self.remaining_timeout_budget + self.active_target.asend(value), self.remaining_timeout_budget ) next_val = await next_val_routine self._subtract_time_from_budget(start_timestamp) @@ -132,20 +206,33 @@ async def asend(self, value): except (Exception, asyncio.CancelledError) as exc: self._subtract_time_from_budget(start_timestamp) await self._handle_exception(exc) - # if retryable exception was handled, try again with new subgenerator + # if retryable exception was handled, try again with new active_target return await self.__asend__(value) else: - raise AttributeError("asend is not implemented for retried stream") + raise AttributeError( + "asend() not implemented for {}".format(self.active_target) + ) async def athrow(self, typ, val=None, tb=None): - await self._ensure_subgenerator() - if getattr(self.subgenerator, "athrow", None): + """ + Call athrow on the active_target if supported. (e.g. 
target is an async generator) + + If an exception is raised, a retry may be attempted before returning + + Returns: + - the result of calling athrow() on the active_target + Raises: + - AttributeError if the active_target does not have a athrow() method + """ + await self._ensure_active_target() + if getattr(self.active_target, "athrow", None): try: - return await self.subgenerator.athrow(typ, val, tb) + return await self.active_target.athrow(typ, val, tb) except Exception as exc: await self._handle_exception(exc) - # if retryable exception was handled, return next from new subgenerator + # if retryable exception was handled, return next from new active_target return await self.__anext__() else: - raise AttributeError("athrow is not implemented for retried stream") - + raise AttributeError( + "athrow() not implemented for {}".format(self.active_target) + ) From 6201db615646793e63765420a10954dcd2ab530c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 18:38:59 -0700 Subject: [PATCH 057/204] refactoring and commenting --- google/api_core/retry_streaming_async.py | 63 +++++++++++++----------- 1 file changed, 35 insertions(+), 28 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index ca151c44..af72c01f 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -129,11 +129,24 @@ def _subtract_time_from_budget(self, start_timestamp): datetime_helpers.utcnow() - start_timestamp ).total_seconds() - async def __anext__(self): - """ - Implement the async iterator protocol. + async def _iteration_helper(self, + iteration_fn:Callable[..., Awaitable], + try_again_fn: Callable, + *args + ): """ - await self._ensure_active_target() + Helper function for sharing logic between __anext__ and asend. + + Args: + - iteration_fn: The function to call to get the next value from the + iterator (e.g. 
__anext__ or asend) + - try_again_fn: The function to call after a retryable exception is + encountered, to get a value from the new active_target + (e.g. self.__anext__ or self.asend) + - *args: Any additional arguments to pass to iteration_fn and + try_again_fn (e.g. the value to send to asend) + """ + # check for expired timeouts before attempting to iterate if ( self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0 @@ -143,18 +156,31 @@ async def __anext__(self): None, ) try: + # start the timer for the current operation start_timestamp = datetime_helpers.utcnow() + # grab the next value from the active_target next_val_routine = asyncio.wait_for( - self.active_target.__anext__(), self.remaining_timeout_budget + iteration_fn(*args), self.remaining_timeout_budget ) next_val = await next_val_routine + # subtract the time spent waiting for the next value from the + # remaining timeout budget self._subtract_time_from_budget(start_timestamp) return next_val except (Exception, asyncio.CancelledError) as exc: self._subtract_time_from_budget(start_timestamp) await self._handle_exception(exc) # if retryable exception was handled, try again with new active_target - return await self.__anext__() + return await try_again_fn(*args) + + async def __anext__(self): + """ + Implement the async iterator protocol. 
+ """ + await self._ensure_active_target() + return await self._iteration_helper( + self.active_target.__anext__, self.__anext__ + ) async def aclose(self): """ @@ -163,7 +189,6 @@ async def aclose(self): Raises: - AttributeError if the active_target does not have a aclose() method """ - await self._ensure_active_target() if getattr(self.active_target, "aclose", None): return await self.active_target.aclose() @@ -187,27 +212,9 @@ async def asend(self, value): """ await self._ensure_active_target() if getattr(self.active_target, "asend", None): - if ( - self.remaining_timeout_budget is not None - and self.remaining_timeout_budget <= 0 - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - None, - ) - try: - start_timestamp = datetime_helpers.utcnow() - next_val_routine = asyncio.wait_for( - self.active_target.asend(value), self.remaining_timeout_budget - ) - next_val = await next_val_routine - self._subtract_time_from_budget(start_timestamp) - return next_val - except (Exception, asyncio.CancelledError) as exc: - self._subtract_time_from_budget(start_timestamp) - await self._handle_exception(exc) - # if retryable exception was handled, try again with new active_target - return await self.__asend__(value) + return await self._iteration_helper( + self.active_target.asend, self.asend, value + ) else: raise AttributeError( "asend() not implemented for {}".format(self.active_target) From 61ce3a7d2fe41febc31bb5556d5814263d669223 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 18:44:19 -0700 Subject: [PATCH 058/204] blacken/mypy fixes --- google/api_core/retry_streaming_async.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index af72c01f..2ba00219 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -129,10 +129,8 @@ def _subtract_time_from_budget(self, 
start_timestamp): datetime_helpers.utcnow() - start_timestamp ).total_seconds() - async def _iteration_helper(self, - iteration_fn:Callable[..., Awaitable], - try_again_fn: Callable, - *args + async def _iteration_helper( + self, iteration_fn: Callable[..., Awaitable], try_again_fn: Callable, *args ): """ Helper function for sharing logic between __anext__ and asend. @@ -150,6 +148,7 @@ async def _iteration_helper(self, if ( self.remaining_timeout_budget is not None and self.remaining_timeout_budget <= 0 + and self.timeout is not None ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), From 69149a15f12cd4d1b151ddde1418e3ae4270f517 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 13 Apr 2023 19:13:22 -0700 Subject: [PATCH 059/204] fixed issue with py37 --- google/api_core/retry_streaming_async.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 2ba00219..29ce5134 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -73,10 +73,9 @@ async def _ensure_active_target(self): Ensure that the active target is populated and ready to be iterated over. 
""" if not self.active_target: - if inspect.iscoroutinefunction(self.target_fn): - self.active_target = await self.target_fn() - else: - self.active_target = self.target_fn() + self.active_target = self.target_fn() + if inspect.iscoroutine(self.active_target): + self.active_target = await self.active_target def __aiter__(self): """Implement the async iterator protocol.""" From d63871e262cb35d152528aa47c8e71063e01fbf3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 10:14:19 -0700 Subject: [PATCH 060/204] added tests for bad sleep generators --- google/api_core/retry_streaming.py | 2 +- google/api_core/retry_streaming_async.py | 2 +- tests/asyncio/test_retry_async.py | 16 ++++++++++++++++ tests/unit/test_retry.py | 15 +++++++++++++++ 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index e674a1f3..6aae3d11 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -60,7 +60,7 @@ def __init__( self.target_fn = target self.active_target: Iterable = self.target_fn() self.predicate = predicate - self.sleep_generator = sleep_generator + self.sleep_generator = iter(sleep_generator) self.on_error = on_error self.timeout = timeout if self.timeout is not None: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 29ce5134..bd804445 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -63,7 +63,7 @@ def __init__( # active target must be populated in an async context self.active_target: Optional[AsyncIterable] = None self.predicate = predicate - self.sleep_generator = sleep_generator + self.sleep_generator = iter(sleep_generator) self.on_error = on_error self.timeout = timeout self.remaining_timeout_budget = timeout if timeout else None diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 62091af4..718dc212 
100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -134,6 +134,22 @@ async def test_retry_target_bad_sleep_generator(): ) +@pytest.mark.asyncio +async def test_retry_streaming_target_bad_sleep_generator(): + from google.api_core.retry_streaming_async import AsyncRetryableGenerator + + async def target_fn(): + async def inner_gen(): + raise RuntimeError("initiate retry") + yield None + + return inner_gen() + + with pytest.raises(ValueError, match="Sleep generator"): + gen = AsyncRetryableGenerator(target_fn, lambda x: True, [], None) + await gen.__anext__() + + class TestAsyncRetry: def test_constructor_defaults(self): retry_ = retry_async.AsyncRetry() diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index b86b7715..36dbe362 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -162,6 +162,21 @@ def test_retry_target_bad_sleep_generator(): retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None) +def test_retry_streaming_target_bad_sleep_generator(): + from google.api_core.retry_streaming import RetryableGenerator + + def target_fn(): + def inner_gen(): + raise RuntimeError("initiate retry") + yield None + + return inner_gen() + + with pytest.raises(ValueError, match="Sleep generator"): + gen = RetryableGenerator(target_fn, lambda x: True, [], None) + next(gen) + + class TestRetry(object): def test_constructor_defaults(self): retry_ = retry.Retry() From 773e03364df6c11730c4b833a96f976d7d6ad3a7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 10:42:38 -0700 Subject: [PATCH 061/204] improved test_retry coverage --- tests/unit/test_retry.py | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 36dbe362..2ed9af21 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -529,6 +529,7 @@ def test___call___generator_retry(self, sleep): 
on_error=on_error, predicate=retry.if_exception_type(ValueError), is_stream=True, + timeout=None, ) result = retry_(self._generator_mock)(error_on=3) # error thrown on 3 @@ -584,7 +585,7 @@ def test___call___with_generator_send(self, sleep): decorated = retry_(self._generator_mock) - generator = decorated(10) + generator = decorated(5) result = next(generator) assert result == 0 in_messages = ["test_1", "hello", "world"] @@ -594,7 +595,30 @@ def test___call___with_generator_send(self, sleep): out_messages.append(recv) assert in_messages == out_messages assert next(generator) == 4 - assert next(generator) == 5 + with pytest.raises(StopIteration): + generator.send("should be exhausted") + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send_retry(self, sleep): + """ + Send should support retries like next + """ + on_error = mock.Mock(return_value=None) + retry_ = retry.Retry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + is_stream=True, + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + result.send("can not send to fresh generator") + assert exc_info.match("can't send non-None value") + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [result.send(None) for i in range(10)] + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 @mock.patch("time.sleep", autospec=True) def test___call___with_iterable_send_close_throw(self, sleep): @@ -642,7 +666,7 @@ def test___call___with_generator_return(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_generator_close(self, sleep): - retry_ = retry.Retry() + retry_ = retry.Retry(is_stream=True) decorated = retry_(self._generator_mock) From d1def5d664588385d930095f596a730d150f8aa8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 10:52:26 -0700 Subject: [PATCH 062/204] improved async test coverage --- 
tests/asyncio/test_retry_async.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 718dc212..502d79a2 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -467,6 +467,7 @@ async def test___call___generator_retry(self, sleep): on_error=on_error, predicate=retry_async.if_exception_type(ValueError), is_stream=True, + timeout=None, ) generator = retry_(self._generator_mock)(error_on=3) # error thrown on 3 @@ -661,9 +662,6 @@ def __init__(self, n): self.n = n self.i = 0 - def __aiter__(self): - return self - async def __anext__(self): if self.i == self.n: raise StopAsyncIteration @@ -674,7 +672,7 @@ async def __anext__(self): decorated = retry_(iterable_fn) - retryable = decorated(10) + retryable = decorated(4) result = await retryable.__anext__() assert result == 0 with pytest.raises(AttributeError): @@ -686,6 +684,8 @@ async def __anext__(self): with pytest.raises(AttributeError): await retryable.athrow(ValueError("test")) assert await retryable.__anext__() == 3 + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -702,9 +702,6 @@ def __init__(self, n): self.n = n self.i = 0 - def __aiter__(self): - return self - async def __anext__(self): if self.i == self.n: raise StopAsyncIteration @@ -715,7 +712,7 @@ async def __anext__(self): decorated = retry_(iterable_fn) - retryable = decorated(10) + retryable = decorated(4) result = await retryable.__anext__() assert result == 0 with pytest.raises(AttributeError): @@ -727,3 +724,5 @@ async def __anext__(self): with pytest.raises(AttributeError): await retryable.athrow(ValueError("test")) assert await retryable.__anext__() == 3 + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() From cbaaa1d81515bc1e67b88eba1901fbf90e626ed8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 
14 Apr 2023 10:58:48 -0700 Subject: [PATCH 063/204] added test for calling next on exhausted generator --- tests/asyncio/test_retry_async.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 502d79a2..132414bc 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -544,8 +544,13 @@ async def test___call___generator_timeout_cancellations(self): generator = retry_(self._generator_mock)(sleep_time=0.07) assert await generator.__anext__() == 0 assert await generator.__anext__() == 1 - with pytest.raises(exceptions.RetryError): + with pytest.raises(exceptions.RetryError) as exc_info: + await generator.__anext__() + assert "Timeout of 0.2s exceeded" in str(exc_info.value) + # subsequent calls should also return a RetryError + with pytest.raises(exceptions.RetryError) as excinfo: await generator.__anext__() + assert "Timeout of 0.2s exceeded" in str(exc_info.value) @pytest.mark.asyncio async def test___call___generator_await_cancel_retryable(self): From 21a863f768db041cba84bdf20366231ffebc9c0e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 11:03:39 -0700 Subject: [PATCH 064/204] fixed lint issue --- tests/asyncio/test_retry_async.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 132414bc..e7b23af6 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -544,13 +544,13 @@ async def test___call___generator_timeout_cancellations(self): generator = retry_(self._generator_mock)(sleep_time=0.07) assert await generator.__anext__() == 0 assert await generator.__anext__() == 1 - with pytest.raises(exceptions.RetryError) as exc_info: + with pytest.raises(exceptions.RetryError) as exc: await generator.__anext__() - assert "Timeout of 0.2s exceeded" in str(exc_info.value) + assert "Timeout of 0.2s 
exceeded" in str(exc.value) # subsequent calls should also return a RetryError - with pytest.raises(exceptions.RetryError) as excinfo: + with pytest.raises(exceptions.RetryError) as exc: await generator.__anext__() - assert "Timeout of 0.2s exceeded" in str(exc_info.value) + assert "Timeout of 0.2s exceeded" in str(exc.value) @pytest.mark.asyncio async def test___call___generator_await_cancel_retryable(self): From 878ddfb59a75ca88f94563403d042b6c493bdd5c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 11:26:51 -0700 Subject: [PATCH 065/204] changed docstring --- google/api_core/retry.py | 8 +++++--- google/api_core/retry_async.py | 14 ++++++++------ 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 707dc30b..532b948e 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -306,9 +306,11 @@ class Retry(object): a retryable exception. Any error raised by this function will *not* be caught. is_stream (bool): Indicates whether the input function - should be treated as an iterable function. If True, retries will - `yield from` wrapped function. If false, retries will call wrapped - function directly. Defaults to False. + should be treated as an stream function (i.e. a Generator, + or function that returns an Iterable). If True, the iterable + will be wrapped with retry logic, and any failed outputs will + restart the stream. If False, only the input function call itself + will be retried. Defaults to False. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 320ea892..79de83ff 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -81,8 +81,8 @@ async def retry_target( higher-level retry helper :class:`Retry`. 
Args: - target(Callable[..., Coroutine]): The coroutine function to call and retry. - This must be a nullary function - apply arguments with `functools.partial`. + target: The function to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. It should return True to retry or False otherwise. @@ -179,10 +179,12 @@ class AsyncRetry: a retryable exception. Any error raised by this function will *not* be caught. is_stream (bool): Indicates whether the input function - should be treated as an iterable function. If True, - the retries will be `yield from` wrapped function. If false, - retries will call wrapped function directly. Defaults to False. - + should be treated as an stream function (i.e. an AsyncGenerator, + or function or coroutine that returns an AsyncIterable). + If ``is_stream`` is True, the iterable will be wrapped with retry + logic, and any failed outputs will restart the stream. If False, + only the input function call itself will be retried. + Defaults to False. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. """ From 7b0a6005b6d85bcfeaf8e41d87d7092e0a25e441 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 11:27:59 -0700 Subject: [PATCH 066/204] changed docstrings --- google/api_core/retry_async.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 79de83ff..fea2b66b 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -81,7 +81,7 @@ async def retry_target( higher-level retry helper :class:`Retry`. Args: - target: The function to call and retry. This must be a + target(Callable): The function to call and retry. This must be a nullary function - apply arguments with `functools.partial`. 
predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. @@ -181,10 +181,9 @@ class AsyncRetry: is_stream (bool): Indicates whether the input function should be treated as an stream function (i.e. an AsyncGenerator, or function or coroutine that returns an AsyncIterable). - If ``is_stream`` is True, the iterable will be wrapped with retry - logic, and any failed outputs will restart the stream. If False, - only the input function call itself will be retried. - Defaults to False. + If True, the iterable will be wrapped with retry logic, and any + failed outputs will restart the stream. If False, only the input + function call itself will be retried. Defaults to False. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. """ From 0188228a1a7b7936cfbf72b0cc338a1a1c16d9b7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 11:32:10 -0700 Subject: [PATCH 067/204] updated comments --- google/api_core/retry_async.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index fea2b66b..5d6dc619 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -212,16 +212,14 @@ def __call__(self, func, on_error=None): """Wrap a callable with retry behavior. Args: - func (Callable[..., Union[Coroutine, AsynchronousGenerator]]): The - coroutine or async generator function to add retry behavior to. + func (Callable): The callable or stream to add retry behavior to. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. 
Returns: - Union[Coroutine, AsynchronousGenerator]: - - An AsynchronousGenerator that yields from ``func`` if ``is_stream`` - - A couroutine that will invoke ``func`` if ``func`` if not ``is_stream`` + Callable: A callable that will invoke ``func`` with retry + behavior. """ if self._on_error is not None: on_error = self._on_error From 902a4ab6327c5657dc797e4a98096eb9ad7e8ed9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 11:35:03 -0700 Subject: [PATCH 068/204] updated comments --- google/api_core/retry_async.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 5d6dc619..f6a8c5a0 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -241,12 +241,11 @@ async def retry_wrapped_func(*args, **kwargs): @functools.wraps(func) def retry_wrapped_stream(*args, deadline_dt=None, **kwargs): - """A wrapper that yields through target generator with retry.""" + """A wrapper that iterates over target stream with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - # if the target is a generator or iterable function, make sure return is also a generator function return AsyncRetryableGenerator( target, self._predicate, From 74f3f3ef7a9325a3d6f605eb5359928481982223 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 14 Apr 2023 11:53:46 -0700 Subject: [PATCH 069/204] fixed send and asend retry logic --- google/api_core/retry_streaming.py | 4 +-- google/api_core/retry_streaming_async.py | 25 ++++++------------- tests/asyncio/test_retry_async.py | 31 +++++++++++++++++++++++- tests/unit/test_retry.py | 17 ++++++++++--- 4 files changed, 53 insertions(+), 24 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 6aae3d11..29a42ec4 100644 --- a/google/api_core/retry_streaming.py +++ 
b/google/api_core/retry_streaming.py @@ -151,8 +151,8 @@ def send(self, value): return self.active_target.send(value) except Exception as exc: self._handle_exception(exc) - # if retryable exception was handled, try again with new active_target - return self.send(value) + # if exception was retryable, use new target for return value + return self.__next__() else: raise AttributeError( "send() not implemented for {}".format(self.active_target) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index bd804445..112afd02 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -128,20 +128,13 @@ def _subtract_time_from_budget(self, start_timestamp): datetime_helpers.utcnow() - start_timestamp ).total_seconds() - async def _iteration_helper( - self, iteration_fn: Callable[..., Awaitable], try_again_fn: Callable, *args - ): + async def _iteration_helper(self, iteration_routine: Awaitable): """ Helper function for sharing logic between __anext__ and asend. Args: - - iteration_fn: The function to call to get the next value from the - iterator (e.g. __anext__ or asend) - - try_again_fn: The function to call after a retryable exception is - encountered, to get a value from the new active_target - (e.g. self.__anext__ or self.asend) - - *args: Any additional arguments to pass to iteration_fn and - try_again_fn (e.g. the value to send to asend) + - iteration_routine: The coroutine to await to get the next value + from the iterator (e.g. 
__anext__ or asend) """ # check for expired timeouts before attempting to iterate if ( @@ -158,7 +151,7 @@ async def _iteration_helper( start_timestamp = datetime_helpers.utcnow() # grab the next value from the active_target next_val_routine = asyncio.wait_for( - iteration_fn(*args), self.remaining_timeout_budget + iteration_routine, self.remaining_timeout_budget ) next_val = await next_val_routine # subtract the time spent waiting for the next value from the @@ -168,8 +161,8 @@ async def _iteration_helper( except (Exception, asyncio.CancelledError) as exc: self._subtract_time_from_budget(start_timestamp) await self._handle_exception(exc) - # if retryable exception was handled, try again with new active_target - return await try_again_fn(*args) + # if retryable exception was handled, find the next value to return + return await self.__anext__() async def __anext__(self): """ @@ -177,7 +170,7 @@ async def __anext__(self): """ await self._ensure_active_target() return await self._iteration_helper( - self.active_target.__anext__, self.__anext__ + self.active_target.__anext__(), ) async def aclose(self): @@ -210,9 +203,7 @@ async def asend(self, value): """ await self._ensure_active_target() if getattr(self.active_target, "asend", None): - return await self._iteration_helper( - self.active_target.asend, self.asend, value - ) + return await self._iteration_helper(self.active_target.asend(value)) else: raise AttributeError( "asend() not implemented for {}".format(self.active_target) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index e7b23af6..8580f4b7 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -421,7 +421,12 @@ async def test___init___when_retry_is_executed(self, sleep, uniform): sleep.assert_any_call(retry_._initial) async def _generator_mock( - self, num=5, error_on=None, exceptions_seen=None, sleep_time=0 + self, + num=5, + error_on=None, + exceptions_seen=None, + sleep_time=0, + 
ignore_sent=False, ): try: sent_in = None @@ -431,6 +436,8 @@ async def _generator_mock( if error_on and i == error_on: raise ValueError("generator mock error") sent_in = yield (sent_in if sent_in else i) + if ignore_sent: + sent_in = None except (Exception, BaseException, GeneratorExit) as e: # keep track of exceptions seen by generator if exceptions_seen is not None: @@ -607,6 +614,28 @@ async def test___call___with_generator_send(self, sleep): assert await generator.__anext__() == 4 assert await generator.__anext__() == 5 + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_send_retry(self, sleep): + on_error = mock.Mock(return_value=None) + retry_ = retry_async.AsyncRetry( + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + is_stream=True, + timeout=None, + ) + generator = retry_(self._generator_mock)(error_on=3, ignore_sent=True) + with pytest.raises(TypeError) as exc_info: + await generator.asend("can not send to fresh generator") + assert exc_info.match("can't send non-None value") + + # error thrown on 3 + # generator should contain 0, 1, 2 looping + assert await generator.__anext__() == 0 + unpacked = [await generator.asend(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_close(self, sleep): diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 2ed9af21..9b4eefc3 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -484,7 +484,12 @@ def test___init___when_retry_is_executed(self, sleep, uniform): sleep.assert_any_call(retry_._initial) def _generator_mock( - self, num=5, error_on=None, return_val=None, exceptions_seen=None + self, + num=5, + error_on=None, + return_val=None, + exceptions_seen=None, + ignore_sent=False, ): try: sent_in = None @@ -492,6 +497,8 @@ def _generator_mock( 
if error_on and i == error_on: raise ValueError("generator mock error") sent_in = yield (sent_in if sent_in else i) + if ignore_sent: + sent_in = None return return_val except (Exception, BaseException, GeneratorExit) as e: # keep track of exceptions seen by generator @@ -610,14 +617,16 @@ def test___call___with_generator_send_retry(self, sleep): is_stream=True, timeout=None, ) - result = retry_(self._generator_mock)(error_on=3) + result = retry_(self._generator_mock)(error_on=3, ignore_sent=True) with pytest.raises(TypeError) as exc_info: result.send("can not send to fresh generator") assert exc_info.match("can't send non-None value") + # initiate iteration with None + assert result.send(None) == 0 # error thrown on 3 # generator should contain 0, 1, 2 looping - unpacked = [result.send(None) for i in range(10)] - assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + unpacked = [result.send(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] assert on_error.call_count == 3 @mock.patch("time.sleep", autospec=True) From e506aada756fef2d059495ebc81507ed332b832f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 19 Apr 2023 20:04:18 +0000 Subject: [PATCH 070/204] update test error string Co-authored-by: Anthonios Partheniou --- tests/unit/test_retry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 9b4eefc3..2e84c5b7 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -172,7 +172,7 @@ def inner_gen(): return inner_gen() - with pytest.raises(ValueError, match="Sleep generator"): + with pytest.raises(ValueError, match="Sleep generator stopped yielding sleep values"): gen = RetryableGenerator(target_fn, lambda x: True, [], None) next(gen) From 5baa2aa27febdcd7632aa257ec392726657f39dd Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 19 Apr 2023 20:05:54 +0000 Subject: [PATCH 071/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= 
=?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/unit/test_retry.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 2e84c5b7..43dcc005 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -172,7 +172,9 @@ def inner_gen(): return inner_gen() - with pytest.raises(ValueError, match="Sleep generator stopped yielding sleep values"): + with pytest.raises( + ValueError, match="Sleep generator stopped yielding sleep values" + ): gen = RetryableGenerator(target_fn, lambda x: True, [], None) next(gen) From 5c3805d1e40533b36a8ca9b9a5c2b85dfb64150b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 19 Apr 2023 14:42:53 -0700 Subject: [PATCH 072/204] improved type hinting --- google/api_core/retry_streaming.py | 46 ++++++++----- google/api_core/retry_streaming_async.py | 84 +++++++++++++++++------- 2 files changed, 87 insertions(+), 43 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 29a42ec4..2fee73af 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,21 +14,21 @@ """Helpers for retries for streaming APIs.""" -from typing import Callable, Optional, Iterable +from typing import Callable, Optional, Iterable, Iterator, Generator, TypeVar, Any, cast import datetime import logging import time -from collections.abc import Generator - from google.api_core import datetime_helpers from google.api_core import exceptions _LOGGER = logging.getLogger(__name__) +T = TypeVar("T") + -class RetryableGenerator(Generator): +class RetryableGenerator(Generator[T, Any, None]): """ Helper class for retrying Iterator and Generator-based streaming APIs. 
@@ -36,7 +36,7 @@ class RetryableGenerator(Generator): def __init__( self, - target: Callable[[], Iterable], + target: Callable[[], Iterable[T]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, @@ -52,13 +52,13 @@ def __init__( It should return True to retry or False otherwise. sleep_generator: An infinite iterator that determines how long to sleep between retries. - timeout: How long to keep retrying the target. + timeout: How long to keep retrying the target, in seconds. on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. """ self.target_fn = target - self.active_target: Iterable = self.target_fn() + self.active_target: Iterator = self.target_fn().__iter__() self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error @@ -70,13 +70,13 @@ def __init__( else: self.deadline = None - def __iter__(self): + def __iter__(self) -> Generator[T, Any, None]: """ Implement the iterator protocol. """ return self - def _handle_exception(self, exc): + def _handle_exception(self, exc) -> None: """ When an exception is raised while iterating over the active_target, check if it is retryable. If so, create a new active_target and @@ -108,9 +108,12 @@ def _handle_exception(self, exc): time.sleep(next_sleep) self.active_target = self.target_fn() - def __next__(self): + def __next__(self) -> T: """ Implement the iterator protocol. + + Returns: + - the next value of the active_target iterator """ try: return next(self.active_target) @@ -119,7 +122,7 @@ def __next__(self): # if retryable exception was handled, try again with new active_target return self.__next__() - def close(self): + def close(self) -> None: """ Close the active_target if supported. (e.g. 
target is a generator) @@ -133,22 +136,25 @@ def close(self): "close() not implemented for {}".format(self.active_target) ) - def send(self, value): + def send(self, *args, **kwargs) -> T: """ Call send on the active_target if supported. (e.g. target is a generator) If an exception is raised, a retry may be attempted before returning a result. + Args: + - *args: arguments to pass to the wrapped generator's send method + - **kwargs: keyword arguments to pass to the wrapped generator's send method Returns: - - the result of calling send() on the active_target - + - the next value of the active_target iterator after calling send Raises: - AttributeError if the active_target does not have a send() method """ if getattr(self.active_target, "send", None): + casted_target = cast(Generator, self.active_target) try: - return self.active_target.send(value) + return casted_target.send(*args, **kwargs) except Exception as exc: self._handle_exception(exc) # if exception was retryable, use new target for return value @@ -158,21 +164,25 @@ def send(self, value): "send() not implemented for {}".format(self.active_target) ) - def throw(self, typ, val=None, tb=None): + def throw(self, *args, **kwargs) -> T: """ Call throw on the active_target if supported. (e.g. target is a generator) If an exception is raised, a retry may be attempted before returning a result. 
+ Args:
+ - *args: arguments to pass to the wrapped generator's throw method
+ - **kwargs: keyword arguments to pass to the wrapped generator's throw method
Returns:
- - the result of calling throw() on the active_target
+ - the next value of the active_target iterator after calling throw
Raises:
- AttributeError if the active_target does not have a throw() method
"""
if getattr(self.active_target, "throw", None):
+ casted_target = cast(Generator, self.active_target)
try:
- return self.active_target.throw(typ, val, tb)
+ return casted_target.throw(*args, **kwargs)
except Exception as exc:
self._handle_exception(exc)
# if retryable exception was handled, return next from new active_target
diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py
index 112afd02..e60278ec 100644
--- a/google/api_core/retry_streaming_async.py
+++ b/google/api_core/retry_streaming_async.py
@@ -14,21 +14,35 @@
"""Helpers for retries for async streaming APIs."""
-from typing import Callable, Optional, Iterable, AsyncIterable, Awaitable, Union
+from typing import (
+ cast,
+ Callable,
+ Optional,
+ Iterable,
+ AsyncIterator,
+ AsyncIterable,
+ Awaitable,
+ Union,
+ Any,
+ TypeVar,
+ AsyncGenerator,
+)
import asyncio
import inspect
import logging
+import datetime
-from collections.abc import AsyncGenerator
from google.api_core import datetime_helpers
from google.api_core import exceptions
_LOGGER = logging.getLogger(__name__)
+T = TypeVar("T")

-class AsyncRetryableGenerator(AsyncGenerator):
+
+class AsyncRetryableGenerator(AsyncGenerator[T, None]):
"""
Helper class for retrying AsyncIterator and AsyncGenerator-based
streaming APIs.
@@ -37,7 +51,8 @@ class AsyncRetryableGenerator(AsyncGenerator): def __init__( self, target: Union[ - Callable[[], AsyncIterable], Callable[[], Awaitable[AsyncIterable]] + Callable[[], AsyncIterable[T]], + Callable[[], Awaitable[AsyncIterable[T]]], ], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], @@ -61,27 +76,32 @@ def __init__( """ self.target_fn = target # active target must be populated in an async context - self.active_target: Optional[AsyncIterable] = None + self.active_target: Optional[AsyncIterator[T]] = None self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error self.timeout = timeout self.remaining_timeout_budget = timeout if timeout else None - async def _ensure_active_target(self): + async def _ensure_active_target(self) -> AsyncIterator[T]: """ Ensure that the active target is populated and ready to be iterated over. + + Returns: + - The active_target iterable """ if not self.active_target: - self.active_target = self.target_fn() - if inspect.iscoroutine(self.active_target): - self.active_target = await self.active_target + new_iterable = self.target_fn() + if isinstance(new_iterable, Awaitable): + new_iterable = await new_iterable + self.active_target = new_iterable.__aiter__() + return self.active_target - def __aiter__(self): + def __aiter__(self) -> AsyncIterator[T]: """Implement the async iterator protocol.""" return self - async def _handle_exception(self, exc): + async def _handle_exception(self, exc) -> None: """ When an exception is raised while iterating over the active_target, check if it is retryable. 
If so, create a new active_target and @@ -114,7 +134,7 @@ async def _handle_exception(self, exc): self.active_target = None await self._ensure_active_target() - def _subtract_time_from_budget(self, start_timestamp): + def _subtract_time_from_budget(self, start_timestamp: datetime.datetime) -> None: """ Subtract the time elapsed since start_timestamp from the remaining timeout budget. @@ -128,13 +148,15 @@ def _subtract_time_from_budget(self, start_timestamp): datetime_helpers.utcnow() - start_timestamp ).total_seconds() - async def _iteration_helper(self, iteration_routine: Awaitable): + async def _iteration_helper(self, iteration_routine: Awaitable) -> T: """ Helper function for sharing logic between __anext__ and asend. Args: - iteration_routine: The coroutine to await to get the next value from the iterator (e.g. __anext__ or asend) + Returns: + - The next value from the active_target iterator. """ # check for expired timeouts before attempting to iterate if ( @@ -164,16 +186,19 @@ async def _iteration_helper(self, iteration_routine: Awaitable): # if retryable exception was handled, find the next value to return return await self.__anext__() - async def __anext__(self): + async def __anext__(self) -> T: """ Implement the async iterator protocol. + + Returns: + - The next value from the active_target iterator. """ - await self._ensure_active_target() + iterable = await self._ensure_active_target() return await self._iteration_helper( - self.active_target.__anext__(), + iterable.__anext__(), ) - async def aclose(self): + async def aclose(self) -> None: """ Close the active_target if supported. (e.g. 
target is an async generator) @@ -182,48 +207,57 @@ async def aclose(self): """ await self._ensure_active_target() if getattr(self.active_target, "aclose", None): - return await self.active_target.aclose() + casted_target = cast(AsyncGenerator[T, None], self.active_target) + return await casted_target.aclose() else: raise AttributeError( "aclose() not implemented for {}".format(self.active_target) ) - async def asend(self, value): + async def asend(self, *args, **kwargs) -> T: """ Call asend on the active_target if supported. (e.g. target is an async generator) If an exception is raised, a retry may be attempted before returning a result. - Returns: - - the result of calling asend() on the active_target + Args: + - *args: arguments to pass to the wrapped generator's asend method + - **kwargs: keyword arguments to pass to the wrapped generator's asend method + Returns: + - the next value of the active_target iterator after calling asend Raises: - AttributeError if the active_target does not have a asend() method """ await self._ensure_active_target() if getattr(self.active_target, "asend", None): - return await self._iteration_helper(self.active_target.asend(value)) + casted_target = cast(AsyncGenerator[T, None], self.active_target) + return await self._iteration_helper(casted_target.asend(*args, **kwargs)) else: raise AttributeError( "asend() not implemented for {}".format(self.active_target) ) - async def athrow(self, typ, val=None, tb=None): + async def athrow(self, *args, **kwargs) -> T: """ Call athrow on the active_target if supported. (e.g. 
target is an async generator) If an exception is raised, a retry may be attempted before returning + Args: + - *args: arguments to pass to the wrapped generator's athrow method + - **kwargs: keyword arguments to pass to the wrapped generator's athrow method Returns: - - the result of calling athrow() on the active_target + - the next value of the active_target iterator after calling athrow Raises: - AttributeError if the active_target does not have a athrow() method """ await self._ensure_active_target() if getattr(self.active_target, "athrow", None): + casted_target = cast(AsyncGenerator[T, None], self.active_target) try: - return await self.active_target.athrow(typ, val, tb) + return await casted_target.athrow(*args, **kwargs) except Exception as exc: await self._handle_exception(exc) # if retryable exception was handled, return next from new active_target From 265d99801890126eb9f51b735c68ccb7da7a823c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 19 Apr 2023 14:47:34 -0700 Subject: [PATCH 073/204] improved test docs --- google/api_core/retry_async.py | 2 +- tests/asyncio/test_retry_async.py | 42 ++++++++++++++++++++++++++++--- tests/unit/test_retry.py | 26 +++++++++++++++++++ 3 files changed, 65 insertions(+), 5 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index f6a8c5a0..2179afe4 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -240,7 +240,7 @@ async def retry_wrapped_func(*args, **kwargs): ) @functools.wraps(func) - def retry_wrapped_stream(*args, deadline_dt=None, **kwargs): + def retry_wrapped_stream(*args, **kwargs): """A wrapper that iterates over target stream with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 8580f4b7..07ab2f03 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ 
-428,6 +428,10 @@ async def _generator_mock( sleep_time=0, ignore_sent=False, ): + """ + Helper to create a mock generator that yields a number of values + Generator can optionally raise an exception on a specific iteration + """ try: sent_in = None for i in range(num): @@ -447,6 +451,10 @@ async def _generator_mock( @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_success(self, sleep): + """ + Test that a retry-decorated generator yields values as expected + This test checks a generator with no issues + """ from collections.abc import AsyncGenerator retry_ = retry_async.AsyncRetry(is_stream=True) @@ -469,6 +477,9 @@ async def test___call___generator_success(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ on_error = mock.Mock(return_value=None) retry_ = retry_async.AsyncRetry( on_error=on_error, @@ -487,6 +498,10 @@ async def test___call___generator_retry(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ on_error = mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -527,6 +542,10 @@ def increase_time(sleep_delay): @pytest.mark.asyncio async def test___call___generator_timeout_cancellations(self): + """ + Tests that a retry-decorated generator will throw a RetryError + after using its time budget + """ on_error = mock.Mock(return_value=None) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -617,6 +636,9 @@ async def test___call___with_generator_send(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def 
test___call___generator_send_retry(self, sleep): + """ + Send should be retried if target generator raises an error + """ on_error = mock.Mock(return_value=None) retry_ = retry_async.AsyncRetry( on_error=on_error, @@ -639,6 +661,9 @@ async def test___call___generator_send_retry(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(self._generator_mock) exception_list = [] @@ -655,6 +680,9 @@ async def test___call___with_generator_close(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), is_stream=True, @@ -691,18 +719,21 @@ async def test___call___with_iterable_coroutine_send_close_throw(self, sleep): retry_ = retry_async.AsyncRetry(is_stream=True) async def iterable_fn(n): - class CustomIterator: + class CustomIterable: def __init__(self, n): self.n = n self.i = 0 + def __aiter__(self): + return self + async def __anext__(self): if self.i == self.n: raise StopAsyncIteration self.i += 1 return self.i - 1 - return CustomIterator(n) + return CustomIterable(n) decorated = retry_(iterable_fn) @@ -731,18 +762,21 @@ async def test___call___with_iterable_send_close_throw(self, sleep): retry_ = retry_async.AsyncRetry(is_stream=True) def iterable_fn(n): - class CustomIterator: + class CustomIterable: def __init__(self, n): self.n = n self.i = 0 + def __aiter__(self): + return self + async def __anext__(self): if self.i == self.n: raise StopAsyncIteration self.i += 1 return self.i - 1 - return CustomIterator(n) + return CustomIterable(n) decorated = retry_(iterable_fn) diff --git a/tests/unit/test_retry.py 
b/tests/unit/test_retry.py index 43dcc005..b3f1bef7 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -1,3 +1,4 @@ +# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -493,6 +494,10 @@ def _generator_mock( exceptions_seen=None, ignore_sent=False, ): + """ + Helper to create a mock generator that yields a number of values + Generator can optionally raise an exception on a specific iteration + """ try: sent_in = None for i in range(num): @@ -510,6 +515,10 @@ def _generator_mock( @mock.patch("time.sleep", autospec=True) def test___call___generator_success(self, sleep): + """ + Test that a retry-decorated generator yields values as expected + This test checks a generator with no issues + """ import types import collections @@ -533,6 +542,9 @@ def test___call___generator_success(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___generator_retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ on_error = mock.Mock(return_value=None) retry_ = retry.Retry( on_error=on_error, @@ -550,6 +562,10 @@ def test___call___generator_retry(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ on_error = mock.Mock(return_value=None) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), @@ -677,6 +693,9 @@ def test___call___with_generator_return(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ retry_ = retry.Retry(is_stream=True) decorated = retry_(self._generator_mock) @@ -693,6 +712,9 @@ def 
test___call___with_generator_close(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), is_stream=True ) @@ -721,6 +743,10 @@ def test___call___with_generator_throw(self, sleep): @mock.patch("time.sleep", autospec=True) def test___call___with_is_stream(self, sleep): + """ + is_stream should determine if the target is wrapped as a + generator or as a callable + """ gen_retry_ = retry.Retry( is_stream=True, predicate=retry.if_exception_type(ValueError) ) From 0423ebe6810ba7a4da024efaa0fd78f55bd26128 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 20 Apr 2023 09:23:42 -0700 Subject: [PATCH 074/204] fixed mypy issues --- google/api_core/retry_streaming.py | 7 ++++--- google/api_core/retry_streaming_async.py | 4 +--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 2fee73af..ef4e0e39 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -58,7 +58,7 @@ def __init__( be caught. 
""" self.target_fn = target - self.active_target: Iterator = self.target_fn().__iter__() + self.active_target: Iterator[T] = self.target_fn().__iter__() self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error @@ -106,7 +106,7 @@ def _handle_exception(self, exc) -> None: "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) ) time.sleep(next_sleep) - self.active_target = self.target_fn() + self.active_target = self.target_fn().__iter__() def __next__(self) -> T: """ @@ -130,7 +130,8 @@ def close(self) -> None: - AttributeError if the active_target does not have a close() method """ if getattr(self.active_target, "close", None): - return self.active_target.close() + casted_target = cast(Generator, self.active_target) + return casted_target.close() else: raise AttributeError( "close() not implemented for {}".format(self.active_target) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index e60278ec..641949fc 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -23,13 +23,11 @@ AsyncIterable, Awaitable, Union, - Any, TypeVar, AsyncGenerator, ) import asyncio -import inspect import logging import datetime @@ -118,7 +116,7 @@ async def _handle_exception(self, exc) -> None: except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") # if time budget is exceeded, raise RetryError - if self.remaining_timeout_budget is not None: + if self.remaining_timeout_budget is not None and self.timeout is not None: if self.remaining_timeout_budget <= next_sleep: raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), From acd654635eea2885158584c659afa553baea1486 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 8 May 2023 15:59:39 -0700 Subject: [PATCH 075/204] remove wait_for in async streaming for perf reasons --- google/api_core/retry_streaming.py | 25 ++++++++-- 
google/api_core/retry_streaming_async.py | 7 ++- tests/asyncio/test_retry_async.py | 59 +++++++++--------------- tests/unit/test_retry.py | 38 ++++++++++++++- 4 files changed, 82 insertions(+), 47 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index ef4e0e39..7fa9fab5 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -97,10 +97,7 @@ def _handle_exception(self, exc) -> None: next_attempt = datetime_helpers.utcnow() + datetime.timedelta( seconds=next_sleep ) - if self.deadline < next_attempt: - raise exceptions.RetryError( - f"Deadline of {self.timeout:.1f} seconds exceeded", exc - ) from exc + self._check_timeout(next_attempt, exc) # sleep before retrying _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) @@ -108,6 +105,22 @@ def _handle_exception(self, exc) -> None: time.sleep(next_sleep) self.active_target = self.target_fn().__iter__() + def _check_timeout(self, current_time:float, source_exception: Optional[Exception] = None) -> None: + """ + Helper function to check if the timeout has been exceeded, and raise a RetryError if so. + + Args: + - current_time: the timestamp to check against the deadline + - source_exception: the exception that triggered the timeout check, if any + Raises: + - RetryError if the deadline has been exceeded + """ + if self.deadline is not None and self.deadline < current_time: + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + source_exception, + ) from source_exception + def __next__(self) -> T: """ Implement the iterator protocol. 
@@ -115,6 +128,8 @@ def __next__(self) -> T: Returns: - the next value of the active_target iterator """ + # check for expired timeouts before attempting to iterate + self._check_timeout(datetime_helpers.utcnow()) try: return next(self.active_target) except Exception as exc: @@ -152,6 +167,8 @@ def send(self, *args, **kwargs) -> T: Raises: - AttributeError if the active_target does not have a send() method """ + # check for expired timeouts before attempting to iterate + self._check_timeout(datetime_helpers.utcnow()) if getattr(self.active_target, "send", None): casted_target = cast(Generator, self.active_target) try: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 641949fc..bbce5edf 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -170,10 +170,9 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: # start the timer for the current operation start_timestamp = datetime_helpers.utcnow() # grab the next value from the active_target - next_val_routine = asyncio.wait_for( - iteration_routine, self.remaining_timeout_budget - ) - next_val = await next_val_routine + # Note: interrupting with asyncio.wait_for is expensive, + # so we only check for timeouts at the start of each iteration + next_val = await iteration_routine # subtract the time spent waiting for the next value from the # remaining timeout budget self._subtract_time_from_budget(start_timestamp) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 07ab2f03..041c151a 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -540,44 +540,6 @@ def increase_time(sleep_delay): assert last_wait == 4 assert total_wait == 7 - @pytest.mark.asyncio - async def test___call___generator_timeout_cancellations(self): - """ - Tests that a retry-decorated generator will throw a RetryError - after using its time budget - """ - 
on_error = mock.Mock(return_value=None) - retry_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(ValueError), - deadline=0.2, - is_stream=True, - ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) - # ensure generator times out when awaiting past deadline - with pytest.raises(exceptions.RetryError): - infinite_gen = retry_(self._generator_mock, on_error)(sleep_time=60) - await infinite_gen.__anext__() - # ensure time between yields isn't counted - with utcnow_patcher as patched_utcnow: - generator = retry_(self._generator_mock)(sleep_time=0.05) - assert await generator.__anext__() == 0 - patched_utcnow.return_value += datetime.timedelta(20) - assert await generator.__anext__() == 1 - # ensure timeout budget is tracked - generator = retry_(self._generator_mock)(sleep_time=0.07) - assert await generator.__anext__() == 0 - assert await generator.__anext__() == 1 - with pytest.raises(exceptions.RetryError) as exc: - await generator.__anext__() - assert "Timeout of 0.2s exceeded" in str(exc.value) - # subsequent calls should also return a RetryError - with pytest.raises(exceptions.RetryError) as exc: - await generator.__anext__() - assert "Timeout of 0.2s exceeded" in str(exc.value) - @pytest.mark.asyncio async def test___call___generator_await_cancel_retryable(self): """ @@ -794,3 +756,24 @@ async def __anext__(self): assert await retryable.__anext__() == 3 with pytest.raises(StopAsyncIteration): await retryable.__anext__() + + @pytest.mark.asyncio + async def test_iterate_stream_after_deadline(self): + """ + Streaming retries should raise RetryError when calling next or send after deadline has passed + """ + retry_ = retry_async.AsyncRetry(is_stream=True, deadline=0.01) + decorated = retry_(self._generator_mock) + generator = decorated(10) + starting_time_budget = generator.remaining_timeout_budget + assert starting_time_budget == 0.01 + await 
generator.__anext__() + # ensure budget is used on each call + assert generator.remaining_timeout_budget < starting_time_budget + # simulate using up budget + generator.remaining_timeout_budget = 0 + with pytest.raises(exceptions.RetryError): + await generator.__anext__() + with pytest.raises(exceptions.RetryError): + await generator.asend("test") + diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index b3f1bef7..0628433e 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -304,7 +304,7 @@ def test_with_delay_partial_options(self): assert retry_ is not new_retry assert new_retry._initial == 1 assert new_retry._maximum == 4 - assert new_retry._multiplier == 3 + assert n ew_retry._multiplier == 3 new_retry = retry_.with_delay(multiplier=4) assert retry_ is not new_retry @@ -768,3 +768,39 @@ def test___call___with_is_stream(self, sleep): gen = gen_retry_(wrapped)() unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] + + def test_iterate_stream_after_deadline(self): + """ + Streaming retries should raise RetryError when calling next after deadline has passed + """ + from time import sleep + retry_ = retry.Retry( + predicate=retry.if_exception_type(ValueError), + is_stream=True, + deadline=0.01, + ) + decorated = retry_(self._generator_mock) + generator = decorated(10) + next(generator) + sleep(0.02) + with pytest.raises(exceptions.RetryError): + next(generator) + + def test_iterate_stream_send_after_deadline(self): + """ + Streaming retries should raise RetryError when calling send after deadline has passed + """ + from time import sleep + retry_ = retry.Retry( + predicate=retry.if_exception_type(ValueError), + is_stream=True, + deadline=0.01, + ) + decorated = retry_(self._generator_mock) + generator = decorated(10) + next(generator) + generator.send("test") + sleep(0.02) + with pytest.raises(exceptions.RetryError): + generator.send("test") +  From b1ad4b3dc70bbd9d37341bcf1eea13ebd2a4888e Mon 
Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 8 May 2023 16:05:48 -0700 Subject: [PATCH 076/204] fixed style issues --- google/api_core/retry_streaming.py | 10 ++++++++-- tests/asyncio/test_retry_async.py | 1 - tests/unit/test_retry.py | 5 +++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 7fa9fab5..be5803a8 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -105,7 +105,9 @@ def _handle_exception(self, exc) -> None: time.sleep(next_sleep) self.active_target = self.target_fn().__iter__() - def _check_timeout(self, current_time:float, source_exception: Optional[Exception] = None) -> None: + def _check_timeout( + self, current_time: float, source_exception: Optional[Exception] = None + ) -> None: """ Helper function to check if the timeout has been exceeded, and raise a RetryError if so. @@ -115,7 +117,11 @@ def _check_timeout(self, current_time:float, source_exception: Optional[Exceptio Raises: - RetryError if the deadline has been exceeded """ - if self.deadline is not None and self.deadline < current_time: + if ( + self.deadline is not None + and self.timeout is not None + and self.deadline < current_time + ): raise exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(self.timeout), source_exception, diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 041c151a..ea63baa4 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -776,4 +776,3 @@ async def test_iterate_stream_after_deadline(self): await generator.__anext__() with pytest.raises(exceptions.RetryError): await generator.asend("test") - diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 0628433e..2016e6e6 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -304,7 +304,7 @@ def test_with_delay_partial_options(self): assert retry_ is not new_retry assert 
new_retry._initial == 1 assert new_retry._maximum == 4 - assert n ew_retry._multiplier == 3 + assert new_retry._multiplier == 3 new_retry = retry_.with_delay(multiplier=4) assert retry_ is not new_retry @@ -774,6 +774,7 @@ def test_iterate_stream_after_deadline(self): Streaming retries should raise RetryError when calling next after deadline has passed """ from time import sleep + retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), is_stream=True, @@ -791,6 +792,7 @@ def test_iterate_stream_send_after_deadline(self): Streaming retries should raise RetryError when calling send after deadline has passed """ from time import sleep + retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), is_stream=True, @@ -803,4 +805,3 @@ def test_iterate_stream_send_after_deadline(self): sleep(0.02) with pytest.raises(exceptions.RetryError): generator.send("test") -  From 8dcf67c203aff8edbc7f4634f93a1a075b05ea61 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 10 May 2023 13:41:45 -0700 Subject: [PATCH 077/204] fixed callable type annotation --- google/api_core/retry.py | 2 +- google/api_core/retry_async.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 532b948e..f9ba1ab5 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -155,7 +155,7 @@ def retry_target( higher-level retry helper :class:`Retry`. Args: - target(Callable[None, Any]): The function to call and retry. This must be a + target(Callable[[], Any]): The function to call and retry. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. 
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 2179afe4..dc031c00 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -81,7 +81,7 @@ async def retry_target( higher-level retry helper :class:`Retry`. Args: - target(Callable): The function to call and retry. This must be a + target(Callable[[], Any]): The function to call and retry. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. From 6104c59616380981b0b2510eb1ad2a49bac71aa8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 May 2023 17:01:59 -0700 Subject: [PATCH 078/204] change time calculations --- google/api_core/retry_streaming_async.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index bbce5edf..370a0653 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -29,10 +29,9 @@ import asyncio import logging -import datetime +import time -from google.api_core import datetime_helpers from google.api_core import exceptions _LOGGER = logging.getLogger(__name__) @@ -132,19 +131,19 @@ async def _handle_exception(self, exc) -> None: self.active_target = None await self._ensure_active_target() - def _subtract_time_from_budget(self, start_timestamp: datetime.datetime) -> None: + def _subtract_time_from_budget(self, start_timestamp: float) -> None: """ Subtract the time elapsed since start_timestamp from the remaining timeout budget. Args: - - start_timestamp (datetime): The time at which the last operation + - start_timestamp: The timestamp at which the last operation started. 
""" if self.remaining_timeout_budget is not None: self.remaining_timeout_budget -= ( - datetime_helpers.utcnow() - start_timestamp - ).total_seconds() + time.monotonic() - start_timestamp + ) async def _iteration_helper(self, iteration_routine: Awaitable) -> T: """ @@ -168,7 +167,7 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: ) try: # start the timer for the current operation - start_timestamp = datetime_helpers.utcnow() + start_timestamp = time.monotonic() # grab the next value from the active_target # Note: interrupting with asyncio.wait_for is expensive, # so we only check for timeouts at the start of each iteration From 43d0913832a080666d2261b9fc1abf33097fe9fb Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 12 May 2023 00:03:52 +0000 Subject: [PATCH 079/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/api_core/retry_streaming_async.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 370a0653..c7f5a9a1 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -141,9 +141,7 @@ def _subtract_time_from_budget(self, start_timestamp: float) -> None: started. 
""" if self.remaining_timeout_budget is not None: - self.remaining_timeout_budget -= ( - time.monotonic() - start_timestamp - ) + self.remaining_timeout_budget -= time.monotonic() - start_timestamp async def _iteration_helper(self, iteration_routine: Awaitable) -> T: """ From 9ba76760f5b7ba8128be85ca780811a0b9ec9087 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 19 May 2023 16:31:56 -0700 Subject: [PATCH 080/204] simplified retry_streaming_async to use wall time instead of cpu time --- google/api_core/retry_streaming_async.py | 80 ++++++++++++------------ tests/asyncio/test_retry_async.py | 9 +-- 2 files changed, 40 insertions(+), 49 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index c7f5a9a1..e490d39b 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -29,9 +29,9 @@ import asyncio import logging -import time - +import datetime +from google.api_core import datetime_helpers from google.api_core import exceptions _LOGGER = logging.getLogger(__name__) @@ -78,7 +78,35 @@ def __init__( self.sleep_generator = iter(sleep_generator) self.on_error = on_error self.timeout = timeout - self.remaining_timeout_budget = timeout if timeout else None + self.timeout_task = None + if self.timeout is not None: + self.deadline = datetime_helpers.utcnow() + datetime.timedelta( + seconds=self.timeout + ) + else: + self.deadline = None + + def _check_timeout( + self, current_time: float, source_exception: Optional[Exception] = None + ) -> None: + """ + Helper function to check if the timeout has been exceeded, and raise a RetryError if so. 
+ + Args: + - current_time: the timestamp to check against the deadline + - source_exception: the exception that triggered the timeout check, if any + Raises: + - RetryError if the deadline has been exceeded + """ + if ( + self.deadline is not None + and self.timeout is not None + and self.deadline < current_time + ): + raise exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(self.timeout), + source_exception, + ) from source_exception async def _ensure_active_target(self) -> AsyncIterator[T]: """ @@ -114,15 +142,12 @@ async def _handle_exception(self, exc) -> None: next_sleep = next(self.sleep_generator) except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") - # if time budget is exceeded, raise RetryError - if self.remaining_timeout_budget is not None and self.timeout is not None: - if self.remaining_timeout_budget <= next_sleep: - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - exc, - ) from exc - else: - self.remaining_timeout_budget -= next_sleep + # if deadline is exceeded, raise RetryError + if self.deadline is not None: + next_attempt = datetime_helpers.utcnow() + datetime.timedelta( + seconds=next_sleep + ) + self._check_timeout(next_attempt, exc) _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) ) @@ -131,18 +156,6 @@ async def _handle_exception(self, exc) -> None: self.active_target = None await self._ensure_active_target() - def _subtract_time_from_budget(self, start_timestamp: float) -> None: - """ - Subtract the time elapsed since start_timestamp from the remaining - timeout budget. - - Args: - - start_timestamp: The timestamp at which the last operation - started. - """ - if self.remaining_timeout_budget is not None: - self.remaining_timeout_budget -= time.monotonic() - start_timestamp - async def _iteration_helper(self, iteration_routine: Awaitable) -> T: """ Helper function for sharing logic between __anext__ and asend. 
@@ -154,28 +167,13 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: - The next value from the active_target iterator. """ # check for expired timeouts before attempting to iterate - if ( - self.remaining_timeout_budget is not None - and self.remaining_timeout_budget <= 0 - and self.timeout is not None - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - None, - ) + self._check_timeout(datetime_helpers.utcnow()) try: - # start the timer for the current operation - start_timestamp = time.monotonic() # grab the next value from the active_target # Note: interrupting with asyncio.wait_for is expensive, # so we only check for timeouts at the start of each iteration - next_val = await iteration_routine - # subtract the time spent waiting for the next value from the - # remaining timeout budget - self._subtract_time_from_budget(start_timestamp) - return next_val + return await iteration_routine except (Exception, asyncio.CancelledError) as exc: - self._subtract_time_from_budget(start_timestamp) await self._handle_exception(exc) # if retryable exception was handled, find the next value to return return await self.__anext__() diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index ea63baa4..6074ca4d 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -765,14 +765,7 @@ async def test_iterate_stream_after_deadline(self): retry_ = retry_async.AsyncRetry(is_stream=True, deadline=0.01) decorated = retry_(self._generator_mock) generator = decorated(10) - starting_time_budget = generator.remaining_timeout_budget - assert starting_time_budget == 0.01 await generator.__anext__() - # ensure budget is used on each call - assert generator.remaining_timeout_budget < starting_time_budget - # simulate using up budget - generator.remaining_timeout_budget = 0 + await asyncio.sleep(0.02) with pytest.raises(exceptions.RetryError): await generator.__anext__() - 
with pytest.raises(exceptions.RetryError): - await generator.asend("test") From de7b51a6e30a7684e47380f9f198b10ed057ea72 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 16 Jun 2023 17:22:16 -0700 Subject: [PATCH 081/204] removed extra CancelledError handling --- google/api_core/retry_streaming_async.py | 7 ++++--- tests/asyncio/test_retry_async.py | 18 ++---------------- 2 files changed, 6 insertions(+), 19 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index e490d39b..d8ed2624 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -170,10 +170,11 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: self._check_timeout(datetime_helpers.utcnow()) try: # grab the next value from the active_target - # Note: interrupting with asyncio.wait_for is expensive, - # so we only check for timeouts at the start of each iteration + # Note: here would be a good place to add a timeout, like asyncio.wait_for. + # But wait_for is expensive, so we only check for timeouts at the + # start of each iteration. 
return await iteration_routine - except (Exception, asyncio.CancelledError) as exc: + except Exception as exc: await self._handle_exception(exc) # if retryable exception was handled, find the next value to return return await self.__anext__() diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 6074ca4d..86937ae3 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -541,9 +541,9 @@ def increase_time(sleep_delay): assert total_wait == 7 @pytest.mark.asyncio - async def test___call___generator_await_cancel_retryable(self): + async def test___call___generator_cancellations(self): """ - cancel calls should be supported as retryable errors + cancel calls should propagate to the generator """ # test without cancel as retryable retry_ = retry_async.AsyncRetry(is_stream=True) @@ -558,20 +558,6 @@ async def test___call___generator_await_cancel_retryable(self): await task with pytest.raises(StopAsyncIteration): await generator.__anext__() - # test with cancel as retryable - retry_cancel_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(asyncio.CancelledError), - is_stream=True, - ) - generator = retry_cancel_(self._generator_mock)(sleep_time=0.2) - await generator.__anext__() == 0 - await generator.__anext__() == 1 - task = asyncio.create_task(generator.__anext__()) - await asyncio.sleep(0.05) - task.cancel() - await task - assert task.result() == 0 - await generator.__anext__() == 1 @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio From 4cdee6b09a98228455913e6516860552ff0b6582 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 20 Jun 2023 15:41:46 -0700 Subject: [PATCH 082/204] improved docstrings --- google/api_core/retry.py | 3 ++ google/api_core/retry_async.py | 5 +- google/api_core/retry_streaming.py | 68 +++++++++++++++++++++++- google/api_core/retry_streaming_async.py | 68 +++++++++++++++++++++++- 4 files changed, 138 insertions(+), 6 deletions(-) diff --git 
a/google/api_core/retry.py b/google/api_core/retry.py index f9ba1ab5..5b79de9b 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -311,6 +311,9 @@ class Retry(object): will be wrapped with retry logic, and any failed outputs will restart the stream. If False, only the input function call itself will be retried. Defaults to False. + To avoid duplicate values, retryable streams should typically be + wrapped in additional filter logic before use. For more details, see + ``google/api_core/retry_streaming.RetryableGenerator``. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index dc031c00..f441bdbd 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -173,8 +173,6 @@ class AsyncRetry: maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (float): How long to keep retrying in seconds. - When ``is_stream``, only time spent waiting on the - target or sleeping between retries is counted towards the timeout. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. @@ -184,6 +182,9 @@ class AsyncRetry: If True, the iterable will be wrapped with retry logic, and any failed outputs will restart the stream. If False, only the input function call itself will be retried. Defaults to False. + To avoid duplicate values, retryable streams should typically be + wrapped in additional filter logic before use. For more details, see + ``google.api_core.retry_streaming_async.AsyncRetryableGenerator``. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter.
""" diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index be5803a8..7aa6146a 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -30,8 +30,72 @@ class RetryableGenerator(Generator[T, Any, None]): """ - Helper class for retrying Iterator and Generator-based - streaming APIs. + Generator wrapper for retryable streaming RPCs. + RetryableGenerator will be used when initializing a retry with + ``Retry(is_stream=True)``. + + When ``is_stream=False``, the target is treated as a callable, + and will retry when the callable returns an error. When ``is_stream=True``, + the target will be treated as a callable that returns an iterable. Instead + of just wrapping the initial call in retry logic, the entire iterable is + wrapped, with each yield passing through RetryableGenerator. If any yield + in the stream raises a retryable exception, the entire stream will be + retried. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call.
For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + ``` + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. Wrap the RetryableGenerator + Alternatively, you can wrap the RetryableGenerator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + `` + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != previous_stream[stream_idx]: + raise ValueError("Stream differs from last attempt")" + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` """ def __init__( diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index d8ed2624..fe9cb55a 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -41,8 +41,72 @@ class AsyncRetryableGenerator(AsyncGenerator[T, None]): """ - Helper class for retrying AsyncIterator and AsyncGenerator-based - streaming APIs. + AsyncGenerator wrapper for retryable streaming RPCs. 
+ AsyncRetryableGenerator will be used when initializing a retry with + ``AsyncRetry(is_stream=True)``. + + When ``is_stream=False``, the target is treated as a coroutine, + and will retry when the coroutine returns an error. When ``is_stream=True``, + the target will be treated as a callable that returns an AsyncIterable. Instead + of just wrapping the initial call in retry logic, the entire iterable is + wrapped, with each yield passing through AsyncRetryableGenerator. If any yield + in the stream raises a retryable exception, the entire stream will be + retried. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + grpc call in a function that modifies the request based on what has + already been returned: + + ``` + async def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = await target(new_request) + async for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. Wrap the AsyncRetryableGenerator + Alternatively, you can wrap the AsyncRetryableGenerator itself before + passing it to the end-user to add a filter on the stream.
For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + `` + async def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + async for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != previous_stream[stream_idx]: + raise ValueError("Stream differs from last attempt")" + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` """ def __init__( From a526d659320939cd7f47ee775b250e8a3e3ab16b Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 20 Jun 2023 22:43:50 +0000 Subject: [PATCH 083/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/api_core/retry_streaming.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 7aa6146a..abfc3cef 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -51,9 +51,9 @@ class RetryableGenerator(Generator[T, Any, None]): There are two ways to build more advanced retry logic for streams: 1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. 
For example, you can wrap a - network call in a function that modifies the request based on what has + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has already been returned: ``` From 5f82355533ac1971012c738705fb6ac54886170a Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 28 Jul 2023 16:01:52 -0700 Subject: [PATCH 084/204] swapped out utcnow for more performant time.monotonic --- google/api_core/retry_streaming.py | 13 ++++--------- google/api_core/retry_streaming_async.py | 13 ++++--------- tests/asyncio/test_retry_async.py | 11 ++++++----- tests/unit/test_retry.py | 11 ++++++----- 4 files changed, 20 insertions(+), 28 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index abfc3cef..f8031b83 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -20,7 +20,6 @@ import logging import time -from google.api_core import datetime_helpers from google.api_core import exceptions _LOGGER = logging.getLogger(__name__) @@ -128,9 +127,7 @@ def __init__( self.on_error = on_error self.timeout = timeout if self.timeout is not None: - self.deadline = datetime_helpers.utcnow() + datetime.timedelta( - seconds=self.timeout - ) + self.deadline = time.monotonic() + self.timeout else: self.deadline = None @@ -158,9 +155,7 @@ def _handle_exception(self, exc) -> None: raise ValueError("Sleep generator stopped yielding sleep values") # if deadline is exceeded, raise RetryError if self.deadline is not None: - next_attempt = datetime_helpers.utcnow() + datetime.timedelta( - seconds=next_sleep - ) + next_attempt = time.monotonic() + next_sleep self._check_timeout(next_attempt, exc) # sleep before retrying _LOGGER.debug( @@ -199,7 +194,7 @@ def __next__(self) -> T: - the next value of the active_target iterator """ # check for expired 
timeouts before attempting to iterate - self._check_timeout(datetime_helpers.utcnow()) + self._check_timeout(time.monotonic()) try: return next(self.active_target) except Exception as exc: @@ -238,7 +233,7 @@ def send(self, *args, **kwargs) -> T: - AttributeError if the active_target does not have a send() method """ # check for expired timeouts before attempting to iterate - self._check_timeout(datetime_helpers.utcnow()) + self._check_timeout(time.monotonic()) if getattr(self.active_target, "send", None): casted_target = cast(Generator, self.active_target) try: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index fe9cb55a..09c9061c 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -29,9 +29,8 @@ import asyncio import logging -import datetime +import time -from google.api_core import datetime_helpers from google.api_core import exceptions _LOGGER = logging.getLogger(__name__) @@ -144,9 +143,7 @@ def __init__( self.timeout = timeout self.timeout_task = None if self.timeout is not None: - self.deadline = datetime_helpers.utcnow() + datetime.timedelta( - seconds=self.timeout - ) + self.deadline = time.monotonic() + self.timeout else: self.deadline = None @@ -208,9 +205,7 @@ async def _handle_exception(self, exc) -> None: raise ValueError("Sleep generator stopped yielding sleep values") # if deadline is exceeded, raise RetryError if self.deadline is not None: - next_attempt = datetime_helpers.utcnow() + datetime.timedelta( - seconds=next_sleep - ) + next_attempt = time.monotonic() + next_sleep self._check_timeout(next_attempt, exc) _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) @@ -231,7 +226,7 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: - The next value from the active_target iterator. 
""" # check for expired timeouts before attempting to iterate - self._check_timeout(datetime_helpers.utcnow()) + self._check_timeout(time.monotonic()) try: # grab the next value from the active_target # Note: here would be a good place to add a timeout, like asyncio.wait_for. diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 86937ae3..f39bf76a 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -502,6 +502,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): Tests that a retry-decorated generator will throw a RetryError after using the time budget """ + import time on_error = mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -512,19 +513,19 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): is_stream=True, ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", return_value=time_now, ) decorated = retry_(self._generator_mock, on_error=on_error) generator = decorated(error_on=1) - with utcnow_patcher as patched_utcnow: + with now_patcher as patched_now: # Make sure that calls to fake asyncio.sleep() also advance the mocked # time clock. 
def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + patched_now.return_value += sleep_delay sleep.side_effect = increase_time diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 2016e6e6..8b0fc159 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -566,6 +566,7 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): Tests that a retry-decorated generator will throw a RetryError after using the time budget """ + import time on_error = mock.Mock(return_value=None) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), @@ -576,18 +577,18 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): is_stream=True, ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow + timenow = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", return_value=timenow, ) decorated = retry_(self._generator_mock, on_error=on_error) generator = decorated(error_on=1) - with utcnow_patcher as patched_utcnow: + with now_patcher as patched_now: # Make sure that calls to fake time.sleep() also advance the mocked # time clock. 
def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + patched_now.return_value += sleep_delay sleep.side_effect = increase_time with pytest.raises(exceptions.RetryError): From 9900c40f2ba2c112d9e365caa88e4cb177953ddb Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 28 Jul 2023 23:15:47 +0000 Subject: [PATCH 085/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/asyncio/test_retry_async.py | 4 +++- tests/unit/test_retry.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index f39bf76a..c3dc665b 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -503,6 +503,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): after using the time budget """ import time + on_error = mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -515,7 +516,8 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) decorated = retry_(self._generator_mock, on_error=on_error) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 8b0fc159..177ba4d9 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -567,6 +567,7 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): after using the time budget """ import time + on_error = mock.Mock(return_value=None) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), @@ -579,7 +580,8 @@ def 
test___call___generator_retry_hitting_deadline(self, sleep, uniform): timenow = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=timenow, + "time.monotonic", + return_value=timenow, ) decorated = retry_(self._generator_mock, on_error=on_error) From 2c2dcbe6224187bc19bfad84527007e935044290 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 28 Jul 2023 23:16:03 +0000 Subject: [PATCH 086/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/asyncio/test_retry_async.py | 4 +++- tests/unit/test_retry.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index f39bf76a..c3dc665b 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -503,6 +503,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): after using the time budget """ import time + on_error = mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -515,7 +516,8 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) decorated = retry_(self._generator_mock, on_error=on_error) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 8b0fc159..177ba4d9 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -567,6 +567,7 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): after using the time budget """ import time + on_error = mock.Mock(return_value=None) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), @@ -579,7 +580,8 @@ def 
test___call___generator_retry_hitting_deadline(self, sleep, uniform): timenow = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=timenow, + "time.monotonic", + return_value=timenow, ) decorated = retry_(self._generator_mock, on_error=on_error) From 67068ac72ca41ba2b49130f4adc162ff6d66dd98 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 14 Aug 2023 16:05:00 -0700 Subject: [PATCH 087/204] don't check timeout on each yield by default --- google/api_core/retry_streaming.py | 14 ++++-- google/api_core/retry_streaming_async.py | 12 ++++- tests/asyncio/test_retry_async.py | 41 ++++++++++++---- tests/unit/test_retry.py | 61 ++++++++++++------------ 4 files changed, 83 insertions(+), 45 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index f8031b83..1f85ad51 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -16,7 +16,6 @@ from typing import Callable, Optional, Iterable, Iterator, Generator, TypeVar, Any, cast -import datetime import logging import time @@ -104,6 +103,7 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + check_timeout_on_yield=False, ): """ Args: @@ -119,6 +119,11 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + check_timeout_on_yield: If True, the timeout value will be checked + after each yield. If the timeout has been exceeded, the generator + will raise a RetryError. Note that this adds an overhead to each + yield, so it is preferred to add the timeout logic to the wrapped + stream when possible. 
""" self.target_fn = target self.active_target: Iterator[T] = self.target_fn().__iter__() @@ -130,6 +135,7 @@ def __init__( self.deadline = time.monotonic() + self.timeout else: self.deadline = None + self._check_timeout_on_yield = check_timeout_on_yield def __iter__(self) -> Generator[T, Any, None]: """ @@ -194,7 +200,8 @@ def __next__(self) -> T: - the next value of the active_target iterator """ # check for expired timeouts before attempting to iterate - self._check_timeout(time.monotonic()) + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) try: return next(self.active_target) except Exception as exc: @@ -233,7 +240,8 @@ def send(self, *args, **kwargs) -> T: - AttributeError if the active_target does not have a send() method """ # check for expired timeouts before attempting to iterate - self._check_timeout(time.monotonic()) + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) if getattr(self.active_target, "send", None): casted_target = cast(Generator, self.active_target) try: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 09c9061c..b92b8cc3 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -118,6 +118,7 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + check_timeout_on_yield=False, ): """ Args: @@ -133,6 +134,11 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + check_timeout_on_yield: If True, the timeout value will be checked + after each yield. If the timeout has been exceeded, the generator + will raise a RetryError. Note that this adds an overhead to each + yield, so it is preferred to add the timeout logic to the wrapped + stream when possible. 
""" self.target_fn = target # active target must be populated in an async context @@ -146,6 +152,7 @@ def __init__( self.deadline = time.monotonic() + self.timeout else: self.deadline = None + self._check_timeout_on_yield = check_timeout_on_yield def _check_timeout( self, current_time: float, source_exception: Optional[Exception] = None @@ -176,7 +183,7 @@ async def _ensure_active_target(self) -> AsyncIterator[T]: Returns: - The active_target iterable """ - if not self.active_target: + if self.active_target is None: new_iterable = self.target_fn() if isinstance(new_iterable, Awaitable): new_iterable = await new_iterable @@ -226,7 +233,8 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: - The next value from the active_target iterator. """ # check for expired timeouts before attempting to iterate - self._check_timeout(time.monotonic()) + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) try: # grab the next value from the active_target # Note: here would be a good place to add a timeout, like asyncio.wait_for. diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index f39bf76a..c556d916 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -744,15 +744,38 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await retryable.__anext__() + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test_iterate_stream_after_deadline(self): + async def test_yield_stream_after_deadline(self, sleep, uniform): """ - Streaming retries should raise RetryError when calling next or send after deadline has passed + By default, if the deadline is hit between yields, the generator will continue. + + There is a flag that should cause the wrapper to test for the deadline after + each yield. 
""" - retry_ = retry_async.AsyncRetry(is_stream=True, deadline=0.01) - decorated = retry_(self._generator_mock) - generator = decorated(10) - await generator.__anext__() - await asyncio.sleep(0.02) - with pytest.raises(exceptions.RetryError): - await generator.__anext__() + import time + from google.api_core.retry_streaming_async import AsyncRetryableGenerator + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", return_value=time_now, + ) + + with now_patcher as patched_now: + no_check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + assert no_check._check_timeout_on_yield is False + check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + assert check._check_timeout_on_yield is True + + # first yield should be fine + await check.__anext__() + await no_check.__anext__() + + # simulate a delay before next yield + patched_now.return_value += timeout + 1 + + # second yield should raise when check_timeout_on_yield is True + with pytest.raises(exceptions.RetryError): + await check.__anext__() + await no_check.__anext__() diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 8b0fc159..52903d6b 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -770,39 +770,38 @@ def test___call___with_is_stream(self, sleep): unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] - def test_iterate_stream_after_deadline(self): - """ - Streaming retries should raise RetryError when calling next after deadline has passed + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test_yield_stream_after_deadline(self, sleep, uniform): """ - from time import sleep + By default, if the deadline is hit between yields, the generator will continue. 
- retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), - is_stream=True, - deadline=0.01, + There is a flag that should cause the wrapper to test for the deadline after + each yield. + """ + import time + from google.api_core.retry_streaming import RetryableGenerator + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", return_value=time_now, ) - decorated = retry_(self._generator_mock) - generator = decorated(10) - next(generator) - sleep(0.02) - with pytest.raises(exceptions.RetryError): - next(generator) - def test_iterate_stream_send_after_deadline(self): - """ - Streaming retries should raise RetryError when calling send after deadline has passed - """ - from time import sleep + with now_patcher as patched_now: + no_check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + assert no_check._check_timeout_on_yield is False + check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + assert check._check_timeout_on_yield is True - retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), - is_stream=True, - deadline=0.01, - ) - decorated = retry_(self._generator_mock) - generator = decorated(10) - next(generator) - generator.send("test") - sleep(0.02) - with pytest.raises(exceptions.RetryError): - generator.send("test") + # first yield should be fine + next(check) + next(no_check) + + # simulate a delay before next yield + patched_now.return_value += timeout + 1 + + # second yield should raise when check_timeout_on_yield is True + with pytest.raises(exceptions.RetryError): + next(check) + next(no_check) From 54325bc1009b415da61bdca1d4500f5708898894 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 09:36:56 -0700 Subject: [PATCH 088/204] added exception building logic --- google/api_core/retry_streaming.py | 85 +++++++++++---- google/api_core/retry_streaming_async.py | 53 ++++++---- 
tests/asyncio/test_retry_async.py | 128 ++++++++++++++++++++++- tests/unit/test_retry.py | 127 ++++++++++++++++++++-- 4 files changed, 340 insertions(+), 53 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 1f85ad51..fad89b25 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -18,6 +18,7 @@ import logging import time +from functools import partial from google.api_core import exceptions @@ -26,6 +27,34 @@ T = TypeVar("T") +def _build_timeout_error( + exc_list: list[Exception], is_timeout: bool, timeout_val: float +) -> tuple[Exception, Exception | None]: + """ + Default exception_factory implementation. Builds an exception after the retry fails + + Args: + - exc_list (list[Exception]): list of exceptions that occurred during the retry + - is_timeout (bool): whether the failure is due to the timeout value being exceeded, + or due to a non-retryable exception + - timeout_val (float): the original timeout value for the retry, for use in the exception message + + Returns: + - tuple[Exception, Exception|None]: a tuple of the exception to be raised, and the cause exception if any + """ + src_exc = exc_list[-1] if exc_list else None + if is_timeout: + return ( + exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), + src_exc, + ), + src_exc, + ) + else: + return exc_list[-1], None + + class RetryableGenerator(Generator[T, Any, None]): """ Generator wrapper for retryable streaming RPCs. @@ -103,6 +132,9 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Optional[ + Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] + ] = None, check_timeout_on_yield=False, ): """ @@ -119,23 +151,34 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. 
+ exception_factory: A function that creates an exception to raise + when the retry fails. The function takes three arguments: + a list of exceptions that occurred during the retry, a boolean + indicating whether the failure is due to retry timeout, and the original + timeout value (for building a helpful error message). It is expected to + return a tuple of the exception to raise and (optionally) a source + exception to chain to the raised exception. + If not provided, a default exception will be raised. check_timeout_on_yield: If True, the timeout value will be checked after each yield. If the timeout has been exceeded, the generator - will raise a RetryError. Note that this adds an overhead to each - yield, so it is preferred to add the timeout logic to the wrapped - stream when possible. + will raise an exception from exception_factory. + Note that this adds an overhead to each yield, so it is better + to add the timeout logic to the wrapped stream when possible. """ self.target_fn = target self.active_target: Iterator[T] = self.target_fn().__iter__() self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - self.timeout = timeout - if self.timeout is not None: - self.deadline = time.monotonic() + self.timeout + if timeout is not None: + self.deadline = time.monotonic() + timeout else: self.deadline = None self._check_timeout_on_yield = check_timeout_on_yield + self.error_list: list[Exception] = [] + self._exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) def __iter__(self) -> Generator[T, Any, None]: """ @@ -149,8 +192,12 @@ def _handle_exception(self, exc) -> None: check if it is retryable. If so, create a new active_target and continue iterating. If not, raise the exception. 
""" + self.error_list.append(exc) if not self.predicate(exc): - raise exc + final_exc, src_exc = self._exc_factory( + exc_list=self.error_list, is_timeout=False + ) + raise final_exc from src_exc else: # run on_error callback if provided if self.on_error: @@ -159,10 +206,10 @@ def _handle_exception(self, exc) -> None: next_sleep = next(self.sleep_generator) except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") - # if deadline is exceeded, raise RetryError + # if deadline is exceeded, raise exception if self.deadline is not None: next_attempt = time.monotonic() + next_sleep - self._check_timeout(next_attempt, exc) + self._check_timeout(next_attempt) # sleep before retrying _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) @@ -170,27 +217,19 @@ def _handle_exception(self, exc) -> None: time.sleep(next_sleep) self.active_target = self.target_fn().__iter__() - def _check_timeout( - self, current_time: float, source_exception: Optional[Exception] = None - ) -> None: + def _check_timeout(self, current_time: float) -> None: """ - Helper function to check if the timeout has been exceeded, and raise a RetryError if so. + Helper function to check if the timeout has been exceeded, and raise an exception if so. 
Args: - current_time: the timestamp to check against the deadline - source_exception: the exception that triggered the timeout check, if any Raises: - - RetryError if the deadline has been exceeded + - Exception from exception_factory if the timeout has been exceeded """ - if ( - self.deadline is not None - and self.timeout is not None - and self.deadline < current_time - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - source_exception, - ) from source_exception + if self.deadline is not None and self.deadline < current_time: + exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) + raise exc from src_exc def __next__(self) -> T: """ diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index b92b8cc3..dd90dd66 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -30,8 +30,10 @@ import asyncio import logging import time +from functools import partial from google.api_core import exceptions +from google.api_core.retry_streaming import _build_timeout_error _LOGGER = logging.getLogger(__name__) @@ -118,6 +120,9 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Optional[ + Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] + ] = None, check_timeout_on_yield=False, ): """ @@ -134,11 +139,19 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + exception_factory: A function that creates an exception to raise + when the retry fails. The function takes three arguments: + a list of exceptions that occurred during the retry, a boolean + indicating whether the failure is due to retry timeout, and the original + timeout value (for building a helpful error message). 
It is expected to + return a tuple of the exception to raise and (optionally) a source + exception to chain to the raised exception. + If not provided, a default exception will be raised. check_timeout_on_yield: If True, the timeout value will be checked after each yield. If the timeout has been exceeded, the generator - will raise a RetryError. Note that this adds an overhead to each - yield, so it is preferred to add the timeout logic to the wrapped - stream when possible. + will raise an exception from exception_factory. + Note that this adds an overhead to each yield, so it is better + to add the timeout logic to the wrapped stream when possible. """ self.target_fn = target # active target must be populated in an async context @@ -146,35 +159,31 @@ def __init__( self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - self.timeout = timeout - self.timeout_task = None - if self.timeout is not None: - self.deadline = time.monotonic() + self.timeout + if timeout is not None: + self.deadline = time.monotonic() + timeout else: self.deadline = None self._check_timeout_on_yield = check_timeout_on_yield + self.error_list: list[Exception] = [] + self._exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) def _check_timeout( self, current_time: float, source_exception: Optional[Exception] = None ) -> None: """ - Helper function to check if the timeout has been exceeded, and raise a RetryError if so. + Helper function to check if the timeout has been exceeded, and raise an exception if so. 
Args: - current_time: the timestamp to check against the deadline - source_exception: the exception that triggered the timeout check, if any Raises: - - RetryError if the deadline has been exceeded + - Exception from exception_factory if the timeout has been exceeded """ - if ( - self.deadline is not None - and self.timeout is not None - and self.deadline < current_time - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - source_exception, - ) from source_exception + if self.deadline is not None and self.deadline < current_time: + exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) + raise exc from src_exc async def _ensure_active_target(self) -> AsyncIterator[T]: """ @@ -200,8 +209,12 @@ async def _handle_exception(self, exc) -> None: check if it is retryable. If so, create a new active_target and continue iterating. If not, raise the exception. """ + self.error_list.append(exc) if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise exc + final_exc, src_exc = self._exc_factory( + exc_list=self.error_list, is_timeout=False + ) + raise final_exc from src_exc else: # run on_error callback if provided if self.on_error: @@ -210,7 +223,7 @@ async def _handle_exception(self, exc) -> None: next_sleep = next(self.sleep_generator) except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") - # if deadline is exceeded, raise RetryError + # if deadline is exceeded, raise exception if self.deadline is not None: next_attempt = time.monotonic() + next_sleep self._check_timeout(next_attempt, exc) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index c556d916..8b7edfc1 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -439,6 +439,7 @@ async def _generator_mock( await asyncio.sleep(sleep_time) if error_on and i == error_on: raise ValueError("generator mock error") + sent_in = yield (sent_in 
if sent_in else i) if ignore_sent: sent_in = None @@ -503,6 +504,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): after using the time budget """ import time + on_error = mock.Mock() retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), @@ -515,7 +517,8 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) decorated = retry_(self._generator_mock, on_error=on_error) @@ -756,16 +759,30 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): """ import time from google.api_core.retry_streaming_async import AsyncRetryableGenerator + timeout = 2 time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) with now_patcher as patched_now: - no_check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + no_check = AsyncRetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=False, + ) assert no_check._check_timeout_on_yield is False - check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + check = AsyncRetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=True, + ) assert check._check_timeout_on_yield is True # first yield should be fine @@ -779,3 +796,106 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): with pytest.raises(exceptions.RetryError): await check.__anext__() await no_check.__anext__() + + @pytest.mark.asyncio + async def test_generator_error_list(self): + """ + generator should keep history of errors seen + """ + retry_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(ValueError), 
is_stream=True + ) + decorated = retry_(self._generator_mock) + + generator = decorated(1) + err1 = ValueError("test") + await generator.athrow(err1) + assert generator.error_list == [err1] + err2 = ValueError("test2") + await generator.athrow(err2) + assert generator.error_list == [err1, err2] + + @pytest.mark.asyncio + async def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry_streaming_async import AsyncRetryableGenerator + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is False + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = AsyncRetryableGenerator( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + @pytest.mark.asyncio + async def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry_streaming_async import AsyncRetryableGenerator + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + 
with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is True + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = AsyncRetryableGenerator( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.__anext__() + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 52903d6b..2dd7e0dd 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -567,6 +567,7 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): after using the time budget """ import time + on_error = mock.Mock(return_value=None) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), @@ -579,7 +580,8 @@ def test___call___generator_retry_hitting_deadline(self, sleep, uniform): timenow = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=timenow, + "time.monotonic", + return_value=timenow, ) decorated = retry_(self._generator_mock, on_error=on_error) @@ -772,8 +774,7 @@ def test___call___with_is_stream(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def 
test_yield_stream_after_deadline(self, sleep, uniform): + def test_yield_stream_after_deadline(self, sleep, uniform): """ By default, if the deadline is hit between yields, the generator will continue. @@ -782,16 +783,30 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): """ import time from google.api_core.retry_streaming import RetryableGenerator + timeout = 2 time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) with now_patcher as patched_now: - no_check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + no_check = RetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=False, + ) assert no_check._check_timeout_on_yield is False - check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + check = RetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=True, + ) assert check._check_timeout_on_yield is True # first yield should be fine @@ -805,3 +820,103 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): with pytest.raises(exceptions.RetryError): next(check) next(no_check) + + def test_generator_error_list(self): + """ + generator should keep history of errors seen + """ + retry_ = retry.Retry( + predicate=retry.if_exception_type(ValueError), is_stream=True + ) + decorated = retry_(self._generator_mock) + + generator = decorated(1) + err1 = ValueError("test") + generator.throw(err1) + assert generator.error_list == [err1] + err2 = ValueError("test2") + generator.throw(err2) + assert generator.error_list == [err1, err2] + + def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry_streaming import 
RetryableGenerator + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is False + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = RetryableGenerator( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry_streaming import RetryableGenerator + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is True + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = RetryableGenerator( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # 
trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + next(generator) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err From bafa18bd4220a1a216c11790529702eb53ac5997 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 09:38:40 -0700 Subject: [PATCH 089/204] added type hint to check_timeout_on_yield --- google/api_core/retry_streaming.py | 2 +- google/api_core/retry_streaming_async.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index fad89b25..265a1525 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -135,7 +135,7 @@ def __init__( exception_factory: Optional[ Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] ] = None, - check_timeout_on_yield=False, + check_timeout_on_yield: bool = False, ): """ Args: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index dd90dd66..5f00fa5d 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -32,7 +32,6 @@ import time from functools import partial -from google.api_core import exceptions from google.api_core.retry_streaming import _build_timeout_error _LOGGER = logging.getLogger(__name__) @@ -123,7 +122,7 @@ def __init__( exception_factory: Optional[ Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] ] = None, - check_timeout_on_yield=False, + check_timeout_on_yield: bool = False, ): """ Args: From 2ae2a327303aeecc724ed7ee6d2c3ea246d39560 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 09:52:00 -0700 Subject: [PATCH 
090/204] simplified ensure_tareget; fixed mypy issues --- google/api_core/retry_streaming.py | 39 ++++++++------------ google/api_core/retry_streaming_async.py | 45 +++++++++++------------- 2 files changed, 35 insertions(+), 49 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 265a1525..830d87c3 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,7 +14,7 @@ """Helpers for retries for streaming APIs.""" -from typing import Callable, Optional, Iterable, Iterator, Generator, TypeVar, Any, cast +from typing import Callable, Optional, List, Tuple, Iterable, Iterator, Generator, TypeVar, Any, cast import logging import time @@ -27,9 +27,7 @@ T = TypeVar("T") -def _build_timeout_error( - exc_list: list[Exception], is_timeout: bool, timeout_val: float -) -> tuple[Exception, Exception | None]: +def _build_timeout_error(exc_list:List[Exception], is_timeout:bool, timeout_val:float) -> Tuple[Exception, Optional[Exception]]: """ Default exception_factory implementation. 
Builds an exception after the retry fails @@ -44,13 +42,10 @@ def _build_timeout_error( """ src_exc = exc_list[-1] if exc_list else None if is_timeout: - return ( - exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout_val), - src_exc, - ), + return exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), src_exc, - ) + ), src_exc else: return exc_list[-1], None @@ -132,9 +127,7 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[ - Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] - ] = None, + exception_factory: Optional[Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]]] = None, check_timeout_on_yield: bool = False, ): """ @@ -170,15 +163,10 @@ def __init__( self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - if timeout is not None: - self.deadline = time.monotonic() + timeout - else: - self.deadline = None + self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None self._check_timeout_on_yield = check_timeout_on_yield - self.error_list: list[Exception] = [] - self._exc_factory = partial( - exception_factory or _build_timeout_error, timeout_val=timeout - ) + self.error_list : List[Exception] = [] + self._exc_factory = partial(exception_factory or _build_timeout_error, timeout_val=timeout) def __iter__(self) -> Generator[T, Any, None]: """ @@ -194,9 +182,7 @@ def _handle_exception(self, exc) -> None: """ self.error_list.append(exc) if not self.predicate(exc): - final_exc, src_exc = self._exc_factory( - exc_list=self.error_list, is_timeout=False - ) + final_exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=False) raise final_exc from src_exc else: # run on_error callback if provided @@ -227,7 +213,10 @@ def _check_timeout(self, current_time: float) -> None: Raises: - 
Exception from exception_factory if the timeout has been exceeded """ - if self.deadline is not None and self.deadline < current_time: + if ( + self.deadline is not None + and self.deadline < current_time + ): exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) raise exc from src_exc diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 5f00fa5d..62973d99 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -19,6 +19,8 @@ Callable, Optional, Iterable, + List, + Tuple, AsyncIterator, AsyncIterable, Awaitable, @@ -120,7 +122,7 @@ def __init__( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] + Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, check_timeout_on_yield: bool = False, ): @@ -158,12 +160,9 @@ def __init__( self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - if timeout is not None: - self.deadline = time.monotonic() + timeout - else: - self.deadline = None + self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None self._check_timeout_on_yield = check_timeout_on_yield - self.error_list: list[Exception] = [] + self.error_list: List[Exception] = [] self._exc_factory = partial( exception_factory or _build_timeout_error, timeout_val=timeout ) @@ -184,19 +183,14 @@ def _check_timeout( exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) raise exc from src_exc - async def _ensure_active_target(self) -> AsyncIterator[T]: + async def _new_target(self) -> AsyncIterator[T]: """ - Ensure that the active target is populated and ready to be iterated over. - - Returns: - - The active_target iterable + Creates and returns a new target iterator from the target function. 
""" - if self.active_target is None: - new_iterable = self.target_fn() - if isinstance(new_iterable, Awaitable): - new_iterable = await new_iterable - self.active_target = new_iterable.__aiter__() - return self.active_target + new_iterable = self.target_fn() + if isinstance(new_iterable, Awaitable): + new_iterable = await new_iterable + return new_iterable.__aiter__() def __aiter__(self) -> AsyncIterator[T]: """Implement the async iterator protocol.""" @@ -231,8 +225,7 @@ async def _handle_exception(self, exc) -> None: ) # sleep before retrying await asyncio.sleep(next_sleep) - self.active_target = None - await self._ensure_active_target() + self.active_target = await self._new_target() async def _iteration_helper(self, iteration_routine: Awaitable) -> T: """ @@ -265,9 +258,10 @@ async def __anext__(self) -> T: Returns: - The next value from the active_target iterator. """ - iterable = await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() return await self._iteration_helper( - iterable.__anext__(), + self.active_target.__anext__(), ) async def aclose(self) -> None: @@ -277,7 +271,8 @@ async def aclose(self) -> None: Raises: - AttributeError if the active_target does not have a aclose() method """ - await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() if getattr(self.active_target, "aclose", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) return await casted_target.aclose() @@ -302,7 +297,8 @@ async def asend(self, *args, **kwargs) -> T: Raises: - AttributeError if the active_target does not have a asend() method """ - await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() if getattr(self.active_target, "asend", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) return await self._iteration_helper(casted_target.asend(*args, **kwargs)) @@ 
-325,7 +321,8 @@ async def athrow(self, *args, **kwargs) -> T: Raises: - AttributeError if the active_target does not have a athrow() method """ - await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() if getattr(self.active_target, "athrow", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) try: From 9cadd63ad125b6559c0b51df217f8a62b2fc54ec Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 14 Aug 2023 16:05:00 -0700 Subject: [PATCH 091/204] don't check timeout on each yield by default --- google/api_core/retry_streaming.py | 14 ++++-- google/api_core/retry_streaming_async.py | 12 ++++- tests/asyncio/test_retry_async.py | 41 ++++++++++++---- tests/unit/test_retry.py | 61 ++++++++++++------------ 4 files changed, 83 insertions(+), 45 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index f8031b83..1f85ad51 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -16,7 +16,6 @@ from typing import Callable, Optional, Iterable, Iterator, Generator, TypeVar, Any, cast -import datetime import logging import time @@ -104,6 +103,7 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + check_timeout_on_yield=False, ): """ Args: @@ -119,6 +119,11 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + check_timeout_on_yield: If True, the timeout value will be checked + after each yield. If the timeout has been exceeded, the generator + will raise a RetryError. Note that this adds an overhead to each + yield, so it is preferred to add the timeout logic to the wrapped + stream when possible. 
""" self.target_fn = target self.active_target: Iterator[T] = self.target_fn().__iter__() @@ -130,6 +135,7 @@ def __init__( self.deadline = time.monotonic() + self.timeout else: self.deadline = None + self._check_timeout_on_yield = check_timeout_on_yield def __iter__(self) -> Generator[T, Any, None]: """ @@ -194,7 +200,8 @@ def __next__(self) -> T: - the next value of the active_target iterator """ # check for expired timeouts before attempting to iterate - self._check_timeout(time.monotonic()) + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) try: return next(self.active_target) except Exception as exc: @@ -233,7 +240,8 @@ def send(self, *args, **kwargs) -> T: - AttributeError if the active_target does not have a send() method """ # check for expired timeouts before attempting to iterate - self._check_timeout(time.monotonic()) + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) if getattr(self.active_target, "send", None): casted_target = cast(Generator, self.active_target) try: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 09c9061c..b92b8cc3 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -118,6 +118,7 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + check_timeout_on_yield=False, ): """ Args: @@ -133,6 +134,11 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + check_timeout_on_yield: If True, the timeout value will be checked + after each yield. If the timeout has been exceeded, the generator + will raise a RetryError. Note that this adds an overhead to each + yield, so it is preferred to add the timeout logic to the wrapped + stream when possible. 
""" self.target_fn = target # active target must be populated in an async context @@ -146,6 +152,7 @@ def __init__( self.deadline = time.monotonic() + self.timeout else: self.deadline = None + self._check_timeout_on_yield = check_timeout_on_yield def _check_timeout( self, current_time: float, source_exception: Optional[Exception] = None @@ -176,7 +183,7 @@ async def _ensure_active_target(self) -> AsyncIterator[T]: Returns: - The active_target iterable """ - if not self.active_target: + if self.active_target is None: new_iterable = self.target_fn() if isinstance(new_iterable, Awaitable): new_iterable = await new_iterable @@ -226,7 +233,8 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: - The next value from the active_target iterator. """ # check for expired timeouts before attempting to iterate - self._check_timeout(time.monotonic()) + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) try: # grab the next value from the active_target # Note: here would be a good place to add a timeout, like asyncio.wait_for. diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index c3dc665b..1b309504 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -746,15 +746,38 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await retryable.__anext__() + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test_iterate_stream_after_deadline(self): + async def test_yield_stream_after_deadline(self, sleep, uniform): """ - Streaming retries should raise RetryError when calling next or send after deadline has passed + By default, if the deadline is hit between yields, the generator will continue. + + There is a flag that should cause the wrapper to test for the deadline after + each yield. 
""" - retry_ = retry_async.AsyncRetry(is_stream=True, deadline=0.01) - decorated = retry_(self._generator_mock) - generator = decorated(10) - await generator.__anext__() - await asyncio.sleep(0.02) - with pytest.raises(exceptions.RetryError): - await generator.__anext__() + import time + from google.api_core.retry_streaming_async import AsyncRetryableGenerator + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", return_value=time_now, + ) + + with now_patcher as patched_now: + no_check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + assert no_check._check_timeout_on_yield is False + check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + assert check._check_timeout_on_yield is True + + # first yield should be fine + await check.__anext__() + await no_check.__anext__() + + # simulate a delay before next yield + patched_now.return_value += timeout + 1 + + # second yield should raise when check_timeout_on_yield is True + with pytest.raises(exceptions.RetryError): + await check.__anext__() + await no_check.__anext__() diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 177ba4d9..3e1dd7c1 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -772,39 +772,38 @@ def test___call___with_is_stream(self, sleep): unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] - def test_iterate_stream_after_deadline(self): - """ - Streaming retries should raise RetryError when calling next after deadline has passed + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test_yield_stream_after_deadline(self, sleep, uniform): """ - from time import sleep + By default, if the deadline is hit between yields, the generator will continue. 
- retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), - is_stream=True, - deadline=0.01, + There is a flag that should cause the wrapper to test for the deadline after + each yield. + """ + import time + from google.api_core.retry_streaming import RetryableGenerator + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", return_value=time_now, ) - decorated = retry_(self._generator_mock) - generator = decorated(10) - next(generator) - sleep(0.02) - with pytest.raises(exceptions.RetryError): - next(generator) - def test_iterate_stream_send_after_deadline(self): - """ - Streaming retries should raise RetryError when calling send after deadline has passed - """ - from time import sleep + with now_patcher as patched_now: + no_check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + assert no_check._check_timeout_on_yield is False + check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + assert check._check_timeout_on_yield is True - retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), - is_stream=True, - deadline=0.01, - ) - decorated = retry_(self._generator_mock) - generator = decorated(10) - next(generator) - generator.send("test") - sleep(0.02) - with pytest.raises(exceptions.RetryError): - generator.send("test") + # first yield should be fine + next(check) + next(no_check) + + # simulate a delay before next yield + patched_now.return_value += timeout + 1 + + # second yield should raise when check_timeout_on_yield is True + with pytest.raises(exceptions.RetryError): + next(check) + next(no_check) From c9ef1d586e227904fc2d7dbcec62265345a3df59 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 09:36:56 -0700 Subject: [PATCH 092/204] added exception building logic --- google/api_core/retry_streaming.py | 85 +++++++++++----- google/api_core/retry_streaming_async.py | 53 ++++++---- 
tests/asyncio/test_retry_async.py | 124 ++++++++++++++++++++++- tests/unit/test_retry.py | 123 +++++++++++++++++++++- 4 files changed, 334 insertions(+), 51 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 1f85ad51..fad89b25 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -18,6 +18,7 @@ import logging import time +from functools import partial from google.api_core import exceptions @@ -26,6 +27,34 @@ T = TypeVar("T") +def _build_timeout_error( + exc_list: list[Exception], is_timeout: bool, timeout_val: float +) -> tuple[Exception, Exception | None]: + """ + Default exception_factory implementation. Builds an exception after the retry fails + + Args: + - exc_list (list[Exception]): list of exceptions that occurred during the retry + - is_timeout (bool): whether the failure is due to the timeout value being exceeded, + or due to a non-retryable exception + - timeout_val (float): the original timeout value for the retry, for use in the exception message + + Returns: + - tuple[Exception, Exception|None]: a tuple of the exception to be raised, and the cause exception if any + """ + src_exc = exc_list[-1] if exc_list else None + if is_timeout: + return ( + exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), + src_exc, + ), + src_exc, + ) + else: + return exc_list[-1], None + + class RetryableGenerator(Generator[T, Any, None]): """ Generator wrapper for retryable streaming RPCs. @@ -103,6 +132,9 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Optional[ + Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] + ] = None, check_timeout_on_yield=False, ): """ @@ -119,23 +151,34 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. 
+ exception_factory: A function that creates an exception to raise + when the retry fails. The function takes three arguments: + a list of exceptions that occurred during the retry, a boolean + indicating whether the failure is due to retry timeout, and the original + timeout value (for building a helpful error message). It is expected to + return a tuple of the exception to raise and (optionally) a source + exception to chain to the raised exception. + If not provided, a default exception will be raised. check_timeout_on_yield: If True, the timeout value will be checked after each yield. If the timeout has been exceeded, the generator - will raise a RetryError. Note that this adds an overhead to each - yield, so it is preferred to add the timeout logic to the wrapped - stream when possible. + will raise an exception from exception_factory. + Note that this adds an overhead to each yield, so it is better + to add the timeout logic to the wrapped stream when possible. """ self.target_fn = target self.active_target: Iterator[T] = self.target_fn().__iter__() self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - self.timeout = timeout - if self.timeout is not None: - self.deadline = time.monotonic() + self.timeout + if timeout is not None: + self.deadline = time.monotonic() + timeout else: self.deadline = None self._check_timeout_on_yield = check_timeout_on_yield + self.error_list: list[Exception] = [] + self._exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) def __iter__(self) -> Generator[T, Any, None]: """ @@ -149,8 +192,12 @@ def _handle_exception(self, exc) -> None: check if it is retryable. If so, create a new active_target and continue iterating. If not, raise the exception. 
""" + self.error_list.append(exc) if not self.predicate(exc): - raise exc + final_exc, src_exc = self._exc_factory( + exc_list=self.error_list, is_timeout=False + ) + raise final_exc from src_exc else: # run on_error callback if provided if self.on_error: @@ -159,10 +206,10 @@ def _handle_exception(self, exc) -> None: next_sleep = next(self.sleep_generator) except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") - # if deadline is exceeded, raise RetryError + # if deadline is exceeded, raise exception if self.deadline is not None: next_attempt = time.monotonic() + next_sleep - self._check_timeout(next_attempt, exc) + self._check_timeout(next_attempt) # sleep before retrying _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) @@ -170,27 +217,19 @@ def _handle_exception(self, exc) -> None: time.sleep(next_sleep) self.active_target = self.target_fn().__iter__() - def _check_timeout( - self, current_time: float, source_exception: Optional[Exception] = None - ) -> None: + def _check_timeout(self, current_time: float) -> None: """ - Helper function to check if the timeout has been exceeded, and raise a RetryError if so. + Helper function to check if the timeout has been exceeded, and raise an exception if so. 
Args: - current_time: the timestamp to check against the deadline - source_exception: the exception that triggered the timeout check, if any Raises: - - RetryError if the deadline has been exceeded + - Exception from exception_factory if the timeout has been exceeded """ - if ( - self.deadline is not None - and self.timeout is not None - and self.deadline < current_time - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - source_exception, - ) from source_exception + if self.deadline is not None and self.deadline < current_time: + exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) + raise exc from src_exc def __next__(self) -> T: """ diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index b92b8cc3..dd90dd66 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -30,8 +30,10 @@ import asyncio import logging import time +from functools import partial from google.api_core import exceptions +from google.api_core.retry_streaming import _build_timeout_error _LOGGER = logging.getLogger(__name__) @@ -118,6 +120,9 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Optional[ + Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] + ] = None, check_timeout_on_yield=False, ): """ @@ -134,11 +139,19 @@ def __init__( on_error: A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + exception_factory: A function that creates an exception to raise + when the retry fails. The function takes three arguments: + a list of exceptions that occurred during the retry, a boolean + indicating whether the failure is due to retry timeout, and the original + timeout value (for building a helpful error message). 
It is expected to + return a tuple of the exception to raise and (optionally) a source + exception to chain to the raised exception. + If not provided, a default exception will be raised. check_timeout_on_yield: If True, the timeout value will be checked after each yield. If the timeout has been exceeded, the generator - will raise a RetryError. Note that this adds an overhead to each - yield, so it is preferred to add the timeout logic to the wrapped - stream when possible. + will raise an exception from exception_factory. + Note that this adds an overhead to each yield, so it is better + to add the timeout logic to the wrapped stream when possible. """ self.target_fn = target # active target must be populated in an async context @@ -146,35 +159,31 @@ def __init__( self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - self.timeout = timeout - self.timeout_task = None - if self.timeout is not None: - self.deadline = time.monotonic() + self.timeout + if timeout is not None: + self.deadline = time.monotonic() + timeout else: self.deadline = None self._check_timeout_on_yield = check_timeout_on_yield + self.error_list: list[Exception] = [] + self._exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) def _check_timeout( self, current_time: float, source_exception: Optional[Exception] = None ) -> None: """ - Helper function to check if the timeout has been exceeded, and raise a RetryError if so. + Helper function to check if the timeout has been exceeded, and raise an exception if so. 
Args: - current_time: the timestamp to check against the deadline - source_exception: the exception that triggered the timeout check, if any Raises: - - RetryError if the deadline has been exceeded + - Exception from exception_factory if the timeout has been exceeded """ - if ( - self.deadline is not None - and self.timeout is not None - and self.deadline < current_time - ): - raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(self.timeout), - source_exception, - ) from source_exception + if self.deadline is not None and self.deadline < current_time: + exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) + raise exc from src_exc async def _ensure_active_target(self) -> AsyncIterator[T]: """ @@ -200,8 +209,12 @@ async def _handle_exception(self, exc) -> None: check if it is retryable. If so, create a new active_target and continue iterating. If not, raise the exception. """ + self.error_list.append(exc) if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise exc + final_exc, src_exc = self._exc_factory( + exc_list=self.error_list, is_timeout=False + ) + raise final_exc from src_exc else: # run on_error callback if provided if self.on_error: @@ -210,7 +223,7 @@ async def _handle_exception(self, exc) -> None: next_sleep = next(self.sleep_generator) except StopIteration: raise ValueError("Sleep generator stopped yielding sleep values") - # if deadline is exceeded, raise RetryError + # if deadline is exceeded, raise exception if self.deadline is not None: next_attempt = time.monotonic() + next_sleep self._check_timeout(next_attempt, exc) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 1b309504..8b7edfc1 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -439,6 +439,7 @@ async def _generator_mock( await asyncio.sleep(sleep_time) if error_on and i == error_on: raise ValueError("generator mock error") + sent_in = yield (sent_in 
if sent_in else i) if ignore_sent: sent_in = None @@ -758,16 +759,30 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): """ import time from google.api_core.retry_streaming_async import AsyncRetryableGenerator + timeout = 2 time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) with now_patcher as patched_now: - no_check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + no_check = AsyncRetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=False, + ) assert no_check._check_timeout_on_yield is False - check = AsyncRetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + check = AsyncRetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=True, + ) assert check._check_timeout_on_yield is True # first yield should be fine @@ -781,3 +796,106 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): with pytest.raises(exceptions.RetryError): await check.__anext__() await no_check.__anext__() + + @pytest.mark.asyncio + async def test_generator_error_list(self): + """ + generator should keep history of errors seen + """ + retry_ = retry_async.AsyncRetry( + predicate=retry_async.if_exception_type(ValueError), is_stream=True + ) + decorated = retry_(self._generator_mock) + + generator = decorated(1) + err1 = ValueError("test") + await generator.athrow(err1) + assert generator.error_list == [err1] + err2 = ValueError("test2") + await generator.athrow(err2) + assert generator.error_list == [err1, err2] + + @pytest.mark.asyncio + async def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry_streaming_async import 
AsyncRetryableGenerator + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is False + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = AsyncRetryableGenerator( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + @pytest.mark.asyncio + async def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry_streaming_async import AsyncRetryableGenerator + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is True + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = AsyncRetryableGenerator( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + 
timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.__anext__() + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 3e1dd7c1..2dd7e0dd 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -774,8 +774,7 @@ def test___call___with_is_stream(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test_yield_stream_after_deadline(self, sleep, uniform): + def test_yield_stream_after_deadline(self, sleep, uniform): """ By default, if the deadline is hit between yields, the generator will continue. 
@@ -784,16 +783,30 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): """ import time from google.api_core.retry_streaming import RetryableGenerator + timeout = 2 time_now = time.monotonic() now_patcher = mock.patch( - "time.monotonic", return_value=time_now, + "time.monotonic", + return_value=time_now, ) with now_patcher as patched_now: - no_check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=False) + no_check = RetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=False, + ) assert no_check._check_timeout_on_yield is False - check = RetryableGenerator(self._generator_mock, None, [], timeout=timeout, check_timeout_on_yield=True) + check = RetryableGenerator( + self._generator_mock, + None, + [], + timeout=timeout, + check_timeout_on_yield=True, + ) assert check._check_timeout_on_yield is True # first yield should be fine @@ -807,3 +820,103 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): with pytest.raises(exceptions.RetryError): next(check) next(no_check) + + def test_generator_error_list(self): + """ + generator should keep history of errors seen + """ + retry_ = retry.Retry( + predicate=retry.if_exception_type(ValueError), is_stream=True + ) + decorated = retry_(self._generator_mock) + + generator = decorated(1) + err1 = ValueError("test") + generator.throw(err1) + assert generator.error_list == [err1] + err2 = ValueError("test2") + generator.throw(err2) + assert generator.error_list == [err1, err2] + + def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry_streaming import RetryableGenerator + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + 
def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is False + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = RetryableGenerator( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry_streaming import RetryableGenerator + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(args) == 0 + assert kwargs["exc_list"] == sent_errors + assert kwargs["is_timeout"] is True + assert kwargs["timeout_val"] == timeout + return expected_final_err, expected_source_err + + generator = RetryableGenerator( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + assert generator.error_list == [sent_errors[0], sent_errors[1]] + # trigger a timeout + patched_now.return_value += 
timeout + 1
+            with pytest.raises(expected_final_err.__class__) as exc_info:
+                next(generator)
+            assert exc_info.value == expected_final_err
+            assert exc_info.value.__cause__ == expected_source_err

From 41c7868f6557df4c824d8b571e91cfdcce036aff Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Tue, 15 Aug 2023 09:38:40 -0700
Subject: [PATCH 093/204] added type hint to check_timeout_on_yield

---
 google/api_core/retry_streaming.py       | 2 +-
 google/api_core/retry_streaming_async.py | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py
index fad89b25..265a1525 100644
--- a/google/api_core/retry_streaming.py
+++ b/google/api_core/retry_streaming.py
@@ -135,7 +135,7 @@ def __init__(
         exception_factory: Optional[
             Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]]
         ] = None,
-        check_timeout_on_yield=False,
+        check_timeout_on_yield: bool = False,
     ):
         """
         Args:
diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py
index dd90dd66..5f00fa5d 100644
--- a/google/api_core/retry_streaming_async.py
+++ b/google/api_core/retry_streaming_async.py
@@ -32,7 +32,6 @@
 import time
 from functools import partial
 
-from google.api_core import exceptions
 from google.api_core.retry_streaming import _build_timeout_error
 
 _LOGGER = logging.getLogger(__name__)
@@ -123,7 +122,7 @@ def __init__(
         exception_factory: Optional[
             Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]]
         ] = None,
-        check_timeout_on_yield=False,
+        check_timeout_on_yield: bool = False,
     ):
         """
         Args:

From 30fccb906fe374f1cd1fbd5a6e4133e19066c064 Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Tue, 15 Aug 2023 09:52:00 -0700
Subject: [PATCH 094/204] simplified ensure_target; fixed mypy issues

---
 google/api_core/retry_streaming.py       | 39 ++++++++------------
 google/api_core/retry_streaming_async.py | 45 +++++++++++-------------
 2 files changed, 35 
insertions(+), 49 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 265a1525..830d87c3 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,7 +14,7 @@ """Helpers for retries for streaming APIs.""" -from typing import Callable, Optional, Iterable, Iterator, Generator, TypeVar, Any, cast +from typing import Callable, Optional, List, Tuple, Iterable, Iterator, Generator, TypeVar, Any, cast import logging import time @@ -27,9 +27,7 @@ T = TypeVar("T") -def _build_timeout_error( - exc_list: list[Exception], is_timeout: bool, timeout_val: float -) -> tuple[Exception, Exception | None]: +def _build_timeout_error(exc_list:List[Exception], is_timeout:bool, timeout_val:float) -> Tuple[Exception, Optional[Exception]]: """ Default exception_factory implementation. Builds an exception after the retry fails @@ -44,13 +42,10 @@ def _build_timeout_error( """ src_exc = exc_list[-1] if exc_list else None if is_timeout: - return ( - exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout_val), - src_exc, - ), + return exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), src_exc, - ) + ), src_exc else: return exc_list[-1], None @@ -132,9 +127,7 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[ - Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] - ] = None, + exception_factory: Optional[Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]]] = None, check_timeout_on_yield: bool = False, ): """ @@ -170,15 +163,10 @@ def __init__( self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - if timeout is not None: - self.deadline = time.monotonic() + timeout - else: - self.deadline = None + self.deadline: Optional[float] = time.monotonic() + timeout 
if timeout else None self._check_timeout_on_yield = check_timeout_on_yield - self.error_list: list[Exception] = [] - self._exc_factory = partial( - exception_factory or _build_timeout_error, timeout_val=timeout - ) + self.error_list : List[Exception] = [] + self._exc_factory = partial(exception_factory or _build_timeout_error, timeout_val=timeout) def __iter__(self) -> Generator[T, Any, None]: """ @@ -194,9 +182,7 @@ def _handle_exception(self, exc) -> None: """ self.error_list.append(exc) if not self.predicate(exc): - final_exc, src_exc = self._exc_factory( - exc_list=self.error_list, is_timeout=False - ) + final_exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=False) raise final_exc from src_exc else: # run on_error callback if provided @@ -227,7 +213,10 @@ def _check_timeout(self, current_time: float) -> None: Raises: - Exception from exception_factory if the timeout has been exceeded """ - if self.deadline is not None and self.deadline < current_time: + if ( + self.deadline is not None + and self.deadline < current_time + ): exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) raise exc from src_exc diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 5f00fa5d..62973d99 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -19,6 +19,8 @@ Callable, Optional, Iterable, + List, + Tuple, AsyncIterator, AsyncIterable, Awaitable, @@ -120,7 +122,7 @@ def __init__( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[[list[Exception], bool, float], tuple[Exception, Exception | None]] + Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, check_timeout_on_yield: bool = False, ): @@ -158,12 +160,9 @@ def __init__( self.predicate = predicate self.sleep_generator = iter(sleep_generator) self.on_error = on_error - if 
timeout is not None: - self.deadline = time.monotonic() + timeout - else: - self.deadline = None + self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None self._check_timeout_on_yield = check_timeout_on_yield - self.error_list: list[Exception] = [] + self.error_list: List[Exception] = [] self._exc_factory = partial( exception_factory or _build_timeout_error, timeout_val=timeout ) @@ -184,19 +183,14 @@ def _check_timeout( exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) raise exc from src_exc - async def _ensure_active_target(self) -> AsyncIterator[T]: + async def _new_target(self) -> AsyncIterator[T]: """ - Ensure that the active target is populated and ready to be iterated over. - - Returns: - - The active_target iterable + Creates and returns a new target iterator from the target function. """ - if self.active_target is None: - new_iterable = self.target_fn() - if isinstance(new_iterable, Awaitable): - new_iterable = await new_iterable - self.active_target = new_iterable.__aiter__() - return self.active_target + new_iterable = self.target_fn() + if isinstance(new_iterable, Awaitable): + new_iterable = await new_iterable + return new_iterable.__aiter__() def __aiter__(self) -> AsyncIterator[T]: """Implement the async iterator protocol.""" @@ -231,8 +225,7 @@ async def _handle_exception(self, exc) -> None: ) # sleep before retrying await asyncio.sleep(next_sleep) - self.active_target = None - await self._ensure_active_target() + self.active_target = await self._new_target() async def _iteration_helper(self, iteration_routine: Awaitable) -> T: """ @@ -265,9 +258,10 @@ async def __anext__(self) -> T: Returns: - The next value from the active_target iterator. 
""" - iterable = await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() return await self._iteration_helper( - iterable.__anext__(), + self.active_target.__anext__(), ) async def aclose(self) -> None: @@ -277,7 +271,8 @@ async def aclose(self) -> None: Raises: - AttributeError if the active_target does not have a aclose() method """ - await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() if getattr(self.active_target, "aclose", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) return await casted_target.aclose() @@ -302,7 +297,8 @@ async def asend(self, *args, **kwargs) -> T: Raises: - AttributeError if the active_target does not have a asend() method """ - await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() if getattr(self.active_target, "asend", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) return await self._iteration_helper(casted_target.asend(*args, **kwargs)) @@ -325,7 +321,8 @@ async def athrow(self, *args, **kwargs) -> T: Raises: - AttributeError if the active_target does not have a athrow() method """ - await self._ensure_active_target() + if self.active_target is None: + self.active_target = await self._new_target() if getattr(self.active_target, "athrow", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) try: From a2b0e6c6f1abc223700e2ffc7cf31c3966cca3bc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 11:16:55 -0700 Subject: [PATCH 095/204] remove iteration helper --- google/api_core/retry_streaming.py | 47 +++++++++++++++++------- google/api_core/retry_streaming_async.py | 37 ++++++++----------- 2 files changed, 49 insertions(+), 35 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 830d87c3..5837df7e 100644 --- 
a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,7 +14,18 @@ """Helpers for retries for streaming APIs.""" -from typing import Callable, Optional, List, Tuple, Iterable, Iterator, Generator, TypeVar, Any, cast +from typing import ( + Callable, + Optional, + List, + Tuple, + Iterable, + Iterator, + Generator, + TypeVar, + Any, + cast, +) import logging import time @@ -27,7 +38,9 @@ T = TypeVar("T") -def _build_timeout_error(exc_list:List[Exception], is_timeout:bool, timeout_val:float) -> Tuple[Exception, Optional[Exception]]: +def _build_timeout_error( + exc_list: List[Exception], is_timeout: bool, timeout_val: float +) -> Tuple[Exception, Optional[Exception]]: """ Default exception_factory implementation. Builds an exception after the retry fails @@ -42,10 +55,13 @@ def _build_timeout_error(exc_list:List[Exception], is_timeout:bool, timeout_val: """ src_exc = exc_list[-1] if exc_list else None if is_timeout: - return exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout_val), + return ( + exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), + src_exc, + ), src_exc, - ), src_exc + ) else: return exc_list[-1], None @@ -127,7 +143,11 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]]] = None, + exception_factory: Optional[ + Callable[ + [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] + ] + ] = None, check_timeout_on_yield: bool = False, ): """ @@ -165,8 +185,10 @@ def __init__( self.on_error = on_error self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None self._check_timeout_on_yield = check_timeout_on_yield - self.error_list : List[Exception] = [] - self._exc_factory = partial(exception_factory or _build_timeout_error, timeout_val=timeout) + 
self.error_list: List[Exception] = [] + self._exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) def __iter__(self) -> Generator[T, Any, None]: """ @@ -182,7 +204,9 @@ def _handle_exception(self, exc) -> None: """ self.error_list.append(exc) if not self.predicate(exc): - final_exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=False) + final_exc, src_exc = self._exc_factory( + exc_list=self.error_list, is_timeout=False + ) raise final_exc from src_exc else: # run on_error callback if provided @@ -213,10 +237,7 @@ def _check_timeout(self, current_time: float) -> None: Raises: - Exception from exception_factory if the timeout has been exceeded """ - if ( - self.deadline is not None - and self.deadline < current_time - ): + if self.deadline is not None and self.deadline < current_time: exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) raise exc from src_exc diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 62973d99..c3efa483 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -122,7 +122,9 @@ def __init__( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] + Callable[ + [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] + ] ] = None, check_timeout_on_yield: bool = False, ): @@ -227,17 +229,15 @@ async def _handle_exception(self, exc) -> None: await asyncio.sleep(next_sleep) self.active_target = await self._new_target() - async def _iteration_helper(self, iteration_routine: Awaitable) -> T: + async def __anext__(self) -> T: """ - Helper function for sharing logic between __anext__ and asend. + Implement the async iterator protocol. 
- Args: - - iteration_routine: The coroutine to await to get the next value - from the iterator (e.g. __anext__ or asend) Returns: - The next value from the active_target iterator. """ - # check for expired timeouts before attempting to iterate + if self.active_target is None: + self.active_target = await self._new_target() if self._check_timeout_on_yield: self._check_timeout(time.monotonic()) try: @@ -245,25 +245,12 @@ async def _iteration_helper(self, iteration_routine: Awaitable) -> T: # Note: here would be a good place to add a timeout, like asyncio.wait_for. # But wait_for is expensive, so we only check for timeouts at the # start of each iteration. - return await iteration_routine + return await self.active_target.__anext__() except Exception as exc: await self._handle_exception(exc) # if retryable exception was handled, find the next value to return return await self.__anext__() - async def __anext__(self) -> T: - """ - Implement the async iterator protocol. - - Returns: - - The next value from the active_target iterator. - """ - if self.active_target is None: - self.active_target = await self._new_target() - return await self._iteration_helper( - self.active_target.__anext__(), - ) - async def aclose(self) -> None: """ Close the active_target if supported. (e.g. 
target is an async generator) @@ -299,9 +286,15 @@ async def asend(self, *args, **kwargs) -> T: """ if self.active_target is None: self.active_target = await self._new_target() + if self._check_timeout_on_yield: + self._check_timeout(time.monotonic()) if getattr(self.active_target, "asend", None): casted_target = cast(AsyncGenerator[T, None], self.active_target) - return await self._iteration_helper(casted_target.asend(*args, **kwargs)) + try: + return await casted_target.asend(*args, **kwargs) + except Exception as exc: + await self._handle_exception(exc) + return await self.__anext__() else: raise AttributeError( "asend() not implemented for {}".format(self.active_target) From 4aa1ab495d0fc9cce54aa3068721a58b79da4174 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 15 Aug 2023 18:26:40 +0000 Subject: [PATCH 096/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/api_core/retry_streaming.py | 47 +++++++++++++++++------- google/api_core/retry_streaming_async.py | 4 +- 2 files changed, 37 insertions(+), 14 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 830d87c3..5837df7e 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,7 +14,18 @@ """Helpers for retries for streaming APIs.""" -from typing import Callable, Optional, List, Tuple, Iterable, Iterator, Generator, TypeVar, Any, cast +from typing import ( + Callable, + Optional, + List, + Tuple, + Iterable, + Iterator, + Generator, + TypeVar, + Any, + cast, +) import logging import time @@ -27,7 +38,9 @@ T = TypeVar("T") -def _build_timeout_error(exc_list:List[Exception], is_timeout:bool, timeout_val:float) -> Tuple[Exception, Optional[Exception]]: +def _build_timeout_error( + exc_list: 
List[Exception], is_timeout: bool, timeout_val: float +) -> Tuple[Exception, Optional[Exception]]: """ Default exception_factory implementation. Builds an exception after the retry fails @@ -42,10 +55,13 @@ def _build_timeout_error(exc_list:List[Exception], is_timeout:bool, timeout_val: """ src_exc = exc_list[-1] if exc_list else None if is_timeout: - return exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout_val), + return ( + exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), + src_exc, + ), src_exc, - ), src_exc + ) else: return exc_list[-1], None @@ -127,7 +143,11 @@ def __init__( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]]] = None, + exception_factory: Optional[ + Callable[ + [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] + ] + ] = None, check_timeout_on_yield: bool = False, ): """ @@ -165,8 +185,10 @@ def __init__( self.on_error = on_error self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None self._check_timeout_on_yield = check_timeout_on_yield - self.error_list : List[Exception] = [] - self._exc_factory = partial(exception_factory or _build_timeout_error, timeout_val=timeout) + self.error_list: List[Exception] = [] + self._exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) def __iter__(self) -> Generator[T, Any, None]: """ @@ -182,7 +204,9 @@ def _handle_exception(self, exc) -> None: """ self.error_list.append(exc) if not self.predicate(exc): - final_exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=False) + final_exc, src_exc = self._exc_factory( + exc_list=self.error_list, is_timeout=False + ) raise final_exc from src_exc else: # run on_error callback if provided @@ -213,10 +237,7 @@ def _check_timeout(self, 
current_time: float) -> None: Raises: - Exception from exception_factory if the timeout has been exceeded """ - if ( - self.deadline is not None - and self.deadline < current_time - ): + if self.deadline is not None and self.deadline < current_time: exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) raise exc from src_exc diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 62973d99..8a95b710 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -122,7 +122,9 @@ def __init__( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] + Callable[ + [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] + ] ] = None, check_timeout_on_yield: bool = False, ): From 8349424e808bb4d8a4def6ef7e454c51de9b7bf2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 11:32:25 -0700 Subject: [PATCH 097/204] added test coverage for send/asend --- tests/asyncio/test_retry_async.py | 23 ++++++++++++++++++----- tests/unit/test_retry.py | 22 +++++++++++++++++----- 2 files changed, 35 insertions(+), 10 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 8b7edfc1..4b299970 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -747,10 +747,11 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await retryable.__anext__() + @pytest.mark.parametrize("yield_method", ["__anext__", "asend"]) @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test_yield_stream_after_deadline(self, sleep, uniform): + async def test_yield_stream_after_deadline(self, sleep, uniform, yield_method): """ By default, if the 
deadline is hit between yields, the generator will continue. @@ -758,6 +759,7 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): each yield. """ import time + import functools from google.api_core.retry_streaming_async import AsyncRetryableGenerator timeout = 2 @@ -785,17 +787,28 @@ async def test_yield_stream_after_deadline(self, sleep, uniform): ) assert check._check_timeout_on_yield is True - # first yield should be fine - await check.__anext__() + # initialize the generator await no_check.__anext__() + await check.__anext__() + + # use yield_method to advance the generator + no_check_yield = getattr(no_check, yield_method) + check_yield = getattr(check, yield_method) + if yield_method == "asend": + no_check_yield = functools.partial(no_check_yield, None) + check_yield = functools.partial(check_yield, None) + + # first yield should be fine + await check_yield() + await no_check_yield() # simulate a delay before next yield patched_now.return_value += timeout + 1 # second yield should raise when check_timeout_on_yield is True with pytest.raises(exceptions.RetryError): - await check.__anext__() - await no_check.__anext__() + await check_yield() + await no_check_yield() @pytest.mark.asyncio async def test_generator_error_list(self): diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 2dd7e0dd..b75a83d8 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -772,9 +772,10 @@ def test___call___with_is_stream(self, sleep): unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] + @pytest.mark.parametrize("yield_method", ["__next__", "send"]) @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) - def test_yield_stream_after_deadline(self, sleep, uniform): + def test_yield_stream_after_deadline(self, sleep, uniform, yield_method): """ By default, if the deadline is hit between yields, the generator will continue. 
@@ -809,17 +810,28 @@ def test_yield_stream_after_deadline(self, sleep, uniform): ) assert check._check_timeout_on_yield is True - # first yield should be fine - next(check) + # initialize generator next(no_check) + next(check) + + # use the yield method to advance the generator + check_yield = getattr(check, yield_method) + no_check_yield = getattr(no_check, yield_method) + if yield_method == "send": + # bind variable to send method + check_yield = functools.partial(check_yield, None) + no_check_yield = functools.partial(no_check_yield, None) + # first yield should be fine + check_yield() + no_check_yield() # simulate a delay before next yield patched_now.return_value += timeout + 1 # second yield should raise when check_timeout_on_yield is True with pytest.raises(exceptions.RetryError): - next(check) - next(no_check) + check_yield() + no_check_yield() def test_generator_error_list(self): """ From 5ddda246061c33ccf46833cf0fa41fda36df3a36 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 11:42:30 -0700 Subject: [PATCH 098/204] added test for closing new generator --- tests/asyncio/test_retry_async.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 4b299970..a70e16e2 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -629,6 +629,24 @@ async def test___call___with_generator_close(self, sleep): # calling next on closed generator should raise error await generator.__anext__() + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_new_generator_close(self, sleep): + """ + Close should be passed through retry into target generator, + even when it hasn't been iterated yet + """ + retry_ = retry_async.AsyncRetry(is_stream=True) + decorated = retry_(self._generator_mock) + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + await generator.aclose() 
+ assert generator.error_list == [] + + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_generator_throw(self, sleep): From 9e3ea92afbe14b0ed144fb6165af1ac00d91e26e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 11:52:53 -0700 Subject: [PATCH 099/204] improved test decorators --- tests/asyncio/test_retry_async.py | 6 +++--- tests/unit/test_retry.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index a70e16e2..bd742568 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -766,10 +766,9 @@ async def __anext__(self): await retryable.__anext__() @pytest.mark.parametrize("yield_method", ["__anext__", "asend"]) - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test_yield_stream_after_deadline(self, sleep, uniform, yield_method): + async def test_yield_stream_after_deadline(self, sleep, yield_method): """ By default, if the deadline is hit between yields, the generator will continue. 
@@ -828,8 +827,9 @@ async def test_yield_stream_after_deadline(self, sleep, uniform, yield_method): await check_yield() await no_check_yield() + @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test_generator_error_list(self): + async def test_generator_error_list(self, sleep): """ generator should keep history of errors seen """ diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index b75a83d8..db414ee8 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -773,9 +773,8 @@ def test___call___with_is_stream(self, sleep): assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] @pytest.mark.parametrize("yield_method", ["__next__", "send"]) - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) - def test_yield_stream_after_deadline(self, sleep, uniform, yield_method): + def test_yield_stream_after_deadline(self, sleep, yield_method): """ By default, if the deadline is hit between yields, the generator will continue. 
@@ -833,7 +832,8 @@ def test_yield_stream_after_deadline(self, sleep, uniform, yield_method): check_yield() no_check_yield() - def test_generator_error_list(self): + @mock.patch("asyncio.sleep", autospec=True) + def test_generator_error_list(self, sleep): """ generator should keep history of errors seen """ From 3b06b3ad03270cb4e4fdf84c69ed805d2dc471c2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 12:22:24 -0700 Subject: [PATCH 100/204] swapped out generator object with generator function --- google/api_core/retry_async.py | 4 +- google/api_core/retry_streaming_async.py | 366 +++++------------------ tests/asyncio/test_retry_async.py | 45 +-- 3 files changed, 92 insertions(+), 323 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index f441bdbd..98a3e5fb 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -61,7 +61,7 @@ async def check_if_exists(): from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 from google.api_core.retry import if_transient_error -from google.api_core.retry_streaming_async import AsyncRetryableGenerator +from google.api_core.retry_streaming_async import retry_target_generator _LOGGER = logging.getLogger(__name__) @@ -247,7 +247,7 @@ def retry_wrapped_stream(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return AsyncRetryableGenerator( + return retry_target_generator( target, self._predicate, sleep_generator, diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index c3efa483..a9bae091 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -32,6 +32,7 @@ import asyncio import logging import time +import sys from functools import partial from google.api_core.retry_streaming import _build_timeout_error @@ 
-40,291 +41,82 @@ T = TypeVar("T") - -class AsyncRetryableGenerator(AsyncGenerator[T, None]): - """ - AsyncGenerator wrapper for retryable streaming RPCs. - AsyncRetryableGenerator will be used when initilizing a retry with - ``AsyncRetry(is_stream=True)``. - - When ``is_stream=False``, the target is treated as a coroutine, - and will retry when the coroutine returns an error. When ``is_stream=True``, - the target will be treated as a callable that retruns an AsyncIterable. Instead - of just wrapping the initial call in retry logic, the entire iterable is - wrapped, with each yield passing through AsyncRetryableGenerator. If any yield - in the stream raises a retryable exception, the entire stream will be - retried. - - Important Note: when a stream is encounters a retryable error, it will - silently construct a fresh iterator instance in the background - and continue yielding (likely duplicate) values as if no error occurred. - This is the most general way to retry a stream, but it often is not the - desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - - There are two ways to build more advanced retry logic for streams: - - 1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - grpc call in a function that modifies the request based on what has - already been returned: - - ``` - async def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = await target(new_request) - async for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) - ``` - - 2. Wrap the AsyncRetryableGenerator - Alternatively, you can wrap the AsyncRetryableGenerator itself before - passing it to the end-user to add a filter on the stream. 
For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - `` - async def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - async for item in retryable_gen: - if stream_idx >= len(yielded_items): - yield item - yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` - """ - - def __init__( - self, - target: Union[ - Callable[[], AsyncIterable[T]], - Callable[[], Awaitable[AsyncIterable[T]]], - ], - predicate: Callable[[Exception], bool], - sleep_generator: Iterable[float], - timeout: Optional[float] = None, - on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[ - Callable[ - [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] - ] - ] = None, - check_timeout_on_yield: bool = False, - ): - """ - Args: - target: The function to call to produce iterables for each retry. - This must be a nullary function - apply arguments with - `functools.partial`. - predicate: A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator: An infinite iterator that determines - how long to sleep between retries. - timeout: How long to keep retrying the target. - on_error: A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - exception_factory: A function that creates an exception to raise - when the retry fails. 
The function takes three arguments: - a list of exceptions that occurred during the retry, a boolean - indicating whether the failure is due to retry timeout, and the original - timeout value (for building a helpful error message). It is expected to - return a tuple of the exception to raise and (optionally) a source - exception to chain to the raised exception. - If not provided, a default exception will be raised. - check_timeout_on_yield: If True, the timeout value will be checked - after each yield. If the timeout has been exceeded, the generator - will raise an exception from exception_factory. - Note that this adds an overhead to each yield, so it is better - to add the timeout logic to the wrapped stream when possible. - """ - self.target_fn = target - # active target must be populated in an async context - self.active_target: Optional[AsyncIterator[T]] = None - self.predicate = predicate - self.sleep_generator = iter(sleep_generator) - self.on_error = on_error - self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None - self._check_timeout_on_yield = check_timeout_on_yield - self.error_list: List[Exception] = [] - self._exc_factory = partial( - exception_factory or _build_timeout_error, timeout_val=timeout - ) - - def _check_timeout( - self, current_time: float, source_exception: Optional[Exception] = None - ) -> None: - """ - Helper function to check if the timeout has been exceeded, and raise an exception if so. 
- - Args: - - current_time: the timestamp to check against the deadline - - source_exception: the exception that triggered the timeout check, if any - Raises: - - Exception from exception_factory if the timeout has been exceeded - """ - if self.deadline is not None and self.deadline < current_time: - exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) - raise exc from src_exc - - async def _new_target(self) -> AsyncIterator[T]: - """ - Creates and returns a new target iterator from the target function. - """ - new_iterable = self.target_fn() - if isinstance(new_iterable, Awaitable): - new_iterable = await new_iterable - return new_iterable.__aiter__() - - def __aiter__(self) -> AsyncIterator[T]: - """Implement the async iterator protocol.""" - return self - - async def _handle_exception(self, exc) -> None: - """ - When an exception is raised while iterating over the active_target, - check if it is retryable. If so, create a new active_target and - continue iterating. If not, raise the exception. - """ - self.error_list.append(exc) - if not self.predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - final_exc, src_exc = self._exc_factory( - exc_list=self.error_list, is_timeout=False - ) - raise final_exc from src_exc - else: - # run on_error callback if provided - if self.on_error: - self.on_error(exc) - try: - next_sleep = next(self.sleep_generator) - except StopIteration: - raise ValueError("Sleep generator stopped yielding sleep values") - # if deadline is exceeded, raise exception - if self.deadline is not None: - next_attempt = time.monotonic() + next_sleep - self._check_timeout(next_attempt, exc) - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) - ) - # sleep before retrying - await asyncio.sleep(next_sleep) - self.active_target = await self._new_target() - - async def __anext__(self) -> T: - """ - Implement the async iterator protocol. 
- - Returns: - - The next value from the active_target iterator. - """ - if self.active_target is None: - self.active_target = await self._new_target() - if self._check_timeout_on_yield: - self._check_timeout(time.monotonic()) +async def retry_target_generator( + target: Union[ + Callable[[], AsyncIterable[T]], + Callable[[], Awaitable[AsyncIterable[T]]], + ], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: Optional[float] = None, + on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Optional[ + Callable[ + [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] + ] + ] = None, + check_timeout_on_yield: bool = False, + **kwargs, +) -> AsyncGenerator[T, None]: + subgenerator = None + + timeout = kwargs.get("deadline", timeout) + + deadline: Optional[float] = time.monotonic() + timeout if timeout else None + error_list: List[Exception] = [] + exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) + + for sleep in sleep_generator: + # Start a new retry loop try: - # grab the next value from the active_target - # Note: here would be a good place to add a timeout, like asyncio.wait_for. - # But wait_for is expensive, so we only check for timeouts at the - # start of each iteration. - return await self.active_target.__anext__() - except Exception as exc: - await self._handle_exception(exc) - # if retryable exception was handled, find the next value to return - return await self.__anext__() - - async def aclose(self) -> None: - """ - Close the active_target if supported. (e.g. 
target is an async generator) - - Raises: - - AttributeError if the active_target does not have a aclose() method - """ - if self.active_target is None: - self.active_target = await self._new_target() - if getattr(self.active_target, "aclose", None): - casted_target = cast(AsyncGenerator[T, None], self.active_target) - return await casted_target.aclose() - else: - raise AttributeError( - "aclose() not implemented for {}".format(self.active_target) - ) - - async def asend(self, *args, **kwargs) -> T: - """ - Call asend on the active_target if supported. (e.g. target is an async generator) - - If an exception is raised, a retry may be attempted before returning - a result. - - - Args: - - *args: arguments to pass to the wrapped generator's asend method - - **kwargs: keyword arguments to pass to the wrapped generator's asend method - Returns: - - the next value of the active_target iterator after calling asend - Raises: - - AttributeError if the active_target does not have a asend() method - """ - if self.active_target is None: - self.active_target = await self._new_target() - if self._check_timeout_on_yield: - self._check_timeout(time.monotonic()) - if getattr(self.active_target, "asend", None): - casted_target = cast(AsyncGenerator[T, None], self.active_target) - try: - return await casted_target.asend(*args, **kwargs) - except Exception as exc: - await self._handle_exception(exc) - return await self.__anext__() - else: - raise AttributeError( - "asend() not implemented for {}".format(self.active_target) - ) - - async def athrow(self, *args, **kwargs) -> T: - """ - Call athrow on the active_target if supported. (e.g. 
target is an async generator) - - If an exception is raised, a retry may be attempted before returning - - Args: - - *args: arguments to pass to the wrapped generator's athrow method - - **kwargs: keyword arguments to pass to the wrapped generator's athrow method - Returns: - - the next value of the active_target iterator after calling athrow - Raises: - - AttributeError if the active_target does not have a athrow() method - """ - if self.active_target is None: - self.active_target = await self._new_target() - if getattr(self.active_target, "athrow", None): - casted_target = cast(AsyncGenerator[T, None], self.active_target) - try: - return await casted_target.athrow(*args, **kwargs) - except Exception as exc: - await self._handle_exception(exc) - # if retryable exception was handled, return next from new active_target - return await self.__anext__() - else: - raise AttributeError( - "athrow() not implemented for {}".format(self.active_target) - ) + subgenerator = target() + + sent_in = None + while True: + # Check for expiration before starting + if check_timeout_on_yield is True and deadline is not None and time.monotonic() > deadline: + exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + raise exc from source_exc + ## Read from Subgenerator + next_value = await subgenerator.asend(sent_in) + ## Yield from Wrapper to caller + try: + # yield last value from subgenerator + # exceptions from `athrow` and `aclose` are injected here + sent_in = yield next_value + except GeneratorExit: + # if wrapper received `aclose`, pass to subgenerator and close + await subgenerator.aclose() + return + except: # noqa: E722 + # bare except catches any exception passed to `athrow` + # delegate error handling to subgenerator + await subgenerator.athrow(*sys.exc_info()) + return + except StopAsyncIteration: + # if generator exhausted, return + return + # pylint: disable=broad-except + # This function handles exceptions thrown by subgenerator + except (Exception, 
asyncio.CancelledError) as exc: + error_list.append(exc) + if not predicate(exc): + exc, source_exc = exc_factory(exc_list=error_list, is_timeout=False) + raise exc from source_exc + if on_error is not None: + on_error(exc) + finally: + if subgenerator is not None: + await subgenerator.aclose() + + # sleep and adjust timeout budget + if deadline is not None and time.monotonic() + sleep > deadline: + exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + raise exc from source_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) + ) + await asyncio.sleep(sleep) + raise ValueError("Sleep generator stopped yielding sleep values.") diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index bd742568..c48a07da 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -136,7 +136,7 @@ async def test_retry_target_bad_sleep_generator(): @pytest.mark.asyncio async def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry_streaming_async import AsyncRetryableGenerator + from google.api_core.retry_streaming_async import retry_target_generator async def target_fn(): async def inner_gen(): @@ -146,7 +146,7 @@ async def inner_gen(): return inner_gen() with pytest.raises(ValueError, match="Sleep generator"): - gen = AsyncRetryableGenerator(target_fn, lambda x: True, [], None) + gen = retry_target_generator(target_fn, lambda x: True, [], None) await gen.__anext__() @@ -641,7 +641,6 @@ async def test___call___with_new_generator_close(self, sleep): exception_list = [] generator = decorated(10, exceptions_seen=exception_list) await generator.aclose() - assert generator.error_list == [] with pytest.raises(StopAsyncIteration): # calling next on closed generator should raise error @@ -777,7 +776,7 @@ async def test_yield_stream_after_deadline(self, sleep, yield_method): """ import time import functools - from 
google.api_core.retry_streaming_async import AsyncRetryableGenerator + from google.api_core.retry_streaming_async import retry_target_generator timeout = 2 time_now = time.monotonic() @@ -787,22 +786,20 @@ async def test_yield_stream_after_deadline(self, sleep, yield_method): ) with now_patcher as patched_now: - no_check = AsyncRetryableGenerator( + no_check = retry_target_generator( self._generator_mock, None, - [], + [0]*10, timeout=timeout, check_timeout_on_yield=False, ) - assert no_check._check_timeout_on_yield is False - check = AsyncRetryableGenerator( + check = retry_target_generator( self._generator_mock, None, - [], + [0]*10, timeout=timeout, check_timeout_on_yield=True, ) - assert check._check_timeout_on_yield is True # initialize the generator await no_check.__anext__() @@ -827,32 +824,13 @@ async def test_yield_stream_after_deadline(self, sleep, yield_method): await check_yield() await no_check_yield() - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test_generator_error_list(self, sleep): - """ - generator should keep history of errors seen - """ - retry_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(ValueError), is_stream=True - ) - decorated = retry_(self._generator_mock) - - generator = decorated(1) - err1 = ValueError("test") - await generator.athrow(err1) - assert generator.error_list == [err1] - err2 = ValueError("test2") - await generator.athrow(err2) - assert generator.error_list == [err1, err2] - @pytest.mark.asyncio async def test_exc_factory_non_retryable_error(self): """ generator should give the option to override exception creation logic test when non-retryable error is thrown """ - from google.api_core.retry_streaming_async import AsyncRetryableGenerator + from google.api_core.retry_streaming_async import retry_target_generator timeout = 6 sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] @@ -866,7 +844,7 @@ def factory(*args, **kwargs): assert 
kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = AsyncRetryableGenerator( + generator = retry_target_generator( self._generator_mock, retry_async.if_exception_type(ValueError), [0] * 3, @@ -890,7 +868,7 @@ async def test_exc_factory_timeout(self): test when timeout is exceeded """ import time - from google.api_core.retry_streaming_async import AsyncRetryableGenerator + from google.api_core.retry_streaming_async import retry_target_generator timeout = 2 time_now = time.monotonic() @@ -912,7 +890,7 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = AsyncRetryableGenerator( + generator = retry_target_generator( self._generator_mock, retry_async.if_exception_type(ValueError), [0] * 3, @@ -923,7 +901,6 @@ def factory(*args, **kwargs): # trigger some retryable errors await generator.athrow(sent_errors[0]) await generator.athrow(sent_errors[1]) - assert generator.error_list == [sent_errors[0], sent_errors[1]] # trigger a timeout patched_now.return_value += timeout + 1 with pytest.raises(expected_final_err.__class__) as exc_info: From 8bb6b0c6a2ff336e4fa340957ca624706d052dd8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 13:24:22 -0700 Subject: [PATCH 101/204] support iterators, along with generators --- google/api_core/retry_streaming_async.py | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index a9bae091..c7fff7a4 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -73,6 +73,10 @@ async def retry_target_generator( try: subgenerator = target() + # if target is a generator, we will advance it using asend + # otherwise, we will use anext + supports_send = bool(getattr(subgenerator, "asend", None)) + sent_in = None while True: # Check for expiration 
before starting @@ -80,7 +84,13 @@ async def retry_target_generator( exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) raise exc from source_exc ## Read from Subgenerator - next_value = await subgenerator.asend(sent_in) + if supports_send: + next_value = await subgenerator.asend(sent_in) + elif sent_in is not None: + # asend was called on an iterator that does not support it + raise AttributeError(f"asend() not implemented for {target}") + else: + next_value = await subgenerator.__anext__() ## Yield from Wrapper to caller try: # yield last value from subgenerator @@ -88,12 +98,18 @@ async def retry_target_generator( sent_in = yield next_value except GeneratorExit: # if wrapper received `aclose`, pass to subgenerator and close - await subgenerator.aclose() + if bool(getattr(subgenerator, "aclose", None)): + await subgenerator.aclose() + else: + raise AttributeError(f"aclose() not implemented for {target}") return except: # noqa: E722 # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator - await subgenerator.athrow(*sys.exc_info()) + if getattr(subgenerator, "athrow", None): + await subgenerator.athrow(*sys.exc_info()) + else: + raise return except StopAsyncIteration: # if generator exhausted, return @@ -108,7 +124,7 @@ async def retry_target_generator( if on_error is not None: on_error(exc) finally: - if subgenerator is not None: + if subgenerator is not None and getattr(subgenerator, "aclose", None): await subgenerator.aclose() # sleep and adjust timeout budget From 37c64a02d41d75fca6ff866b53c5090ea82f88f9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 14:23:14 -0700 Subject: [PATCH 102/204] got tests passing with new structure --- google/api_core/retry_streaming.py | 7 ++ google/api_core/retry_streaming_async.py | 18 +++- tests/asyncio/test_retry_async.py | 126 +++++++++++++++++------ 3 files changed, 113 insertions(+), 38 deletions(-) diff --git a/google/api_core/retry_streaming.py 
b/google/api_core/retry_streaming.py index 5837df7e..d0e47b01 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -38,6 +38,13 @@ T = TypeVar("T") +class _TerminalException(Exception): + """ + Exception to bypasses retry logic and raises __cause__ immediately. + """ + pass + + def _build_timeout_error( exc_list: List[Exception], is_timeout: bool, timeout_val: float ) -> Tuple[Exception, Optional[Exception]]: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index c7fff7a4..d82220cd 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -36,11 +36,13 @@ from functools import partial from google.api_core.retry_streaming import _build_timeout_error +from google.api_core.retry_streaming import _TerminalException _LOGGER = logging.getLogger(__name__) T = TypeVar("T") + async def retry_target_generator( target: Union[ Callable[[], AsyncIterable[T]], @@ -71,7 +73,13 @@ async def retry_target_generator( for sleep in sleep_generator: # Start a new retry loop try: + # generator may be raw iterator, or wrapped in an awaitable subgenerator = target() + try: + subgenerator = await subgenerator + except TypeError: + # was not awaitable + pass # if target is a generator, we will advance it using asend # otherwise, we will use anext @@ -82,13 +90,11 @@ async def retry_target_generator( # Check for expiration before starting if check_timeout_on_yield is True and deadline is not None and time.monotonic() > deadline: exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) - raise exc from source_exc + exc.__cause__ = source_exc + raise _TerminalException() from exc ## Read from Subgenerator if supports_send: next_value = await subgenerator.asend(sent_in) - elif sent_in is not None: - # asend was called on an iterator that does not support it - raise AttributeError(f"asend() not implemented for {target}") else: next_value = await 
subgenerator.__anext__() ## Yield from Wrapper to caller @@ -101,7 +107,7 @@ async def retry_target_generator( if bool(getattr(subgenerator, "aclose", None)): await subgenerator.aclose() else: - raise AttributeError(f"aclose() not implemented for {target}") + raise return except: # noqa: E722 # bare except catches any exception passed to `athrow` @@ -111,6 +117,8 @@ async def retry_target_generator( else: raise return + except _TerminalException as exc: + raise exc.__cause__ from exc.__cause__.__cause__ except StopAsyncIteration: # if generator exhausted, return return diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index c48a07da..698be665 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -678,16 +678,16 @@ async def test___call___with_generator_throw(self, sleep): # calling next on closed generator should not raise error assert await generator.__anext__() == 1 + @pytest.mark.parametrize("awaitale_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___with_iterable_coroutine_send_close_throw(self, sleep): + async def test___call___with_iterable_send(self, sleep, awaitale_wrapped): """ - Send, Throw, and Close should raise AttributeErrors when target is a coroutine that - produces an iterable + Send should work like next if the wrapped iterable does not support it """ retry_ = retry_async.AsyncRetry(is_stream=True) - async def iterable_fn(n): + def iterable_fn(n): class CustomIterable: def __init__(self, n): self.n = n @@ -704,31 +704,79 @@ async def __anext__(self): return CustomIterable(n) - decorated = retry_(iterable_fn) + if awaitale_wrapped: + + async def wrapper(n): + return iterable_fn(n) + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) retryable = decorated(4) result = await retryable.__anext__() assert result == 0 - with pytest.raises(AttributeError): - await retryable.asend("test") - assert await 
retryable.__anext__() == 1 - with pytest.raises(AttributeError): - await retryable.aclose() - assert await retryable.__anext__() == 2 - with pytest.raises(AttributeError): - await retryable.athrow(ValueError("test")) - assert await retryable.__anext__() == 3 + await retryable.asend("test") == 1 + await retryable.asend("test2") == 2 + await retryable.asend("test3") == 3 + + @pytest.mark.parametrize("awaitale_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_close(self, sleep, awaitale_wrapped): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_async.AsyncRetry(is_stream=True) + + def iterable_fn(n): + class CustomIterable: + def __init__(self, n): + self.n = n + self.i = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.i == self.n: + raise StopAsyncIteration + self.i += 1 + return self.i - 1 + + return CustomIterable(n) + + if awaitale_wrapped: + + async def wrapper(n): + return iterable_fn(n) + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = decorated(4) + assert await retryable.__anext__() == 0 + await retryable.aclose() with pytest.raises(StopAsyncIteration): await retryable.__anext__() + # try closing new generator + new_retryable = decorated(4) + await new_retryable.aclose() + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + @pytest.mark.parametrize("awaitale_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___with_iterable_send_close_throw(self, sleep): + async def test___call___with_iterable_throw(self, sleep, awaitale_wrapped): """ - Send, Throw, and Close should raise AttributeErrors when target is a - function that produces an iterable + Throw should work even if the wrapped iterable does not support it """ - retry_ = 
retry_async.AsyncRetry(is_stream=True) + + predicate = retry_async.if_exception_type(ValueError) + retry_ = retry_async.AsyncRetry(is_stream=True, predicate=predicate) def iterable_fn(n): class CustomIterable: @@ -747,22 +795,31 @@ async def __anext__(self): return CustomIterable(n) - decorated = retry_(iterable_fn) + if awaitale_wrapped: + + async def wrapper(n): + return iterable_fn(n) + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + # try throwing with active generator retryable = decorated(4) - result = await retryable.__anext__() - assert result == 0 - with pytest.raises(AttributeError): - await retryable.asend("test") - assert await retryable.__anext__() == 1 - with pytest.raises(AttributeError): - await retryable.aclose() - assert await retryable.__anext__() == 2 - with pytest.raises(AttributeError): - await retryable.athrow(ValueError("test")) - assert await retryable.__anext__() == 3 + assert await retryable.__anext__() == 0 + # should swallow errors in predicate + await retryable.athrow(ValueError("test")) + # should raise errors not in predicate + with pytest.raises(BufferError): + await retryable.athrow(BufferError("test")) with pytest.raises(StopAsyncIteration): await retryable.__anext__() + # try throwing with new generator + new_retryable = decorated(4) + with pytest.raises(BufferError): + await new_retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() @pytest.mark.parametrize("yield_method", ["__anext__", "asend"]) @mock.patch("asyncio.sleep", autospec=True) @@ -789,14 +846,14 @@ async def test_yield_stream_after_deadline(self, sleep, yield_method): no_check = retry_target_generator( self._generator_mock, None, - [0]*10, + [0] * 10, timeout=timeout, check_timeout_on_yield=False, ) check = retry_target_generator( self._generator_mock, None, - [0]*10, + [0] * 10, timeout=timeout, check_timeout_on_yield=True, ) @@ -851,10 +908,11 @@ def factory(*args, **kwargs): 
timeout=timeout, exception_factory=factory, ) + # initialize the generator + await generator.__anext__() # trigger some retryable errors await generator.athrow(sent_errors[0]) await generator.athrow(sent_errors[1]) - assert generator.error_list == [sent_errors[0], sent_errors[1]] # trigger a non-retryable error with pytest.raises(expected_final_err.__class__) as exc_info: await generator.athrow(sent_errors[2]) @@ -898,6 +956,8 @@ def factory(*args, **kwargs): exception_factory=factory, check_timeout_on_yield=True, ) + # initialize the generator + await generator.__anext__() # trigger some retryable errors await generator.athrow(sent_errors[0]) await generator.athrow(sent_errors[1]) From cee0028b5bf14163eb2fd8649b3225001f633ea6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 14:54:31 -0700 Subject: [PATCH 103/204] replaces sync streaming retries object with generator function --- google/api_core/retry.py | 4 +- google/api_core/retry_streaming.py | 306 +++++------------------------ tests/unit/test_retry.py | 171 +++++++--------- 3 files changed, 118 insertions(+), 363 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 5b79de9b..8e48af56 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -66,7 +66,7 @@ def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions -from google.api_core.retry_streaming import RetryableGenerator +from google.api_core.retry_streaming import retry_target_generator from google.auth import exceptions as auth_exceptions _LOGGER = logging.getLogger(__name__) @@ -361,7 +361,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = RetryableGenerator if self._is_stream else retry_target + retry_func = retry_target_generator if self._is_stream else retry_target return retry_func( target, self._predicate, diff --git 
a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index d0e47b01..0f510597 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -73,268 +73,50 @@ def _build_timeout_error( return exc_list[-1], None -class RetryableGenerator(Generator[T, Any, None]): - """ - Generator wrapper for retryable streaming RPCs. - RetryableGenerator will be used when initilizing a retry with - ``Retry(is_stream=True)``. - - When ``is_stream=False``, the target is treated as a callable, - and will retry when the callable returns an error. When ``is_stream=True``, - the target will be treated as a callable that retruns an iterable. Instead - of just wrapping the initial call in retry logic, the entire iterable is - wrapped, with each yield passing through RetryableGenerator. If any yield - in the stream raises a retryable exception, the entire stream will be - retried. - - Important Note: when a stream is encounters a retryable error, it will - silently construct a fresh iterator instance in the background - and continue yielding (likely duplicate) values as if no error occurred. - This is the most general way to retry a stream, but it often is not the - desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - - There are two ways to build more advanced retry logic for streams: - - 1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. 
For example, you can wrap a - network call in a function that modifies the request based on what has - already been returned: - - ``` - def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = target(new_request) - for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) - ``` - - 2. Wrap the RetryableGenerator - Alternatively, you can wrap the RetryableGenerator itself before - passing it to the end-user to add a filter on the stream. For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - `` - def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - for item in retryable_gen: - if stream_idx >= len(yielded_items): - yield item - yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` - """ - - def __init__( - self, - target: Callable[[], Iterable[T]], - predicate: Callable[[Exception], bool], - sleep_generator: Iterable[float], - timeout: Optional[float] = None, - on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[ - Callable[ - [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] - ] - ] = None, - check_timeout_on_yield: bool = False, - ): - """ - Args: - target: The function to call to produce iterables for each retry. 
- This must be a nullary function - apply arguments with - `functools.partial`. - predicate: A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator: An infinite iterator that determines - how long to sleep between retries. - timeout: How long to keep retrying the target, in seconds. - on_error: A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - exception_factory: A function that creates an exception to raise - when the retry fails. The function takes three arguments: - a list of exceptions that occurred during the retry, a boolean - indicating whether the failure is due to retry timeout, and the original - timeout value (for building a helpful error message). It is expected to - return a tuple of the exception to raise and (optionally) a source - exception to chain to the raised exception. - If not provided, a default exception will be raised. - check_timeout_on_yield: If True, the timeout value will be checked - after each yield. If the timeout has been exceeded, the generator - will raise an exception from exception_factory. - Note that this adds an overhead to each yield, so it is better - to add the timeout logic to the wrapped stream when possible. - """ - self.target_fn = target - self.active_target: Iterator[T] = self.target_fn().__iter__() - self.predicate = predicate - self.sleep_generator = iter(sleep_generator) - self.on_error = on_error - self.deadline: Optional[float] = time.monotonic() + timeout if timeout else None - self._check_timeout_on_yield = check_timeout_on_yield - self.error_list: List[Exception] = [] - self._exc_factory = partial( - exception_factory or _build_timeout_error, timeout_val=timeout - ) - - def __iter__(self) -> Generator[T, Any, None]: - """ - Implement the iterator protocol. 
- """ - return self - - def _handle_exception(self, exc) -> None: - """ - When an exception is raised while iterating over the active_target, - check if it is retryable. If so, create a new active_target and - continue iterating. If not, raise the exception. - """ - self.error_list.append(exc) - if not self.predicate(exc): - final_exc, src_exc = self._exc_factory( - exc_list=self.error_list, is_timeout=False - ) - raise final_exc from src_exc - else: - # run on_error callback if provided - if self.on_error: - self.on_error(exc) - try: - next_sleep = next(self.sleep_generator) - except StopIteration: - raise ValueError("Sleep generator stopped yielding sleep values") - # if deadline is exceeded, raise exception - if self.deadline is not None: - next_attempt = time.monotonic() + next_sleep - self._check_timeout(next_attempt) - # sleep before retrying - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(exc, next_sleep) - ) - time.sleep(next_sleep) - self.active_target = self.target_fn().__iter__() - - def _check_timeout(self, current_time: float) -> None: - """ - Helper function to check if the timeout has been exceeded, and raise an exception if so. - - Args: - - current_time: the timestamp to check against the deadline - - source_exception: the exception that triggered the timeout check, if any - Raises: - - Exception from exception_factory if the timeout has been exceeded - """ - if self.deadline is not None and self.deadline < current_time: - exc, src_exc = self._exc_factory(exc_list=self.error_list, is_timeout=True) - raise exc from src_exc - - def __next__(self) -> T: - """ - Implement the iterator protocol. 
- - Returns: - - the next value of the active_target iterator - """ - # check for expired timeouts before attempting to iterate - if self._check_timeout_on_yield: - self._check_timeout(time.monotonic()) +def retry_target_generator( + target: Callable[[], Iterable[T]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: Optional[float] = None, + on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Optional[ + Callable[ + [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] + ] + ] = None, + **kwargs, +) -> Generator[T, Any, None]: + timeout = kwargs.get("deadline", timeout) + deadline: Optional[float] = time.monotonic() + timeout if timeout else None + error_list: List[Exception] = [] + exc_factory = partial( + exception_factory or _build_timeout_error, timeout_val=timeout + ) + + for sleep in sleep_generator: + # Start a new retry loop try: - return next(self.active_target) + # create and yeild from a new instance of the generator from input generator function + subgenerator = target() + return (yield from subgenerator) + # handle exceptions raised by the subgenerator except Exception as exc: - self._handle_exception(exc) - # if retryable exception was handled, try again with new active_target - return self.__next__() - - def close(self) -> None: - """ - Close the active_target if supported. (e.g. target is a generator) - - Raises: - - AttributeError if the active_target does not have a close() method - """ - if getattr(self.active_target, "close", None): - casted_target = cast(Generator, self.active_target) - return casted_target.close() - else: - raise AttributeError( - "close() not implemented for {}".format(self.active_target) - ) - - def send(self, *args, **kwargs) -> T: - """ - Call send on the active_target if supported. (e.g. target is a generator) - - If an exception is raised, a retry may be attempted before returning - a result. 
- - Args: - - *args: arguments to pass to the wrapped generator's send method - - **kwargs: keyword arguments to pass to the wrapped generator's send method - Returns: - - the next value of the active_target iterator after calling send - Raises: - - AttributeError if the active_target does not have a send() method - """ - # check for expired timeouts before attempting to iterate - if self._check_timeout_on_yield: - self._check_timeout(time.monotonic()) - if getattr(self.active_target, "send", None): - casted_target = cast(Generator, self.active_target) - try: - return casted_target.send(*args, **kwargs) - except Exception as exc: - self._handle_exception(exc) - # if exception was retryable, use new target for return value - return self.__next__() - else: - raise AttributeError( - "send() not implemented for {}".format(self.active_target) - ) - - def throw(self, *args, **kwargs) -> T: - """ - Call throw on the active_target if supported. (e.g. target is a generator) - - If an exception is raised, a retry may be attempted before returning - a result. 
+ error_list.append(exc) + if not predicate(exc): + exc, source_exc = exc_factory(exc_list=error_list, is_timeout=False) + raise exc from source_exc + if on_error is not None: + on_error(exc) + finally: + if subgenerator is not None and getattr(subgenerator, "close", None): + subgenerator.close() + + if deadline is not None and time.monotonic() + sleep > deadline: + exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + raise exc from source_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) + ) + time.sleep(sleep) - Args: - - *args: arguments to pass to the wrapped generator's throw method - - **kwargs: keyword arguments to pass to the wrapped generator's throw method - Returns: - - the next vale of the active_target iterator after calling throw - Raises: - - AttributeError if the active_target does not have a throw() method - """ - if getattr(self.active_target, "throw", None): - casted_target = cast(Generator, self.active_target) - try: - return casted_target.throw(*args, **kwargs) - except Exception as exc: - self._handle_exception(exc) - # if retryable exception was handled, return next from new active_target - return self.__next__() - else: - raise AttributeError( - "throw() not implemented for {}".format(self.active_target) - ) + raise ValueError("Sleep generator stopped yielding sleep values.") diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index db414ee8..a6ad5d51 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -164,7 +164,7 @@ def test_retry_target_bad_sleep_generator(): def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry_streaming import RetryableGenerator + from google.api_core.retry_streaming import retry_target_generator def target_fn(): def inner_gen(): @@ -176,7 +176,7 @@ def inner_gen(): with pytest.raises( ValueError, match="Sleep generator stopped yielding sleep values" ): - gen = RetryableGenerator(target_fn, 
lambda x: True, [], None) + gen = retry_target_generator(target_fn, lambda x: True, [], None) next(gen) @@ -651,9 +651,9 @@ def test___call___with_generator_send_retry(self, sleep): assert on_error.call_count == 3 @mock.patch("time.sleep", autospec=True) - def test___call___with_iterable_send_close_throw(self, sleep): + def test___call___with_iterable_send(self, sleep): """ - Send, Throw, and Close should raise AttributeErrors + send should raise attribute error if wrapped iterator does not support it """ retry_ = retry.Retry(is_stream=True) @@ -661,19 +661,68 @@ def iterable_fn(n): return iter(range(n)) decorated = retry_(iterable_fn) + generator = decorated(5) + # initialize + next(generator) + # call send + with pytest.raises(AttributeError): + generator.send("test") + + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_close(self, sleep): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry.Retry(is_stream=True) + + def iterable_fn(n): + return iter(range(n)) + decorated = retry_(iterable_fn) + + # try closing active generator retryable = decorated(10) - result = next(retryable) - assert result == 0 - with pytest.raises(AttributeError): - retryable.send("test") + assert next(retryable) == 0 + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + # try closing new generator + retryable = decorated(10) + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_throw(self, sleep): + """ + Throw should work even if the wrapped iterable does not support it + """ + predicate = retry.if_exception_type(ValueError) + retry_ = retry.Retry(is_stream=True, predicate=predicate) + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try throwing with active generator + retryable = decorated(10) + assert next(retryable) == 0 + # should swallow 
errors in predicate + retryable.throw(ValueError) assert next(retryable) == 1 - with pytest.raises(AttributeError): - retryable.close() - assert next(retryable) == 2 - with pytest.raises(AttributeError): + # should raise on other errors + with pytest.raises(TypeError): + retryable.throw(TypeError) + with pytest.raises(StopIteration): + next(retryable) + # try throwing with new generator + retryable = decorated(10) + with pytest.raises(ValueError): retryable.throw(ValueError) - assert next(retryable) == 3 + with pytest.raises(StopIteration): + next(retryable) @mock.patch("time.sleep", autospec=True) def test___call___with_generator_return(self, sleep): @@ -772,90 +821,12 @@ def test___call___with_is_stream(self, sleep): unpacked = [next(gen) for i in range(10)] assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] - @pytest.mark.parametrize("yield_method", ["__next__", "send"]) - @mock.patch("asyncio.sleep", autospec=True) - def test_yield_stream_after_deadline(self, sleep, yield_method): - """ - By default, if the deadline is hit between yields, the generator will continue. - - There is a flag that should cause the wrapper to test for the deadline after - each yield. 
- """ - import time - from google.api_core.retry_streaming import RetryableGenerator - - timeout = 2 - time_now = time.monotonic() - now_patcher = mock.patch( - "time.monotonic", - return_value=time_now, - ) - - with now_patcher as patched_now: - no_check = RetryableGenerator( - self._generator_mock, - None, - [], - timeout=timeout, - check_timeout_on_yield=False, - ) - assert no_check._check_timeout_on_yield is False - check = RetryableGenerator( - self._generator_mock, - None, - [], - timeout=timeout, - check_timeout_on_yield=True, - ) - assert check._check_timeout_on_yield is True - - # initialize generator - next(no_check) - next(check) - - # use the yield method to advance the generator - check_yield = getattr(check, yield_method) - no_check_yield = getattr(no_check, yield_method) - if yield_method == "send": - # bind variable to send method - check_yield = functools.partial(check_yield, None) - no_check_yield = functools.partial(no_check_yield, None) - # first yield should be fine - check_yield() - no_check_yield() - - # simulate a delay before next yield - patched_now.return_value += timeout + 1 - - # second yield should raise when check_timeout_on_yield is True - with pytest.raises(exceptions.RetryError): - check_yield() - no_check_yield() - - @mock.patch("asyncio.sleep", autospec=True) - def test_generator_error_list(self, sleep): - """ - generator should keep history of errors seen - """ - retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), is_stream=True - ) - decorated = retry_(self._generator_mock) - - generator = decorated(1) - err1 = ValueError("test") - generator.throw(err1) - assert generator.error_list == [err1] - err2 = ValueError("test2") - generator.throw(err2) - assert generator.error_list == [err1, err2] - def test_exc_factory_non_retryable_error(self): """ generator should give the option to override exception creation logic test when non-retryable error is thrown """ - from google.api_core.retry_streaming import 
RetryableGenerator + from google.api_core.retry_streaming import retry_target_generator timeout = 6 sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] @@ -869,17 +840,18 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = RetryableGenerator( + generator = retry_target_generator( self._generator_mock, retry.if_exception_type(ValueError), [0] * 3, timeout=timeout, exception_factory=factory, ) + # initialize generator + next(generator) # trigger some retryable errors generator.throw(sent_errors[0]) generator.throw(sent_errors[1]) - assert generator.error_list == [sent_errors[0], sent_errors[1]] # trigger a non-retryable error with pytest.raises(expected_final_err.__class__) as exc_info: generator.throw(sent_errors[2]) @@ -892,7 +864,7 @@ def test_exc_factory_timeout(self): test when timeout is exceeded """ import time - from google.api_core.retry_streaming import RetryableGenerator + from google.api_core.retry_streaming import retry_target_generator timeout = 2 time_now = time.monotonic() @@ -903,7 +875,7 @@ def test_exc_factory_timeout(self): with now_patcher as patched_now: timeout = 2 - sent_errors = [ValueError("test"), ValueError("test2")] + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] expected_final_err = RuntimeError("done") expected_source_err = ZeroDivisionError("test4") @@ -914,7 +886,7 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = RetryableGenerator( + generator = retry_target_generator( self._generator_mock, retry.if_exception_type(ValueError), [0] * 3, @@ -922,13 +894,14 @@ def factory(*args, **kwargs): exception_factory=factory, check_timeout_on_yield=True, ) + # initialize generator + next(generator) # trigger some retryable errors generator.throw(sent_errors[0]) generator.throw(sent_errors[1]) - assert generator.error_list == 
[sent_errors[0], sent_errors[1]] # trigger a timeout patched_now.return_value += timeout + 1 with pytest.raises(expected_final_err.__class__) as exc_info: - next(generator) + generator.throw(sent_errors[2]) assert exc_info.value == expected_final_err assert exc_info.value.__cause__ == expected_source_err From 3a7e5fad1acd8cd0ed73b2d306913b8a811a6049 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 14:57:14 -0700 Subject: [PATCH 104/204] removed timeout on yield functionality --- google/api_core/retry_streaming_async.py | 6 -- tests/asyncio/test_retry_async.py | 75 ++---------------------- 2 files changed, 4 insertions(+), 77 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index d82220cd..3dc915f1 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -57,7 +57,6 @@ async def retry_target_generator( [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] ] ] = None, - check_timeout_on_yield: bool = False, **kwargs, ) -> AsyncGenerator[T, None]: subgenerator = None @@ -87,11 +86,6 @@ async def retry_target_generator( sent_in = None while True: - # Check for expiration before starting - if check_timeout_on_yield is True and deadline is not None and time.monotonic() > deadline: - exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) - exc.__cause__ = source_exc - raise _TerminalException() from exc ## Read from Subgenerator if supports_send: next_value = await subgenerator.asend(sent_in) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 698be665..8d46f9a9 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -705,10 +705,8 @@ async def __anext__(self): return CustomIterable(n) if awaitale_wrapped: - async def wrapper(n): return iterable_fn(n) - decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -720,6 +718,7 @@ async def 
wrapper(n): await retryable.asend("test2") == 2 await retryable.asend("test3") == 3 + @pytest.mark.parametrize("awaitale_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -745,12 +744,9 @@ async def __anext__(self): return self.i - 1 return CustomIterable(n) - if awaitale_wrapped: - async def wrapper(n): return iterable_fn(n) - decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -767,6 +763,7 @@ async def wrapper(n): with pytest.raises(StopAsyncIteration): await new_retryable.__anext__() + @pytest.mark.parametrize("awaitale_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -794,12 +791,9 @@ async def __anext__(self): return self.i - 1 return CustomIterable(n) - if awaitale_wrapped: - async def wrapper(n): return iterable_fn(n) - decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -821,66 +815,6 @@ async def wrapper(n): with pytest.raises(StopAsyncIteration): await new_retryable.__anext__() - @pytest.mark.parametrize("yield_method", ["__anext__", "asend"]) - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test_yield_stream_after_deadline(self, sleep, yield_method): - """ - By default, if the deadline is hit between yields, the generator will continue. - - There is a flag that should cause the wrapper to test for the deadline after - each yield. 
- """ - import time - import functools - from google.api_core.retry_streaming_async import retry_target_generator - - timeout = 2 - time_now = time.monotonic() - now_patcher = mock.patch( - "time.monotonic", - return_value=time_now, - ) - - with now_patcher as patched_now: - no_check = retry_target_generator( - self._generator_mock, - None, - [0] * 10, - timeout=timeout, - check_timeout_on_yield=False, - ) - check = retry_target_generator( - self._generator_mock, - None, - [0] * 10, - timeout=timeout, - check_timeout_on_yield=True, - ) - - # initialize the generator - await no_check.__anext__() - await check.__anext__() - - # use yield_method to advance the generator - no_check_yield = getattr(no_check, yield_method) - check_yield = getattr(check, yield_method) - if yield_method == "asend": - no_check_yield = functools.partial(no_check_yield, None) - check_yield = functools.partial(check_yield, None) - - # first yield should be fine - await check_yield() - await no_check_yield() - - # simulate a delay before next yield - patched_now.return_value += timeout + 1 - - # second yield should raise when check_timeout_on_yield is True - with pytest.raises(exceptions.RetryError): - await check_yield() - await no_check_yield() - @pytest.mark.asyncio async def test_exc_factory_non_retryable_error(self): """ @@ -937,7 +871,7 @@ async def test_exc_factory_timeout(self): with now_patcher as patched_now: timeout = 2 - sent_errors = [ValueError("test"), ValueError("test2")] + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] expected_final_err = RuntimeError("done") expected_source_err = ZeroDivisionError("test4") @@ -954,7 +888,6 @@ def factory(*args, **kwargs): [0] * 3, timeout=timeout, exception_factory=factory, - check_timeout_on_yield=True, ) # initialize the generator await generator.__anext__() @@ -964,6 +897,6 @@ def factory(*args, **kwargs): # trigger a timeout patched_now.return_value += timeout + 1 with 
pytest.raises(expected_final_err.__class__) as exc_info: - await generator.__anext__() + await generator.athrow(sent_errors[2]) assert exc_info.value == expected_final_err assert exc_info.value.__cause__ == expected_source_err From ba6dc9f683f4ade65811d1313046a7aab3bc794f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 15:11:06 -0700 Subject: [PATCH 105/204] fixed comments --- google/api_core/retry.py | 2 +- google/api_core/retry_async.py | 2 +- google/api_core/retry_streaming.py | 68 +++++++++++++++++++++ google/api_core/retry_streaming_async.py | 76 +++++++++++++++++++++++- 4 files changed, 145 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 8e48af56..9f8f96ec 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -313,7 +313,7 @@ class Retry(object): will be retried. Defaults to False. To avoid duplicate values, retryable streams should typically be wrapped in additional filter logic before use. For more details, see - ``google/api_core/retry_streaming.RetryaleGenerator``. + ``google.api_core.retry_streaming.retry_target_generator``. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 98a3e5fb..de6eb31c 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -184,7 +184,7 @@ class AsyncRetry: function call itself will be retried. Defaults to False. To avoid duplicate values, retryable streams should typically be wrapped in additional filter logic before use. For more details, see - ``google.api_core.retry_streaming_async.AsyncRetryableGenerator``. + ``google.api_core.retry_streaming_async.retry_target_generator``. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. 
""" diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 0f510597..caf6c8b8 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -86,6 +86,74 @@ def retry_target_generator( ] = None, **kwargs, ) -> Generator[T, Any, None]: + """ + Generator wrapper for retryable streaming RPCs. + This function will be used when initilizing a retry with + ``Retry(is_stream=True)``. + + When ``is_stream=False``, the target is treated as a callable, + and will retry when the callable returns an error. When ``is_stream=True``, + the target will be treated as a callable that retruns an iterable. Instead + of just wrapping the initial call in retry logic, the entire iterable is + wrapped, with each yield passing through the retryable generator. If any yield + in the stream raises a retryable exception, the entire stream will be + retried. + + Important Note: when a stream is encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + ``` + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. 
Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + ``` + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != yielded_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` + """ timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 3dc915f1..b252d6b0 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -36,13 +36,19 @@ from functools import partial from google.api_core.retry_streaming import _build_timeout_error -from google.api_core.retry_streaming import _TerminalException _LOGGER = logging.getLogger(__name__) T = TypeVar("T") +class _TerminalException(Exception): + """ + Exception to bypasses retry logic and raises __cause__ immediately. + """ + pass + + async def retry_target_generator( target: Union[ Callable[[], AsyncIterable[T]], Callable[[], Awaitable[AsyncIterable[T]]], ], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[ Callable[[Exception], None] ] = None, **kwargs, ) -> AsyncGenerator[T, None]: + """ + Generator wrapper for retryable streaming RPCs. 
+ This function will be used when initializing a retry with + ``AsyncRetry(is_stream=True)``. + + When ``is_stream=False``, the target is treated as a coroutine, + and will retry when the coroutine returns an error. When ``is_stream=True``, + the target will be treated as a callable that returns an AsyncIterable. Instead + of just wrapping the initial call in retry logic, the entire iterable is + wrapped, with each yield passing through the retryable generator. If any yield + in the stream raises a retryable exception, the entire stream will be + retried. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + grpc call in a function that modifies the request based on what has + already been returned: + + ``` + async def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = await target(new_request) + async for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. 
For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + ``` + async def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + async for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != yielded_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` + """ subgenerator = None timeout = kwargs.get("deadline", timeout) From 0500b8b8a55fd0f384aa1c950baf8be72200f0da Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 15:37:32 -0700 Subject: [PATCH 106/204] fixed mypy issues --- google/api_core/retry_streaming.py | 26 ++++++++----------- google/api_core/retry_streaming_async.py | 32 +++++++++--------------- tests/asyncio/test_retry_async.py | 10 ++++++-- tests/unit/test_retry.py | 1 - 4 files changed, 30 insertions(+), 39 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index caf6c8b8..f74da3b4 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -24,6 +24,7 @@ Generator, TypeVar, Any, + Union, cast, ) @@ -38,13 +39,6 @@ T = TypeVar("T") -class _TerminalException(Exception): - """ - Exception to bypasses retry logic and raises __cause__ immediately. 
- """ - pass - - def _build_timeout_error( exc_list: List[Exception], is_timeout: bool, timeout_val: float ) -> Tuple[Exception, Optional[Exception]]: @@ -74,15 +68,13 @@ def _build_timeout_error( def retry_target_generator( - target: Callable[[], Iterable[T]], + target: Callable[[], Union[Iterable[T], Generator[T, Any, None]]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[ - [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] - ] + Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, **kwargs, ) -> Generator[T, Any, None]: @@ -171,17 +163,19 @@ def on_error(e): except Exception as exc: error_list.append(exc) if not predicate(exc): - exc, source_exc = exc_factory(exc_list=error_list, is_timeout=False) - raise exc from source_exc + final_exc, source_exc = exc_factory( + exc_list=error_list, is_timeout=False + ) + raise final_exc from source_exc if on_error is not None: on_error(exc) finally: if subgenerator is not None and getattr(subgenerator, "close", None): - subgenerator.close() + cast(Generator, subgenerator).close() if deadline is not None and time.monotonic() + sleep > deadline: - exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) - raise exc from source_exc + final_exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + raise final_exc from source_exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) ) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index b252d6b0..2869a84a 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -42,13 +42,6 @@ T = TypeVar("T") -class _TerminalException(Exception): - """ - Exception to bypasses retry logic and raises __cause__ immediately. 
- """ - pass - - async def retry_target_generator( target: Union[ Callable[[], AsyncIterable[T]], @@ -133,12 +126,12 @@ def on_error(e): filter_retry_wrapped = retryable_with_filter(target) ``` """ - subgenerator = None - + subgenerator : Optional[AsyncIterator[T]] = None timeout = kwargs.get("deadline", timeout) - deadline: Optional[float] = time.monotonic() + timeout if timeout else None + # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: List[Exception] = [] + # override exception_factory to build a more complex exception exc_factory = partial( exception_factory or _build_timeout_error, timeout_val=timeout ) @@ -147,12 +140,13 @@ def on_error(e): # Start a new retry loop try: # generator may be raw iterator, or wrapped in an awaitable - subgenerator = target() + gen_instance: Union[AsyncIterable[T], Awaitable[AsyncIterable[T]]] = target() try: - subgenerator = await subgenerator + gen_instance = await gen_instance # type: ignore except TypeError: # was not awaitable pass + subgenerator = cast(AsyncIterable[T], gen_instance).__aiter__() # if target is a generator, we will advance it using asend # otherwise, we will use anext @@ -162,7 +156,7 @@ def on_error(e): while True: ## Read from Subgenerator if supports_send: - next_value = await subgenerator.asend(sent_in) + next_value = await subgenerator.asend(sent_in) # type: ignore else: next_value = await subgenerator.__anext__() ## Yield from Wrapper to caller @@ -173,7 +167,7 @@ def on_error(e): except GeneratorExit: # if wrapper received `aclose`, pass to subgenerator and close if bool(getattr(subgenerator, "aclose", None)): - await subgenerator.aclose() + await cast(AsyncGenerator[T, None], subgenerator).aclose() else: raise return @@ -181,12 +175,10 @@ def on_error(e): # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator if getattr(subgenerator, "athrow", None): - await subgenerator.athrow(*sys.exc_info()) + await 
cast(AsyncGenerator[T, None], subgenerator).athrow(*sys.exc_info()) else: raise return - except _TerminalException as exc: - raise exc.__cause__ from exc.__cause__.__cause__ except StopAsyncIteration: # if generator exhausted, return return @@ -201,12 +193,12 @@ def on_error(e): on_error(exc) finally: if subgenerator is not None and getattr(subgenerator, "aclose", None): - await subgenerator.aclose() + await cast(AsyncGenerator[T, None], subgenerator).aclose() # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: - exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) - raise exc from source_exc + final_exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + raise final_exc from source_exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) ) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 8d46f9a9..78f14a01 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -705,8 +705,10 @@ async def __anext__(self): return CustomIterable(n) if awaitale_wrapped: + async def wrapper(n): return iterable_fn(n) + decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -718,7 +720,6 @@ async def wrapper(n): await retryable.asend("test2") == 2 await retryable.asend("test3") == 3 - @pytest.mark.parametrize("awaitale_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -744,9 +745,12 @@ async def __anext__(self): return self.i - 1 return CustomIterable(n) + if awaitale_wrapped: + async def wrapper(n): return iterable_fn(n) + decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -763,7 +767,6 @@ async def wrapper(n): with pytest.raises(StopAsyncIteration): await new_retryable.__anext__() - @pytest.mark.parametrize("awaitale_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -791,9 +794,12 @@ 
async def __anext__(self): return self.i - 1 return CustomIterable(n) + if awaitale_wrapped: + async def wrapper(n): return iterable_fn(n) + decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index a6ad5d51..3538d253 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -668,7 +668,6 @@ def iterable_fn(n): with pytest.raises(AttributeError): generator.send("test") - @mock.patch("time.sleep", autospec=True) def test___call___with_iterable_close(self, sleep): """ From 1ccadb139b8a0bce8f1425bb13c6098414d28b88 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 15:47:17 -0700 Subject: [PATCH 107/204] fixed issue with py310 --- google/api_core/retry_streaming_async.py | 18 ++++++++++-------- tests/asyncio/test_retry_async.py | 1 + tests/unit/test_retry.py | 1 + 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 2869a84a..c3074695 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -52,9 +52,7 @@ async def retry_target_generator( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[ - [List[Exception], bool, float], Tuple[Exception, Optional[Exception]] - ] + Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, **kwargs, ) -> AsyncGenerator[T, None]: @@ -126,7 +124,7 @@ def on_error(e): filter_retry_wrapped = retryable_with_filter(target) ``` """ - subgenerator : Optional[AsyncIterator[T]] = None + subgenerator: Optional[AsyncIterator[T]] = None timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory @@ -140,9 +138,11 @@ def on_error(e): # Start a new 
retry loop try: # generator may be raw iterator, or wrapped in an awaitable - gen_instance: Union[AsyncIterable[T], Awaitable[AsyncIterable[T]]] = target() + gen_instance: Union[ + AsyncIterable[T], Awaitable[AsyncIterable[T]] + ] = target() try: - gen_instance = await gen_instance # type: ignore + gen_instance = await gen_instance # type: ignore except TypeError: # was not awaitable pass @@ -156,7 +156,7 @@ def on_error(e): while True: ## Read from Subgenerator if supports_send: - next_value = await subgenerator.asend(sent_in) # type: ignore + next_value = await subgenerator.asend(sent_in) # type: ignore else: next_value = await subgenerator.__anext__() ## Yield from Wrapper to caller @@ -175,7 +175,9 @@ def on_error(e): # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator if getattr(subgenerator, "athrow", None): - await cast(AsyncGenerator[T, None], subgenerator).athrow(*sys.exc_info()) + await cast(AsyncGenerator[T, None], subgenerator).athrow( + *sys.exc_info() + ) else: raise return diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 78f14a01..1dade61a 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -605,6 +605,7 @@ async def test___call___generator_send_retry(self, sleep): # error thrown on 3 # generator should contain 0, 1, 2 looping + generator = retry_(self._generator_mock)(error_on=3, ignore_sent=True) assert await generator.__anext__() == 0 unpacked = [await generator.asend(i) for i in range(10)] assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 3538d253..999c57c7 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -643,6 +643,7 @@ def test___call___with_generator_send_retry(self, sleep): result.send("can not send to fresh generator") assert exc_info.match("can't send non-None value") # initiate iteration with None + result = 
retry_(self._generator_mock)(error_on=3, ignore_sent=True) assert result.send(None) == 0 # error thrown on 3 # generator should contain 0, 1, 2 looping From c3122629e84b541a1c870cc547743a3eba33cfb8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 15:53:35 -0700 Subject: [PATCH 108/204] renamed streaming retry function --- google/api_core/retry.py | 6 +++--- google/api_core/retry_async.py | 6 +++--- google/api_core/retry_streaming.py | 3 +-- google/api_core/retry_streaming_async.py | 2 +- tests/asyncio/test_retry_async.py | 12 ++++++------ tests/unit/test_retry.py | 12 ++++++------ 6 files changed, 20 insertions(+), 21 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 9f8f96ec..c202a928 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -66,7 +66,7 @@ def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions -from google.api_core.retry_streaming import retry_target_generator +from google.api_core.retry_streaming import retry_target_stream from google.auth import exceptions as auth_exceptions _LOGGER = logging.getLogger(__name__) @@ -313,7 +313,7 @@ class Retry(object): will be retried. Defaults to False. To avoid duplicate values, retryable streams should typically be wrapped in additional filter logic before use. For more details, see - ``google.api_core.retry_streaming.retry_target_generator``. + ``google.api_core.retry_streaming.retry_target_stream``. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. 
""" @@ -361,7 +361,7 @@ def retry_wrapped_func(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = retry_target_generator if self._is_stream else retry_target + retry_func = retry_target_stream if self._is_stream else retry_target return retry_func( target, self._predicate, diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index de6eb31c..874ad72e 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -61,7 +61,7 @@ async def check_if_exists(): from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 from google.api_core.retry import if_transient_error -from google.api_core.retry_streaming_async import retry_target_generator +from google.api_core.retry_streaming_async import retry_target_stream _LOGGER = logging.getLogger(__name__) @@ -184,7 +184,7 @@ class AsyncRetry: function call itself will be retried. Defaults to False. To avoid duplicate values, retryable streams should typically be wrapped in additional filter logic before use. For more details, see - ``google.api_core.retry_streaming_async.retry_target_generator``. + ``google.api_core.retry_streaming_async.retry_target_stream``. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. 
""" @@ -247,7 +247,7 @@ def retry_wrapped_stream(*args, **kwargs): sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return retry_target_generator( + return retry_target_stream( target, self._predicate, sleep_generator, diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index f74da3b4..32627edc 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -20,7 +20,6 @@ List, Tuple, Iterable, - Iterator, Generator, TypeVar, Any, @@ -67,7 +66,7 @@ def _build_timeout_error( return exc_list[-1], None -def retry_target_generator( +def retry_target_stream( target: Callable[[], Union[Iterable[T], Generator[T, Any, None]]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index c3074695..63ad281b 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -42,7 +42,7 @@ T = TypeVar("T") -async def retry_target_generator( +async def retry_target_stream( target: Union[ Callable[[], AsyncIterable[T]], Callable[[], Awaitable[AsyncIterable[T]]], diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 1dade61a..42c44e8f 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -136,7 +136,7 @@ async def test_retry_target_bad_sleep_generator(): @pytest.mark.asyncio async def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry_streaming_async import retry_target_generator + from google.api_core.retry_streaming_async import retry_target_stream async def target_fn(): async def inner_gen(): @@ -146,7 +146,7 @@ async def inner_gen(): return inner_gen() with pytest.raises(ValueError, match="Sleep generator"): - gen = retry_target_generator(target_fn, lambda x: True, [], None) + gen = 
retry_target_stream(target_fn, lambda x: True, [], None) await gen.__anext__() @@ -828,7 +828,7 @@ async def test_exc_factory_non_retryable_error(self): generator should give the option to override exception creation logic test when non-retryable error is thrown """ - from google.api_core.retry_streaming_async import retry_target_generator + from google.api_core.retry_streaming_async import retry_target_stream timeout = 6 sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] @@ -842,7 +842,7 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = retry_target_generator( + generator = retry_target_stream( self._generator_mock, retry_async.if_exception_type(ValueError), [0] * 3, @@ -867,7 +867,7 @@ async def test_exc_factory_timeout(self): test when timeout is exceeded """ import time - from google.api_core.retry_streaming_async import retry_target_generator + from google.api_core.retry_streaming_async import retry_target_stream timeout = 2 time_now = time.monotonic() @@ -889,7 +889,7 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = retry_target_generator( + generator = retry_target_stream( self._generator_mock, retry_async.if_exception_type(ValueError), [0] * 3, diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 999c57c7..1e05445e 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -164,7 +164,7 @@ def test_retry_target_bad_sleep_generator(): def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry_streaming import retry_target_generator + from google.api_core.retry_streaming import retry_target_stream def target_fn(): def inner_gen(): @@ -176,7 +176,7 @@ def inner_gen(): with pytest.raises( ValueError, match="Sleep generator stopped yielding sleep values" ): - gen = retry_target_generator(target_fn, lambda x: True, 
[], None) + gen = retry_target_stream(target_fn, lambda x: True, [], None) next(gen) @@ -826,7 +826,7 @@ def test_exc_factory_non_retryable_error(self): generator should give the option to override exception creation logic test when non-retryable error is thrown """ - from google.api_core.retry_streaming import retry_target_generator + from google.api_core.retry_streaming import retry_target_stream timeout = 6 sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] @@ -840,7 +840,7 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = retry_target_generator( + generator = retry_target_stream( self._generator_mock, retry.if_exception_type(ValueError), [0] * 3, @@ -864,7 +864,7 @@ def test_exc_factory_timeout(self): test when timeout is exceeded """ import time - from google.api_core.retry_streaming import retry_target_generator + from google.api_core.retry_streaming import retry_target_stream timeout = 2 time_now = time.monotonic() @@ -886,7 +886,7 @@ def factory(*args, **kwargs): assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err - generator = retry_target_generator( + generator = retry_target_stream( self._generator_mock, retry.if_exception_type(ValueError), [0] * 3, From 1fe57e0905b9b23c8eb839edb8f6f4e46ea68c67 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 15:58:12 -0700 Subject: [PATCH 109/204] removed unneeded functions --- tests/asyncio/test_retry_async.py | 9 +-------- tests/unit/test_retry.py | 9 +-------- 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 42c44e8f..3c136897 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -138,15 +138,8 @@ async def test_retry_target_bad_sleep_generator(): async def test_retry_streaming_target_bad_sleep_generator(): from 
google.api_core.retry_streaming_async import retry_target_stream - async def target_fn(): - async def inner_gen(): - raise RuntimeError("initiate retry") - yield None - - return inner_gen() - with pytest.raises(ValueError, match="Sleep generator"): - gen = retry_target_stream(target_fn, lambda x: True, [], None) + gen = retry_target_stream(None, lambda x: True, [], None) await gen.__anext__() diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 1e05445e..0b13c292 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -166,17 +166,10 @@ def test_retry_target_bad_sleep_generator(): def test_retry_streaming_target_bad_sleep_generator(): from google.api_core.retry_streaming import retry_target_stream - def target_fn(): - def inner_gen(): - raise RuntimeError("initiate retry") - yield None - - return inner_gen() - with pytest.raises( ValueError, match="Sleep generator stopped yielding sleep values" ): - gen = retry_target_stream(target_fn, lambda x: True, [], None) + gen = retry_target_stream(None, lambda x: True, [], None) next(gen) From 4f09f291a83acba0711ac928017c736650eadf59 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 16:06:06 -0700 Subject: [PATCH 110/204] simplified some test functions --- tests/asyncio/test_retry_async.py | 66 +++++++++++++------------------ tests/unit/test_retry.py | 2 +- 2 files changed, 28 insertions(+), 40 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 3c136897..26d55f38 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -138,8 +138,8 @@ async def test_retry_target_bad_sleep_generator(): async def test_retry_streaming_target_bad_sleep_generator(): from google.api_core.retry_streaming_async import retry_target_stream + gen = retry_target_stream(None, lambda x: True, [], None) with pytest.raises(ValueError, match="Sleep generator"): - gen = retry_target_stream(None, lambda x: True, [], None) 
await gen.__anext__() @@ -681,33 +681,28 @@ async def test___call___with_iterable_send(self, sleep, awaitale_wrapped): """ retry_ = retry_async.AsyncRetry(is_stream=True) - def iterable_fn(n): + def iterable_fn(): class CustomIterable: - def __init__(self, n): - self.n = n - self.i = 0 + def __init__(self): + self.i = -1 def __aiter__(self): return self async def __anext__(self): - if self.i == self.n: - raise StopAsyncIteration self.i += 1 - return self.i - 1 + return self.i - return CustomIterable(n) + return CustomIterable() if awaitale_wrapped: - - async def wrapper(n): - return iterable_fn(n) - + async def wrapper(): + return iterable_fn() decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) - retryable = decorated(4) + retryable = decorated() result = await retryable.__anext__() assert result == 0 await retryable.asend("test") == 1 @@ -723,40 +718,36 @@ async def test___call___with_iterable_close(self, sleep, awaitale_wrapped): """ retry_ = retry_async.AsyncRetry(is_stream=True) - def iterable_fn(n): + def iterable_fn(): class CustomIterable: - def __init__(self, n): - self.n = n - self.i = 0 + def __init__(self): + self.i = -1 def __aiter__(self): return self async def __anext__(self): - if self.i == self.n: - raise StopAsyncIteration self.i += 1 - return self.i - 1 + return self.i - return CustomIterable(n) + return CustomIterable() if awaitale_wrapped: - - async def wrapper(n): - return iterable_fn(n) + async def wrapper(): + return iterable_fn() decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) # try closing active generator - retryable = decorated(4) + retryable = decorated() assert await retryable.__anext__() == 0 await retryable.aclose() with pytest.raises(StopAsyncIteration): await retryable.__anext__() # try closing new generator - new_retryable = decorated(4) + new_retryable = decorated() await new_retryable.aclose() with pytest.raises(StopAsyncIteration): await new_retryable.__anext__() @@ -772,34 +763,31 @@ async def 
test___call___with_iterable_throw(self, sleep, awaitale_wrapped): predicate = retry_async.if_exception_type(ValueError) retry_ = retry_async.AsyncRetry(is_stream=True, predicate=predicate) - def iterable_fn(n): + def iterable_fn(): class CustomIterable: - def __init__(self, n): - self.n = n - self.i = 0 + def __init__(self): + self.i = -1 def __aiter__(self): return self async def __anext__(self): - if self.i == self.n: - raise StopAsyncIteration self.i += 1 - return self.i - 1 + return self.i - return CustomIterable(n) + return CustomIterable() if awaitale_wrapped: - async def wrapper(n): - return iterable_fn(n) + async def wrapper(): + return iterable_fn() decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) # try throwing with active generator - retryable = decorated(4) + retryable = decorated() assert await retryable.__anext__() == 0 # should swallow errors in predicate await retryable.athrow(ValueError("test")) @@ -809,7 +797,7 @@ async def wrapper(n): with pytest.raises(StopAsyncIteration): await retryable.__anext__() # try throwing with new generator - new_retryable = decorated(4) + new_retryable = decorated() with pytest.raises(BufferError): await new_retryable.athrow(BufferError("test")) with pytest.raises(StopAsyncIteration): diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 0b13c292..3e0ecd51 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -166,10 +166,10 @@ def test_retry_target_bad_sleep_generator(): def test_retry_streaming_target_bad_sleep_generator(): from google.api_core.retry_streaming import retry_target_stream + gen = retry_target_stream(None, lambda x: True, [], None) with pytest.raises( ValueError, match="Sleep generator stopped yielding sleep values" ): - gen = retry_target_stream(None, lambda x: True, [], None) next(gen) From 06824b96461a893ea40f154dd051b4e78deaa34e Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 15 Aug 2023 23:08:11 +0000 Subject: [PATCH 111/204] 
=?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/asyncio/test_retry_async.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 26d55f38..b3ee681e 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -696,8 +696,10 @@ async def __anext__(self): return CustomIterable() if awaitale_wrapped: + async def wrapper(): return iterable_fn() + decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -733,6 +735,7 @@ async def __anext__(self): return CustomIterable() if awaitale_wrapped: + async def wrapper(): return iterable_fn() From 343157bcfb1469f1bbbb28afc0012d2d869f4719 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 15 Aug 2023 16:15:05 -0700 Subject: [PATCH 112/204] removed unneeded test variable --- tests/asyncio/test_retry_async.py | 3 +-- tests/unit/test_retry.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index b3ee681e..53b04799 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -138,9 +138,8 @@ async def test_retry_target_bad_sleep_generator(): async def test_retry_streaming_target_bad_sleep_generator(): from google.api_core.retry_streaming_async import retry_target_stream - gen = retry_target_stream(None, lambda x: True, [], None) with pytest.raises(ValueError, match="Sleep generator"): - await gen.__anext__() + await retry_target_stream(None, None, [], None).__anext__() class TestAsyncRetry: diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 3e0ecd51..61868e1b 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -166,11 +166,10 @@ def 
test_retry_target_bad_sleep_generator(): def test_retry_streaming_target_bad_sleep_generator(): from google.api_core.retry_streaming import retry_target_stream - gen = retry_target_stream(None, lambda x: True, [], None) with pytest.raises( ValueError, match="Sleep generator stopped yielding sleep values" ): - next(gen) + next(retry_target_stream(None, None, [], None)) class TestRetry(object): From 93f82cc8d948dc0845412fb027d7dbb160924dcd Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 16 Aug 2023 09:32:54 -0700 Subject: [PATCH 113/204] improved documentation --- google/api_core/retry_streaming.py | 168 +++++++++++++--------- google/api_core/retry_streaming_async.py | 173 ++++++++++++++--------- 2 files changed, 208 insertions(+), 133 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 32627edc..fe21195c 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -12,7 +12,74 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Helpers for retries for streaming APIs.""" +""" +Generator wrapper for retryable streaming RPCs. +This function will be used when initilizing a retry with +``Retry(is_stream=True)``. + +When ``is_stream=False``, the target is treated as a callable, +and will retry when the callable returns an error. When ``is_stream=True``, +the target will be treated as a callable that retruns an iterable. Instead +of just wrapping the initial call in retry logic, the entire iterable is +wrapped, with each yield passing through the retryable generator. If any yield +in the stream raises a retryable exception, the entire stream will be +retried. + +Important Note: when a stream is encounters a retryable error, it will +silently construct a fresh iterator instance in the background +and continue yielding (likely duplicate) values as if no error occurred. 
+This is the most general way to retry a stream, but it often is not the +desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + +There are two ways to build more advanced retry logic for streams: + +1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + ``` + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. 
For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + `` + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != previous_stream[stream_idx]: + raise ValueError("Stream differs from last attempt")" + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` +""" from typing import ( Callable, @@ -77,74 +144,45 @@ def retry_target_stream( ] = None, **kwargs, ) -> Generator[T, Any, None]: - """ - Generator wrapper for retryable streaming RPCs. - This function will be used when initilizing a retry with - ``Retry(is_stream=True)``. - - When ``is_stream=False``, the target is treated as a callable, - and will retry when the callable returns an error. When ``is_stream=True``, - the target will be treated as a callable that retruns an iterable. Instead - of just wrapping the initial call in retry logic, the entire iterable is - wrapped, with each yield passing through the retryable generator. If any yield - in the stream raises a retryable exception, the entire stream will be - retried. - - Important Note: when a stream is encounters a retryable error, it will - silently construct a fresh iterator instance in the background - and continue yielding (likely duplicate) values as if no error occurred. - This is the most general way to retry a stream, but it often is not the - desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - - There are two ways to build more advanced retry logic for streams: - - 1. 
Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - network call in a function that modifies the request based on what has - already been returned: + """Create a generator wrapper that retries the wrapped stream if it fails. - ``` - def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = target(new_request) - for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) - ``` + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. - 2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: + Args: + target: The generator function to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error: A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. 
+ exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It it given a list of all exceptions encountered, a boolean indicating + whether the failure was due to a timeout, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. + If not provided, a default implementation will raise a RetryError + on timeout, or the last exception encountered otherwise. - `` - def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - for item in retryable_gen: - if stream_idx >= len(yielded_items): - yield item - yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` + Returns: + Generator: A retryable generator that wraps the target generator function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the deadline is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. 
""" + timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 63ad281b..eb235143 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -11,8 +11,74 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -"""Helpers for retries for async streaming APIs.""" +""" +Generator wrapper for retryable streaming RPCs. +This function will be used when initilizing a retry with +``AsyncRetry(is_stream=True)``. + +When ``is_stream=False``, the target is treated as a coroutine, +and will retry when the coroutine returns an error. When ``is_stream=True``, +the target will be treated as a callable that retruns an AsyncIterable. Instead +of just wrapping the initial call in retry logic, the entire iterable is +wrapped, with each yield passing through the retryable generatpr. If any yield +in the stream raises a retryable exception, the entire stream will be +retried. + +Important Note: when a stream is encounters a retryable error, it will +silently construct a fresh iterator instance in the background +and continue yielding (likely duplicate) values as if no error occurred. +This is the most general way to retry a stream, but it often is not the +desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + +There are two ways to build more advanced retry logic for streams: + +1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. 
For example, you can wrap a + grpc call in a function that modifies the request based on what has + already been returned: + + ``` + async def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = await target(new_request) + async for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + `` + async def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + async for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != previous_stream[stream_idx]: + raise ValueError("Stream differs from last attempt")" + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` +""" from typing import ( cast, @@ -56,74 +122,45 @@ async def retry_target_stream( ] = None, **kwargs, ) -> AsyncGenerator[T, None]: + """Create a generator wrapper that retries the wrapped stream if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target: The generator function to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. 
+ predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error: A function to call while processing a + retryable exception. Any error raised by this function will *not* + be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It it given a list of all exceptions encountered, a boolean indicating + whether the failure was due to a timeout, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. + If not provided, a default implementation will raise a RetryError + on timeout, or the last exception encountered otherwise. + + Returns: + AssyncGenerator: A retryable generator that wraps the target generator function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the deadline is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. """ - Generator wrapper for retryable streaming RPCs. - This function will be used when initilizing a retry with - ``AsyncRetry(is_stream=True)``. - - When ``is_stream=False``, the target is treated as a coroutine, - and will retry when the coroutine returns an error. When ``is_stream=True``, - the target will be treated as a callable that retruns an AsyncIterable. 
Instead - of just wrapping the initial call in retry logic, the entire iterable is - wrapped, with each yield passing through the retryable generatpr. If any yield - in the stream raises a retryable exception, the entire stream will be - retried. - - Important Note: when a stream is encounters a retryable error, it will - silently construct a fresh iterator instance in the background - and continue yielding (likely duplicate) values as if no error occurred. - This is the most general way to retry a stream, but it often is not the - desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - - There are two ways to build more advanced retry logic for streams: - - 1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - grpc call in a function that modifies the request based on what has - already been returned: - ``` - async def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = await target(new_request) - async for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) - ``` - - 2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. 
For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - `` - async def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - async for item in retryable_gen: - if stream_idx >= len(yielded_items): - yield item - yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` - """ subgenerator: Optional[AsyncIterator[T]] = None timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None From 61e5ab52efcd7e3e39abc2658335a3daaef4c84e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 12:58:24 -0700 Subject: [PATCH 114/204] fixed type hinting issues --- google/api_core/retry.py | 39 +++++++++++++++++++----------- google/api_core/retry_streaming.py | 10 +++++--- 2 files changed, 31 insertions(+), 18 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index cd98a857..3248bbe9 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -62,7 +62,16 @@ def check_if_exists(): import random import sys import time -from typing import Any, Callable, TypeVar, TYPE_CHECKING +from typing import ( + Any, + Callable, + TypeVar, + Union, + Generator, + Iterable, + cast, + TYPE_CHECKING, +) import requests.exceptions @@ -77,8 +86,9 @@ def check_if_exists(): else: from typing_extensions import ParamSpec - _P = ParamSpec("_P") - _R = TypeVar("_R") + _P = ParamSpec("_P") # target function call parameters + _R = TypeVar("_R") # target 
function returned value + _Y = TypeVar("_Y") # target stream yielded values _LOGGER = logging.getLogger(__name__) _DEFAULT_INITIAL_DELAY = 1.0 # seconds @@ -353,9 +363,9 @@ def __init__( def __call__( self, - func: Callable[_P, _R], + func: Callable[_P, _R | Iterable[_Y]], on_error: Callable[[BaseException], Any] | None = None, - ) -> Callable[_P, _R]: + ) -> Callable[_P, _R | Generator[_Y, Any, None]]: """Wrap a callable with retry behavior. Args: @@ -372,20 +382,21 @@ def __call__( on_error = self._on_error @functools.wraps(func) - def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: + def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> _R | Generator[_Y, Any, None]: """A wrapper that calls target function with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = retry_target_stream if self._is_stream else retry_target - return retry_func( - target, - self._predicate, - sleep_generator, - self._timeout, - on_error=on_error, - ) + retry_args = (self._predicate, sleep_generator, self._timeout, on_error) + if self._is_stream: + # when stream is enabled, assume target returns an iterable that yields _Y + stream_target = cast(Callable[[], Iterable[_Y]], target) + return retry_target_stream(stream_target, *retry_args) + else: + return retry_target(target, *retry_args) return retry_wrapped_func diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index fe21195c..1d2cd21d 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -92,6 +92,7 @@ def on_error(e): Any, Union, cast, + TYPE_CHECKING, ) import logging @@ -100,9 +101,10 @@ def on_error(e): from google.api_core import exceptions -_LOGGER = logging.getLogger(__name__) +if TYPE_CHECKING: + _Y = TypeVar("_Y") -T = TypeVar("T") +_LOGGER = logging.getLogger(__name__) def _build_timeout_error( @@ 
-134,7 +136,7 @@ def _build_timeout_error( def retry_target_stream( - target: Callable[[], Union[Iterable[T], Generator[T, Any, None]]], + target: Callable[[], Iterable[_Y]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, @@ -143,7 +145,7 @@ def retry_target_stream( Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, **kwargs, -) -> Generator[T, Any, None]: +) -> Generator[_Y, Any, None]: """Create a generator wrapper that retries the wrapped stream if it fails. This is the lowest-level retry helper. Generally, you'll use the From 51c125b29c2aabbd4321e992feff5e4798cd809c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 13:10:16 -0700 Subject: [PATCH 115/204] fixed undefined name issues --- google/api_core/retry.py | 2 +- google/api_core/retry_streaming.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 3248bbe9..d65aac83 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -393,7 +393,7 @@ def retry_wrapped_func( retry_args = (self._predicate, sleep_generator, self._timeout, on_error) if self._is_stream: # when stream is enabled, assume target returns an iterable that yields _Y - stream_target = cast(Callable[[], Iterable[_Y]], target) + stream_target = cast(Callable[[], Iterable["_Y"]], target) return retry_target_stream(stream_target, *retry_args) else: return retry_target(target, *retry_args) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 1d2cd21d..f2ea719a 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -101,8 +101,7 @@ def on_error(e): from google.api_core import exceptions -if TYPE_CHECKING: - _Y = TypeVar("_Y") +_Y = TypeVar("_Y") _LOGGER = logging.getLogger(__name__) From 02604bc73ab92f6d5ca1128f0fd4f2355a087790 Mon Sep 17 00:00:00 2001 From: Daniel Sanche 
Date: Fri, 1 Sep 2023 13:13:29 -0700 Subject: [PATCH 116/204] fixed lint issues --- google/api_core/retry.py | 11 +---------- google/api_core/retry_streaming.py | 4 +--- 2 files changed, 2 insertions(+), 13 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index d65aac83..58022fb6 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -62,16 +62,7 @@ def check_if_exists(): import random import sys import time -from typing import ( - Any, - Callable, - TypeVar, - Union, - Generator, - Iterable, - cast, - TYPE_CHECKING, -) +from typing import Any, Callable, TypeVar, Generator, Iterable, cast, TYPE_CHECKING import requests.exceptions diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index f2ea719a..d8e7cfa7 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -90,9 +90,7 @@ def on_error(e): Generator, TypeVar, Any, - Union, cast, - TYPE_CHECKING, ) import logging @@ -101,7 +99,7 @@ def on_error(e): from google.api_core import exceptions -_Y = TypeVar("_Y") +_Y = TypeVar("_Y") # yielded values _LOGGER = logging.getLogger(__name__) From 6269db2ae9f0fe407c8893bd45e3840cac9af915 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 21:43:47 +0000 Subject: [PATCH 117/204] update comment Co-authored-by: Victor Chudnovsky --- google/api_core/retry_streaming.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index d8e7cfa7..368a37bd 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -25,7 +25,7 @@ in the stream raises a retryable exception, the entire stream will be retried. 
-Important Note: when a stream is encounters a retryable error, it will +NOTE: when a stream encounters a retryable error, it will silently construct a fresh iterator instance in the background and continue yielding (likely duplicate) values as if no error occurred. This is the most general way to retry a stream, but it often is not the From 0dcd0dec87b9a44b29fb0e2bcd7adc75f1f6d30d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 21:44:11 +0000 Subject: [PATCH 118/204] fix typo Co-authored-by: Victor Chudnovsky --- google/api_core/retry_streaming.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 368a37bd..fd527cea 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -19,7 +19,7 @@ When ``is_stream=False``, the target is treated as a callable, and will retry when the callable returns an error. When ``is_stream=True``, -the target will be treated as a callable that retruns an iterable. Instead +the target will be treated as a callable that returns an iterable. Instead of just wrapping the initial call in retry logic, the entire iterable is wrapped, with each yield passing through the retryable generator. 
If any yield in the stream raises a retryable exception, the entire stream will be From 54e9c81ec97ba1ee568d3fefc9c89419fe99d835 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 21:50:25 +0000 Subject: [PATCH 119/204] Update google/api_core/retry_streaming.py fixed typo Co-authored-by: Victor Chudnovsky --- google/api_core/retry_streaming.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index fd527cea..48bbd170 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -192,7 +192,7 @@ def retry_target_stream( for sleep in sleep_generator: # Start a new retry loop try: - # create and yeild from a new instance of the generator from input generator function + # create and yield from a new instance of the generator from input generator function subgenerator = target() return (yield from subgenerator) # handle exceptions raised by the subgenerator From 234291081e45e7fb2514ae2822ca79c3746b67dc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 14:44:15 -0700 Subject: [PATCH 120/204] added comment to on_error --- google/api_core/retry.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 58022fb6..b9616793 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -364,6 +364,8 @@ def __call__( on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + If on_error was specified in the constructor, this value will + be ignored. 
Returns: Callable: A callable that will invoke ``func`` with retry From eada0d74fc01a5a0aea6041f7317ee3379769fe0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 14:45:27 -0700 Subject: [PATCH 121/204] fixed indentation --- google/api_core/retry_streaming.py | 54 +++++++++++++++--------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 48bbd170..8ad112f0 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -51,34 +51,34 @@ def attempt_with_modified_request(target, request, seen_items=[]): retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) ``` - 2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - `` - def retryable_with_filter(target): +2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. 
For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + `` + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - for item in retryable_gen: - if stream_idx >= len(yielded_items): - yield item - yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` + # build retryable + retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != previous_stream[stream_idx]: + raise ValueError("Stream differs from last attempt")" + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` """ from typing import ( From ae2bf37c309f397a592b0f8ebdb47d8cf54b3972 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 15:06:02 -0700 Subject: [PATCH 122/204] improved sample --- google/api_core/retry_streaming.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 8ad112f0..56cdc848 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -69,12 +69,12 @@ def on_error(e): retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) # keep track of what has 
been yielded out of filter yielded_items = [] - for item in retryable_gen: + for item in retryable_gen(): if stream_idx >= len(yielded_items): - yield item yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" + yield item + elif item != yielded_items[stream_idx]: + raise ValueError("Stream differs from last attempt") stream_idx += 1 filter_retry_wrapped = retryable_with_filter(target) From c8a4f26eca415063aba5c6fd578a1f3f2dfbd43c Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 15:19:31 -0700 Subject: [PATCH 123/204] improved default exception factory --- google/api_core/retry_streaming.py | 13 +++++++++---- google/api_core/retry_streaming_async.py | 4 ++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 56cdc848..81db2d84 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -104,7 +104,7 @@ def on_error(e): _LOGGER = logging.getLogger(__name__) -def _build_timeout_error( +def _build_retry_error( exc_list: List[Exception], is_timeout: bool, timeout_val: float ) -> Tuple[Exception, Optional[Exception]]: """ @@ -119,8 +119,9 @@ def _build_timeout_error( Returns: - tuple[Exception, Exception|None]: a tuple of the exception to be raised, and the cause exception if any """ - src_exc = exc_list[-1] if exc_list else None if is_timeout: + # return RetryError with the most recent exception as the cause + src_exc = exc_list[-1] if exc_list else None return ( exceptions.RetryError( "Timeout of {:.1f}s exceeded".format(timeout_val), @@ -128,8 +129,12 @@ def _build_timeout_error( ), src_exc, ) - else: + elif exc_list: + # return most recent exception encountered return exc_list[-1], None + else: + # no exceptions were given in exc_list. 
Raise generic RetryError + return exceptions.RetryError("Unknown error", None), None def retry_target_stream( @@ -186,7 +191,7 @@ def retry_target_stream( deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] exc_factory = partial( - exception_factory or _build_timeout_error, timeout_val=timeout + exception_factory or _build_retry_error, timeout_val=timeout ) for sleep in sleep_generator: diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index eb235143..9d80b796 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -101,7 +101,7 @@ def on_error(e): import sys from functools import partial -from google.api_core.retry_streaming import _build_timeout_error +from google.api_core.retry_streaming import _build_retry_error _LOGGER = logging.getLogger(__name__) @@ -168,7 +168,7 @@ async def retry_target_stream( error_list: List[Exception] = [] # override exception_factory to build a more complex exception exc_factory = partial( - exception_factory or _build_timeout_error, timeout_val=timeout + exception_factory or _build_retry_error, timeout_val=timeout ) for sleep in sleep_generator: From 2840b9f3e11a52117fe690bd80c64e7d46397b56 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 15:23:01 -0700 Subject: [PATCH 124/204] added pylint disable line --- google/api_core/retry_streaming.py | 2 ++ google/api_core/retry_streaming_async.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 81db2d84..997b4e23 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -201,6 +201,8 @@ def retry_target_stream( subgenerator = target() return (yield from subgenerator) # handle exceptions raised by the subgenerator + # pylint: disable=broad-except + # This function explicitly must deal with broad 
exceptions. except Exception as exc: error_list.append(exc) if not predicate(exc): diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 9d80b796..fa16261d 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -221,8 +221,9 @@ async def retry_target_stream( except StopAsyncIteration: # if generator exhausted, return return + # handle exceptions raised by the subgenerator # pylint: disable=broad-except - # This function handles exceptions thrown by subgenerator + # This function explicitly must deal with broad exceptions. except (Exception, asyncio.CancelledError) as exc: error_list.append(exc) if not predicate(exc): From 82274a37fcea3cdeb1ff68127c2e46848b003ee7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 15:36:08 -0700 Subject: [PATCH 125/204] cleaned up async retry wrapping --- google/api_core/retry_async.py | 25 ++++--------------------- 1 file changed, 4 insertions(+), 21 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 874ad72e..970dbca4 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -226,13 +226,14 @@ def __call__(self, func, on_error=None): on_error = self._on_error @functools.wraps(func) - async def retry_wrapped_func(*args, **kwargs): + def retry_wrapped_func(*args, **kwargs): """A wrapper that calls target function with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - return await retry_target( + retry_func = retry_target if not self._is_stream else retry_target_stream + return retry_func( target, self._predicate, sleep_generator, @@ -240,25 +241,7 @@ async def retry_wrapped_func(*args, **kwargs): on_error=on_error, ) - @functools.wraps(func) - def retry_wrapped_stream(*args, **kwargs): - """A wrapper that iterates over target stream 
with retry.""" - target = functools.partial(func, *args, **kwargs) - sleep_generator = exponential_sleep_generator( - self._initial, self._maximum, multiplier=self._multiplier - ) - return retry_target_stream( - target, - self._predicate, - sleep_generator, - timeout=self._timeout, - on_error=on_error, - ) - - if self._is_stream: - return retry_wrapped_stream - else: - return retry_wrapped_func + return retry_wrapped_func def _replace( self, From 1594a17dbefb1a3e43b5563921b5c62d45e66592 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 15:57:57 -0700 Subject: [PATCH 126/204] improved sample --- google/api_core/retry_streaming.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 997b4e23..088979ce 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -48,7 +48,8 @@ def attempt_with_modified_request(target, request, seen_items=[]): yield item seen_items.append(item) - retry_wrapped = Retry(is_stream=True)(attempt_with_modified_request, target, request, []) + retry_wrapped_fn = Retry(is_stream=True)(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) ``` 2. 
Wrap the retry generator From 9b0ddb0afea4e207241a0ab8102dec90022bb346 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 1 Sep 2023 23:06:59 +0000 Subject: [PATCH 127/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/api_core/retry_streaming.py | 4 +--- google/api_core/retry_streaming_async.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 088979ce..fc4a2ef0 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -191,9 +191,7 @@ def retry_target_stream( timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] - exc_factory = partial( - exception_factory or _build_retry_error, timeout_val=timeout - ) + exc_factory = partial(exception_factory or _build_retry_error, timeout_val=timeout) for sleep in sleep_generator: # Start a new retry loop diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index fa16261d..851fb731 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -167,9 +167,7 @@ async def retry_target_stream( # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: List[Exception] = [] # override exception_factory to build a more complex exception - exc_factory = partial( - exception_factory or _build_retry_error, timeout_val=timeout - ) + exc_factory = partial(exception_factory or _build_retry_error, timeout_val=timeout) for sleep in sleep_generator: # Start a new retry loop From 8985127cdd4c7213ece80feb41b0f36b6577281d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 
Sep 2023 16:23:59 -0700 Subject: [PATCH 128/204] remove extra generator close line --- google/api_core/retry_streaming.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index fc4a2ef0..950b25d0 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -211,9 +211,6 @@ def retry_target_stream( raise final_exc from source_exc if on_error is not None: on_error(exc) - finally: - if subgenerator is not None and getattr(subgenerator, "close", None): - cast(Generator, subgenerator).close() if deadline is not None and time.monotonic() + sleep > deadline: final_exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) From 60b20abd37b8f57e5a497df057fa98a2cdc242f7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 1 Sep 2023 16:32:12 -0700 Subject: [PATCH 129/204] added missing test --- google/api_core/retry_streaming.py | 1 - tests/unit/test_retry.py | 13 +++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 950b25d0..e007906c 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -91,7 +91,6 @@ def on_error(e): Generator, TypeVar, Any, - cast, ) import logging diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 61868e1b..0aa61c5f 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -61,6 +61,19 @@ def test_exponential_sleep_generator_base_2(uniform): assert result == [1, 2, 4, 8, 16, 32, 60, 60] +def test__build_retry_error_empty_list(): + """ + attempt to build a retry error with no errors encountered + should return a generic RetryError + """ + from google.api_core.retry_streaming import _build_retry_error + + src, cause = _build_retry_error([], False, 10) + assert isinstance(src, exceptions.RetryError) + assert cause is None + assert src.message == "Unknown error" + + 
@mock.patch("time.sleep", autospec=True) @mock.patch( "google.api_core.datetime_helpers.utcnow", From 237ca3d947cdeb7db97f172bc37e6cf78dfe27d3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 12 Sep 2023 11:58:01 -0700 Subject: [PATCH 130/204] AsyncRetry adds a coroutine in front of async streams --- google/api_core/retry_async.py | 8 ++++--- tests/asyncio/test_retry_async.py | 35 +++++++++++++++---------------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 970dbca4..2cff0548 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -225,21 +225,23 @@ def __call__(self, func, on_error=None): if self._on_error is not None: on_error = self._on_error - @functools.wraps(func) - def retry_wrapped_func(*args, **kwargs): + # @functools.wraps(func) + async def retry_wrapped_func(*args, **kwargs): """A wrapper that calls target function with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) retry_func = retry_target if not self._is_stream else retry_target_stream - return retry_func( + fn_result = retry_func( target, self._predicate, sleep_generator, timeout=self._timeout, on_error=on_error, ) + # if the target is not a stream, await the result before returning + return await fn_result if not self._is_stream else fn_result return retry_wrapped_func diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 53b04799..84008445 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -451,13 +451,12 @@ async def test___call___generator_success(self, sleep): from collections.abc import AsyncGenerator retry_ = retry_async.AsyncRetry(is_stream=True) - decorated = retry_(self._generator_mock) num = 10 - generator = decorated(num) + generator = await decorated(num) # check types - 
assert isinstance(decorated(num), AsyncGenerator) + assert isinstance(generator, AsyncGenerator) assert isinstance(self._generator_mock(num), AsyncGenerator) # check yield contents unpacked = [i async for i in generator] @@ -480,7 +479,7 @@ async def test___call___generator_retry(self, sleep): is_stream=True, timeout=None, ) - generator = retry_(self._generator_mock)(error_on=3) + generator = await retry_(self._generator_mock)(error_on=3) # error thrown on 3 # generator should contain 0, 1, 2 looping unpacked = [await generator.__anext__() for i in range(10)] @@ -514,7 +513,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): ) decorated = retry_(self._generator_mock, on_error=on_error) - generator = decorated(error_on=1) + generator = await decorated(error_on=1) with now_patcher as patched_now: # Make sure that calls to fake asyncio.sleep() also advance the mocked @@ -545,7 +544,7 @@ async def test___call___generator_cancellations(self): retry_ = retry_async.AsyncRetry(is_stream=True) utcnow = datetime.datetime.utcnow() mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) - generator = retry_(self._generator_mock)(sleep_time=0.2) + generator = await retry_(self._generator_mock)(sleep_time=0.2) await generator.__anext__() == 0 task = asyncio.create_task(generator.__anext__()) await asyncio.sleep(0.1) @@ -565,7 +564,7 @@ async def test___call___with_generator_send(self, sleep): decorated = retry_(self._generator_mock) - generator = decorated(10) + generator = await decorated(10) result = await generator.__anext__() assert result == 0 in_messages = ["test_1", "hello", "world"] @@ -590,14 +589,14 @@ async def test___call___generator_send_retry(self, sleep): is_stream=True, timeout=None, ) - generator = retry_(self._generator_mock)(error_on=3, ignore_sent=True) + generator = await retry_(self._generator_mock)(error_on=3, ignore_sent=True) with pytest.raises(TypeError) as exc_info: await generator.asend("can not send 
to fresh generator") assert exc_info.match("can't send non-None value") # error thrown on 3 # generator should contain 0, 1, 2 looping - generator = retry_(self._generator_mock)(error_on=3, ignore_sent=True) + generator = await retry_(self._generator_mock)(error_on=3, ignore_sent=True) assert await generator.__anext__() == 0 unpacked = [await generator.asend(i) for i in range(10)] assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] @@ -612,7 +611,7 @@ async def test___call___with_generator_close(self, sleep): retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(self._generator_mock) exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) + generator = await decorated(10, exceptions_seen=exception_list) for i in range(2): await generator.__anext__() await generator.aclose() @@ -632,7 +631,7 @@ async def test___call___with_new_generator_close(self, sleep): retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(self._generator_mock) exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) + generator = await decorated(10, exceptions_seen=exception_list) await generator.aclose() with pytest.raises(StopAsyncIteration): @@ -651,7 +650,7 @@ async def test___call___with_generator_throw(self, sleep): ) decorated = retry_(self._generator_mock) exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) + generator = await decorated(10, exceptions_seen=exception_list) for i in range(2): await generator.__anext__() with pytest.raises(BufferError): @@ -662,7 +661,7 @@ async def test___call___with_generator_throw(self, sleep): await generator.__anext__() # should retry if throw retryable exception exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) + generator = await decorated(10, exceptions_seen=exception_list) for i in range(2): await generator.__anext__() throw_val = await generator.athrow(ValueError("test")) @@ -703,7 +702,7 @@ async def wrapper(): else: 
decorated = retry_(iterable_fn) - retryable = decorated() + retryable = await decorated() result = await retryable.__anext__() assert result == 0 await retryable.asend("test") == 1 @@ -743,13 +742,13 @@ async def wrapper(): decorated = retry_(iterable_fn) # try closing active generator - retryable = decorated() + retryable = await decorated() assert await retryable.__anext__() == 0 await retryable.aclose() with pytest.raises(StopAsyncIteration): await retryable.__anext__() # try closing new generator - new_retryable = decorated() + new_retryable = await decorated() await new_retryable.aclose() with pytest.raises(StopAsyncIteration): await new_retryable.__anext__() @@ -789,7 +788,7 @@ async def wrapper(): decorated = retry_(iterable_fn) # try throwing with active generator - retryable = decorated() + retryable = await decorated() assert await retryable.__anext__() == 0 # should swallow errors in predicate await retryable.athrow(ValueError("test")) @@ -799,7 +798,7 @@ async def wrapper(): with pytest.raises(StopAsyncIteration): await retryable.__anext__() # try throwing with new generator - new_retryable = decorated() + new_retryable = await decorated() with pytest.raises(BufferError): await new_retryable.athrow(BufferError("test")) with pytest.raises(StopAsyncIteration): From a46c0f7b0be9b411d187702776752a1111f68e33 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 12 Sep 2023 14:35:13 -0700 Subject: [PATCH 131/204] improved type checking --- google/api_core/retry_async.py | 67 +++++++++++++++++------- google/api_core/retry_streaming_async.py | 24 +++++---- 2 files changed, 60 insertions(+), 31 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 2cff0548..b5871e39 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -51,10 +51,22 @@ async def check_if_exists(): """ +from __future__ import annotations + import asyncio import datetime import functools import logging +from typing import ( 
+ Awaitable, + Any, + Callable, + TypeVar, + AsyncGenerator, + AsyncIterable, + cast, + TYPE_CHECKING, +) from google.api_core import datetime_helpers from google.api_core import exceptions @@ -63,6 +75,17 @@ async def check_if_exists(): from google.api_core.retry import if_transient_error from google.api_core.retry_streaming_async import retry_target_stream +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _R = TypeVar("_R") # target function returned value + _Y = TypeVar("_Y") # target stream yielded values _LOGGER = logging.getLogger(__name__) _DEFAULT_INITIAL_DELAY = 1.0 # seconds @@ -191,14 +214,14 @@ class AsyncRetry: def __init__( self, - predicate=if_transient_error, - initial=_DEFAULT_INITIAL_DELAY, - maximum=_DEFAULT_MAXIMUM_DELAY, - multiplier=_DEFAULT_DELAY_MULTIPLIER, - timeout=_DEFAULT_TIMEOUT, - on_error=None, - is_stream=False, - **kwargs + predicate: Callable[[BaseException], bool] = if_transient_error, + initial: float = _DEFAULT_INITIAL_DELAY, + maximum: float = _DEFAULT_MAXIMUM_DELAY, + multiplier: float = _DEFAULT_DELAY_MULTIPLIER, + timeout: float = _DEFAULT_TIMEOUT, + on_error: Callable[[BaseException], Any] | None = None, + is_stream: bool = False, + **kwargs, ): self._predicate = predicate self._initial = initial @@ -209,7 +232,13 @@ def __init__( self._on_error = on_error self._is_stream = is_stream - def __call__(self, func, on_error=None): + def __call__( + self, + func: Callable[ + ..., Awaitable[_R] | AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] + ], + on_error: Callable[[BaseException], Any] | None = None, + ) -> Callable[_P, Awaitable[_R | AsyncGenerator[_Y, None]]]: """Wrap a callable with retry behavior. 
Args: @@ -226,22 +255,20 @@ def __call__(self, func, on_error=None): on_error = self._on_error # @functools.wraps(func) - async def retry_wrapped_func(*args, **kwargs): + async def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> _R | AsyncGenerator[_Y, None]: """A wrapper that calls target function with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_func = retry_target if not self._is_stream else retry_target_stream - fn_result = retry_func( - target, - self._predicate, - sleep_generator, - timeout=self._timeout, - on_error=on_error, - ) - # if the target is not a stream, await the result before returning - return await fn_result if not self._is_stream else fn_result + retry_args = (self._predicate, sleep_generator, self._timeout, on_error) + if self._is_stream: + stream_target = cast(Callable[[], AsyncIterable[_Y]], target) + return retry_target_stream(stream_target, *retry_args) + else: + return await retry_target(target, *retry_args) return retry_wrapped_func diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 851fb731..36fa87c6 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -93,6 +93,7 @@ def on_error(e): Union, TypeVar, AsyncGenerator, + TYPE_CHECKING, ) import asyncio @@ -103,15 +104,16 @@ def on_error(e): from google.api_core.retry_streaming import _build_retry_error -_LOGGER = logging.getLogger(__name__) +if TYPE_CHECKING: + _Y = TypeVar("_Y") # yielded values -T = TypeVar("T") +_LOGGER = logging.getLogger(__name__) async def retry_target_stream( target: Union[ - Callable[[], AsyncIterable[T]], - Callable[[], Awaitable[AsyncIterable[T]]], + Callable[[], AsyncIterable[_Y]], + Callable[[], Awaitable[AsyncIterable[_Y]]], ], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], @@ -121,7 
+123,7 @@ async def retry_target_stream( Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, **kwargs, -) -> AsyncGenerator[T, None]: +) -> AsyncGenerator[_Y, None]: """Create a generator wrapper that retries the wrapped stream if it fails. This is the lowest-level retry helper. Generally, you'll use the @@ -161,7 +163,7 @@ async def retry_target_stream( Exception: If the target raises an error that isn't retryable. """ - subgenerator: Optional[AsyncIterator[T]] = None + subgenerator: Optional[AsyncIterator[_Y]] = None timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory @@ -174,14 +176,14 @@ async def retry_target_stream( try: # generator may be raw iterator, or wrapped in an awaitable gen_instance: Union[ - AsyncIterable[T], Awaitable[AsyncIterable[T]] + AsyncIterable[_Y], Awaitable[AsyncIterable[_Y]] ] = target() try: gen_instance = await gen_instance # type: ignore except TypeError: # was not awaitable pass - subgenerator = cast(AsyncIterable[T], gen_instance).__aiter__() + subgenerator = cast(AsyncIterable[_Y], gen_instance).__aiter__() # if target is a generator, we will advance it using asend # otherwise, we will use anext @@ -202,7 +204,7 @@ async def retry_target_stream( except GeneratorExit: # if wrapper received `aclose`, pass to subgenerator and close if bool(getattr(subgenerator, "aclose", None)): - await cast(AsyncGenerator[T, None], subgenerator).aclose() + await cast(AsyncGenerator[_Y, None], subgenerator).aclose() else: raise return @@ -210,7 +212,7 @@ async def retry_target_stream( # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator if getattr(subgenerator, "athrow", None): - await cast(AsyncGenerator[T, None], subgenerator).athrow( + await cast(AsyncGenerator[_Y, None], subgenerator).athrow( *sys.exc_info() ) else: @@ 
-231,7 +233,7 @@ async def retry_target_stream( on_error(exc) finally: if subgenerator is not None and getattr(subgenerator, "aclose", None): - await cast(AsyncGenerator[T, None], subgenerator).aclose() + await cast(AsyncGenerator[_Y, None], subgenerator).aclose() # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: From 796ae520838027fce262b8a47f8c727194b40dcd Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 12 Sep 2023 14:53:23 -0700 Subject: [PATCH 132/204] fixed typing issues --- google/api_core/retry_async.py | 2 +- google/api_core/retry_streaming_async.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index b5871e39..dce2129e 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -265,7 +265,7 @@ async def retry_wrapped_func( ) retry_args = (self._predicate, sleep_generator, self._timeout, on_error) if self._is_stream: - stream_target = cast(Callable[[], AsyncIterable[_Y]], target) + stream_target = cast(Callable[[], AsyncIterable["_Y"]], target) return retry_target_stream(stream_target, *retry_args) else: return await retry_target(target, *retry_args) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 36fa87c6..30fdb5b8 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -112,8 +112,8 @@ def on_error(e): async def retry_target_stream( target: Union[ - Callable[[], AsyncIterable[_Y]], - Callable[[], Awaitable[AsyncIterable[_Y]]], + Callable[[], AsyncIterable["_Y"]], + Callable[[], Awaitable[AsyncIterable["_Y"]]], ], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], @@ -123,7 +123,7 @@ async def retry_target_stream( Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] ] = None, **kwargs, -) -> AsyncGenerator[_Y, None]: +) -> 
AsyncGenerator["_Y", None]: """Create a generator wrapper that retries the wrapped stream if it fails. This is the lowest-level retry helper. Generally, you'll use the @@ -183,7 +183,7 @@ async def retry_target_stream( except TypeError: # was not awaitable pass - subgenerator = cast(AsyncIterable[_Y], gen_instance).__aiter__() + subgenerator = cast(AsyncIterable["_Y"], gen_instance).__aiter__() # if target is a generator, we will advance it using asend # otherwise, we will use anext @@ -204,7 +204,7 @@ async def retry_target_stream( except GeneratorExit: # if wrapper received `aclose`, pass to subgenerator and close if bool(getattr(subgenerator, "aclose", None)): - await cast(AsyncGenerator[_Y, None], subgenerator).aclose() + await cast(AsyncGenerator["_Y", None], subgenerator).aclose() else: raise return @@ -212,7 +212,7 @@ async def retry_target_stream( # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator if getattr(subgenerator, "athrow", None): - await cast(AsyncGenerator[_Y, None], subgenerator).athrow( + await cast(AsyncGenerator["_Y", None], subgenerator).athrow( *sys.exc_info() ) else: @@ -233,7 +233,7 @@ async def retry_target_stream( on_error(exc) finally: if subgenerator is not None and getattr(subgenerator, "aclose", None): - await cast(AsyncGenerator[_Y, None], subgenerator).aclose() + await cast(AsyncGenerator["_Y", None], subgenerator).aclose() # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: From 0688ffeaa90791138980a325704286790b023be6 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 21 Sep 2023 14:34:43 -0700 Subject: [PATCH 133/204] moved docstrings --- google/api_core/retry.py | 64 ++++++++++++++++++++++++ google/api_core/retry_async.py | 63 +++++++++++++++++++++++ google/api_core/retry_streaming.py | 64 ------------------------ google/api_core/retry_streaming_async.py | 63 ----------------------- 4 files changed, 127 insertions(+), 127 
deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index b9616793..2d00abee 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -308,6 +308,70 @@ class Retry(object): ``Operation`` or ``PollingFuture`` in general Timeout stands for Polling Timeout. + When ``is_stream=False``, the target is treated as a callable, + and will retry when the callable returns an error. When ``is_stream=True``, + the target will be treated as a generator function. Instead of just wrapping + the initial call in retry logic, the entire output iterable is + wrapped, with each yield passing through the retryable generator. If any yield + in the stream raises a retryable exception, the entire stream will be + retried. + + NOTE: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + ``` + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped_fn = Retry(is_stream=True)(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) + ``` + + 2. 
Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + ``` + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + for item in retryable_gen(): + if stream_idx >= len(yielded_items): + yielded_items.append(item) + yield item + elif item != yielded_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` + Args: predicate (Callable[Exception]): A callable that should return ``True`` if the given exception is retryable. diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index dce2129e..686a1691 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -188,6 +188,69 @@ class AsyncRetry: Although the default behavior is to retry transient API errors, a different predicate can be provided to retry other exceptions. + When ``is_stream=False``, the target is treated as a coroutine function, + and will retry when the coroutine returns an error. When ``is_stream=True``, + the target will be treated as an async generator function. Instead + of just wrapping the initial call in retry logic, the output iterable is + wrapped, with each yield passing through the retryable generator. If any yield + in the stream raises a retryable exception, the entire stream will be + retried. 
+ + Important Note: when a stream is encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + grpc call in a function that modifies the request based on what has + already been returned: + + ``` + async def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = await target(new_request) + async for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) + ``` + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. 
For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + `` + async def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + async for item in retryable_gen: + if stream_idx >= len(yielded_items): + yield item + yielded_items.append(item) + elif item != previous_stream[stream_idx]: + raise ValueError("Stream differs from last attempt")" + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + ``` + Args: predicate (Callable[Exception]): A callable that should return ``True`` if the given exception is retryable. diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index e007906c..e331b98d 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -16,70 +16,6 @@ Generator wrapper for retryable streaming RPCs. This function will be used when initilizing a retry with ``Retry(is_stream=True)``. - -When ``is_stream=False``, the target is treated as a callable, -and will retry when the callable returns an error. When ``is_stream=True``, -the target will be treated as a callable that returns an iterable. Instead -of just wrapping the initial call in retry logic, the entire iterable is -wrapped, with each yield passing through the retryable generator. If any yield -in the stream raises a retryable exception, the entire stream will be -retried. - -NOTE: when a stream encounters a retryable error, it will -silently construct a fresh iterator instance in the background -and continue yielding (likely duplicate) values as if no error occurred. 
-This is the most general way to retry a stream, but it often is not the -desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - -There are two ways to build more advanced retry logic for streams: - -1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - network call in a function that modifies the request based on what has - already been returned: - - ``` - def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = target(new_request) - for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped_fn = Retry(is_stream=True)(attempt_with_modified_request) - retryable_generator = retry_wrapped_fn(target, request) - ``` - -2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. 
For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - `` - def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - for item in retryable_gen(): - if stream_idx >= len(yielded_items): - yielded_items.append(item) - yield item - elif item != yielded_items[stream_idx]: - raise ValueError("Stream differs from last attempt") - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` """ from typing import ( diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 30fdb5b8..d634f805 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -15,69 +15,6 @@ Generator wrapper for retryable streaming RPCs. This function will be used when initilizing a retry with ``AsyncRetry(is_stream=True)``. - -When ``is_stream=False``, the target is treated as a coroutine, -and will retry when the coroutine returns an error. When ``is_stream=True``, -the target will be treated as a callable that retruns an AsyncIterable. Instead -of just wrapping the initial call in retry logic, the entire iterable is -wrapped, with each yield passing through the retryable generatpr. If any yield -in the stream raises a retryable exception, the entire stream will be -retried. - -Important Note: when a stream is encounters a retryable error, it will -silently construct a fresh iterator instance in the background -and continue yielding (likely duplicate) values as if no error occurred. -This is the most general way to retry a stream, but it often is not the -desired behavior. 
Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - -There are two ways to build more advanced retry logic for streams: - -1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - grpc call in a function that modifies the request based on what has - already been returned: - - ``` - async def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = await target(new_request) - async for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) - ``` - - 2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - `` - async def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - async for item in retryable_gen: - if stream_idx >= len(yielded_items): - yield item - yielded_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - ``` """ from typing import ( From da048ab59580bdc18a0d285fb84a7bd395dd1a86 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 21 Sep 2023 16:10:00 -0700 Subject: [PATCH 134/204] use enum in exception builder --- 
google/api_core/retry.py | 46 ++++++++++++++++++++++-- google/api_core/retry_streaming.py | 40 +++------------------ google/api_core/retry_streaming_async.py | 7 ++-- tests/asyncio/test_retry_async.py | 6 ++-- tests/unit/test_retry.py | 8 +++-- 5 files changed, 61 insertions(+), 46 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 2d00abee..6198bfd1 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -62,13 +62,14 @@ def check_if_exists(): import random import sys import time +from enum import Enum from typing import Any, Callable, TypeVar, Generator, Iterable, cast, TYPE_CHECKING import requests.exceptions from google.api_core import datetime_helpers from google.api_core import exceptions -from google.api_core.retry_streaming import retry_target_stream +import google.api_core.retry_streaming as retry_streaming from google.auth import exceptions as auth_exceptions if TYPE_CHECKING: @@ -88,6 +89,47 @@ def check_if_exists(): _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds +class RetryFailureReason(Enum): + """ + The cause of a failed retry, used when building exceptions + """ + TIMEOUT = "TIMEOUT" + NON_RETRYABLE_ERROR = "NON_RETRYABLE_ERROR" + + +def _build_retry_error( + exc_list: list[Exception], reason: RetryFailureReason, timeout_val: float, **kwargs: Any +) -> tuple[Exception, Exception | None]: + """ + Default exception_factory implementation. Builds an exception after the retry fails + + Args: + - exc_list (list[Exception]): list of exceptions that occurred during the retry + - reason (google.api_core.retry.RetryFailureReason): reason for the retry failure. 
+ Can be TIMEOUT or NON_RETRYABLE_ERROR + - timeout_val (float): the original timeout value for the retry, for use in the exception message + + Returns: + - tuple[Exception, Exception|None]: a tuple of the exception to be raised, and the cause exception if any + """ + if reason == RetryFailureReason.TIMEOUT: + # return RetryError with the most recent exception as the cause + src_exc = exc_list[-1] if exc_list else None + return ( + exceptions.RetryError( + "Timeout of {:.1f}s exceeded".format(timeout_val), + src_exc, + ), + src_exc, + ) + elif exc_list: + # return most recent exception encountered + return exc_list[-1], None + else: + # no exceptions were given in exc_list. Raise generic RetryError + return exceptions.RetryError("Unknown error", None), None + + def if_exception_type( *exception_types: type[BaseException], ) -> Callable[[BaseException], bool]: @@ -451,7 +493,7 @@ def retry_wrapped_func( if self._is_stream: # when stream is enabled, assume target returns an iterable that yields _Y stream_target = cast(Callable[[], Iterable["_Y"]], target) - return retry_target_stream(stream_target, *retry_args) + return retry_streaming.retry_target_stream(stream_target, *retry_args) else: return retry_target(target, *retry_args) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index e331b98d..bd93d235 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -34,45 +34,13 @@ from functools import partial from google.api_core import exceptions +import google.api_core.retry as retries _Y = TypeVar("_Y") # yielded values _LOGGER = logging.getLogger(__name__) -def _build_retry_error( - exc_list: List[Exception], is_timeout: bool, timeout_val: float -) -> Tuple[Exception, Optional[Exception]]: - """ - Default exception_factory implementation. 
Builds an exception after the retry fails - - Args: - - exc_list (list[Exception]): list of exceptions that occurred during the retry - - is_timeout (bool): whether the failure is due to the timeout value being exceeded, - or due to a non-retryable exception - - timeout_val (float): the original timeout value for the retry, for use in the exception message - - Returns: - - tuple[Exception, Exception|None]: a tuple of the exception to be raised, and the cause exception if any - """ - if is_timeout: - # return RetryError with the most recent exception as the cause - src_exc = exc_list[-1] if exc_list else None - return ( - exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout_val), - src_exc, - ), - src_exc, - ) - elif exc_list: - # return most recent exception encountered - return exc_list[-1], None - else: - # no exceptions were given in exc_list. Raise generic RetryError - return exceptions.RetryError("Unknown error", None), None - - def retry_target_stream( target: Callable[[], Iterable[_Y]], predicate: Callable[[Exception], bool], @@ -126,7 +94,7 @@ def retry_target_stream( timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] - exc_factory = partial(exception_factory or _build_retry_error, timeout_val=timeout) + exc_factory = partial(exception_factory or retries._build_retry_error, timeout_val=timeout) for sleep in sleep_generator: # Start a new retry loop @@ -141,14 +109,14 @@ def retry_target_stream( error_list.append(exc) if not predicate(exc): final_exc, source_exc = exc_factory( - exc_list=error_list, is_timeout=False + exc_list=error_list, reason=retries.RetryFailureReason.NON_RETRYABLE_ERROR ) raise final_exc from source_exc if on_error is not None: on_error(exc) if deadline is not None and time.monotonic() + sleep > deadline: - final_exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + final_exc, source_exc = 
exc_factory(exc_list=error_list, reason=retries.RetryFailureReason.TIMEOUT) raise final_exc from source_exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index d634f805..96c03aec 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -39,7 +39,8 @@ import sys from functools import partial -from google.api_core.retry_streaming import _build_retry_error +from google.api_core.retry import _build_retry_error +from google.api_core.retry import RetryFailureReason if TYPE_CHECKING: _Y = TypeVar("_Y") # yielded values @@ -164,7 +165,7 @@ async def retry_target_stream( except (Exception, asyncio.CancelledError) as exc: error_list.append(exc) if not predicate(exc): - exc, source_exc = exc_factory(exc_list=error_list, is_timeout=False) + exc, source_exc = exc_factory(exc_list=error_list, reason=RetryFailureReason.NON_RETRYABLE_ERROR) raise exc from source_exc if on_error is not None: on_error(exc) @@ -174,7 +175,7 @@ async def retry_target_stream( # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: - final_exc, source_exc = exc_factory(exc_list=error_list, is_timeout=True) + final_exc, source_exc = exc_factory(exc_list=error_list, reason=RetryFailureReason.TIMEOUT) raise final_exc from source_exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 84008445..c5b8be40 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -810,6 +810,7 @@ async def test_exc_factory_non_retryable_error(self): generator should give the option to override exception creation logic test when non-retryable error is thrown """ + from google.api_core.retry import RetryFailureReason from 
google.api_core.retry_streaming_async import retry_target_stream timeout = 6 @@ -820,7 +821,7 @@ async def test_exc_factory_non_retryable_error(self): def factory(*args, **kwargs): assert len(args) == 0 assert kwargs["exc_list"] == sent_errors - assert kwargs["is_timeout"] is False + assert kwargs["reason"] == RetryFailureReason.NON_RETRYABLE_ERROR assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err @@ -849,6 +850,7 @@ async def test_exc_factory_timeout(self): test when timeout is exceeded """ import time + from google.api_core.retry import RetryFailureReason from google.api_core.retry_streaming_async import retry_target_stream timeout = 2 @@ -867,7 +869,7 @@ async def test_exc_factory_timeout(self): def factory(*args, **kwargs): assert len(args) == 0 assert kwargs["exc_list"] == sent_errors - assert kwargs["is_timeout"] is True + assert kwargs["reason"] == RetryFailureReason.TIMEOUT assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 0aa61c5f..dcab0408 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -66,7 +66,7 @@ def test__build_retry_error_empty_list(): attempt to build a retry error with no errors encountered should return a generic RetryError """ - from google.api_core.retry_streaming import _build_retry_error + from google.api_core.retry import _build_retry_error src, cause = _build_retry_error([], False, 10) assert isinstance(src, exceptions.RetryError) @@ -831,6 +831,7 @@ def test_exc_factory_non_retryable_error(self): generator should give the option to override exception creation logic test when non-retryable error is thrown """ + from google.api_core.retry import RetryFailureReason from google.api_core.retry_streaming import retry_target_stream timeout = 6 @@ -841,7 +842,7 @@ def test_exc_factory_non_retryable_error(self): def factory(*args, **kwargs): assert len(args) == 0 assert 
kwargs["exc_list"] == sent_errors - assert kwargs["is_timeout"] is False + assert kwargs["reason"] == RetryFailureReason.NON_RETRYABLE_ERROR assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err @@ -869,6 +870,7 @@ def test_exc_factory_timeout(self): test when timeout is exceeded """ import time + from google.api_core.retry import RetryFailureReason from google.api_core.retry_streaming import retry_target_stream timeout = 2 @@ -887,7 +889,7 @@ def test_exc_factory_timeout(self): def factory(*args, **kwargs): assert len(args) == 0 assert kwargs["exc_list"] == sent_errors - assert kwargs["is_timeout"] is True + assert kwargs["reason"] == RetryFailureReason.TIMEOUT assert kwargs["timeout_val"] == timeout return expected_final_err, expected_source_err From 80e5eb06acc7e1a99ebbb9d6ab29edf0f7a8b28a Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 21 Sep 2023 23:12:15 +0000 Subject: [PATCH 135/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/api_core/retry.py | 8 ++++++-- google/api_core/retry_streaming.py | 11 ++++++++--- google/api_core/retry_streaming_async.py | 8 ++++++-- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 6198bfd1..98b8bbd9 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -93,19 +93,23 @@ class RetryFailureReason(Enum): """ The cause of a failed retry, used when building exceptions """ + TIMEOUT = "TIMEOUT" NON_RETRYABLE_ERROR = "NON_RETRYABLE_ERROR" def _build_retry_error( - exc_list: list[Exception], reason: RetryFailureReason, timeout_val: float, **kwargs: Any + exc_list: list[Exception], + reason: RetryFailureReason, + timeout_val: float, + **kwargs: Any, ) -> tuple[Exception, Exception | None]: 
""" Default exception_factory implementation. Builds an exception after the retry fails Args: - exc_list (list[Exception]): list of exceptions that occurred during the retry - - reason (google.api_core.retry.RetryFailureReason): reason for the retry failure. + - reason (google.api_core.retry.RetryFailureReason): reason for the retry failure. Can be TIMEOUT or NON_RETRYABLE_ERROR - timeout_val (float): the original timeout value for the retry, for use in the exception message diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index bd93d235..114b47fd 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -94,7 +94,9 @@ def retry_target_stream( timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] - exc_factory = partial(exception_factory or retries._build_retry_error, timeout_val=timeout) + exc_factory = partial( + exception_factory or retries._build_retry_error, timeout_val=timeout + ) for sleep in sleep_generator: # Start a new retry loop @@ -109,14 +111,17 @@ def retry_target_stream( error_list.append(exc) if not predicate(exc): final_exc, source_exc = exc_factory( - exc_list=error_list, reason=retries.RetryFailureReason.NON_RETRYABLE_ERROR + exc_list=error_list, + reason=retries.RetryFailureReason.NON_RETRYABLE_ERROR, ) raise final_exc from source_exc if on_error is not None: on_error(exc) if deadline is not None and time.monotonic() + sleep > deadline: - final_exc, source_exc = exc_factory(exc_list=error_list, reason=retries.RetryFailureReason.TIMEOUT) + final_exc, source_exc = exc_factory( + exc_list=error_list, reason=retries.RetryFailureReason.TIMEOUT + ) raise final_exc from source_exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 96c03aec..cd733e1c 
100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -165,7 +165,9 @@ async def retry_target_stream( except (Exception, asyncio.CancelledError) as exc: error_list.append(exc) if not predicate(exc): - exc, source_exc = exc_factory(exc_list=error_list, reason=RetryFailureReason.NON_RETRYABLE_ERROR) + exc, source_exc = exc_factory( + exc_list=error_list, reason=RetryFailureReason.NON_RETRYABLE_ERROR + ) raise exc from source_exc if on_error is not None: on_error(exc) @@ -175,7 +177,9 @@ async def retry_target_stream( # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: - final_exc, source_exc = exc_factory(exc_list=error_list, reason=RetryFailureReason.TIMEOUT) + final_exc, source_exc = exc_factory( + exc_list=error_list, reason=RetryFailureReason.TIMEOUT + ) raise final_exc from source_exc _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) From 562079b8d4ed88ebe14ab90f60e074fc38cc19e5 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 21 Sep 2023 16:23:37 -0700 Subject: [PATCH 136/204] fixed lint and docs issues --- google/api_core/retry.py | 64 +++++++++++++++--------------- google/api_core/retry_async.py | 62 ++++++++++++++--------------- google/api_core/retry_streaming.py | 1 - 3 files changed, 63 insertions(+), 64 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 98b8bbd9..36882d11 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -376,18 +376,18 @@ class Retry(object): network call in a function that modifies the request based on what has already been returned: - ``` - def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = target(new_request) - for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped_fn = 
Retry(is_stream=True)(attempt_with_modified_request) - retryable_generator = retry_wrapped_fn(target, request) - ``` + .. code-block:: python + + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped_fn = Retry(is_stream=True)(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) 2. Wrap the retry generator Alternatively, you can wrap the retryable generator itself before @@ -396,27 +396,27 @@ def attempt_with_modified_request(target, request, seen_items=[]): in previous retry attempts, and only yield new items when the new attempt surpasses the previous ones: - ``` - def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) - # keep track of what has been yielded out of filter - yielded_items = [] - for item in retryable_gen(): - if stream_idx >= len(yielded_items): - yielded_items.append(item) - yield item - elif item != yielded_items[stream_idx]: - raise ValueError("Stream differs from last attempt") - stream_idx += 1 + .. 
code-block:: python - filter_retry_wrapped = retryable_with_filter(target) - ``` + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + # keep track of what has been yielded out of filter + yielded_items = [] + for item in retryable_gen(): + if stream_idx >= len(yielded_items): + yielded_items.append(item) + yield item + elif item != yielded_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) Args: predicate (Callable[Exception]): A callable that should return ``True`` diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 686a1691..23d5f23d 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -210,17 +210,17 @@ class AsyncRetry: grpc call in a function that modifies the request based on what has already been returned: - ``` - async def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = await target(new_request) - async for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) - ``` + .. code-block:: python + + async def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = await target(new_request) + async for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) 2. 
Wrap the retry generator
         Alternatively, you can wrap the retryable generator itself before
@@ -229,27 +229,27 @@ async def attempt_with_modified_request(target, request, seen_items=[]):
         in previous retry attempts, and only yield new items when the
         new attempt surpasses the previous ones:
 
-        ``
-        async def retryable_with_filter(target):
-            stream_idx = 0
-            # reset stream_idx when the stream is retried
-            def on_error(e):
-                nonlocal stream_idx
+        .. code-block:: python
+
+            async def retryable_with_filter(target):
                 stream_idx = 0
-            # build retryable
-            retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target)
-            # keep track of what has been yielded out of filter
-            yielded_items = []
-            async for item in retryable_gen:
-                if stream_idx >= len(yielded_items):
-                    yield item
-                    yielded_items.append(item)
-                elif item != previous_stream[stream_idx]:
-                    raise ValueError("Stream differs from last attempt")"
-                stream_idx += 1
-
-            filter_retry_wrapped = retryable_with_filter(target)
-        ``
+                # reset stream_idx when the stream is retried
+                def on_error(e):
+                    nonlocal stream_idx
+                    stream_idx = 0
+                # build retryable
+                retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target)
+                # keep track of what has been yielded out of filter
+                yielded_items = []
+                async for item in retryable_gen:
+                    if stream_idx >= len(yielded_items):
+                        yield item
+                        yielded_items.append(item)
+                    elif item != yielded_items[stream_idx]:
+                        raise ValueError("Stream differs from last attempt")
+                    stream_idx += 1
+
+                filter_retry_wrapped = retryable_with_filter(target)
 
     Args:
         predicate (Callable[Exception]): A callable that should return ``True``
diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py
index 114b47fd..0bd50e41 100644
--- a/google/api_core/retry_streaming.py
+++ b/google/api_core/retry_streaming.py
@@ -33,7 +33,6 @@ import time
 from functools import partial
 
-from google.api_core import exceptions
 import google.api_core.retry as retries
 
 _Y =
TypeVar("_Y")  # yielded values
 

From 8cc6ea9c4f9717a2a0b0c8e2bd55efc726f5837e Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 6 Oct 2023 21:09:43 +0000
Subject: [PATCH 137/204] Update tests/unit/test_retry.py

Co-authored-by: Victor Chudnovsky
---
 tests/unit/test_retry.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py
index dcab0408..e48a3343 100644
--- a/tests/unit/test_retry.py
+++ b/tests/unit/test_retry.py
@@ -514,7 +514,8 @@ def _generator_mock(
             return return_val
         except (Exception, BaseException, GeneratorExit) as e:
             # keep track of exceptions seen by generator
-            if exceptions_seen is not None:
+            if not exceptions_seen:
+                exceptions_seen = []
             exceptions_seen.append(e)
             raise
 

From e7a5cd485788fd7251c548fa99dc7561a9a1954f Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 6 Oct 2023 13:47:36 -0700
Subject: [PATCH 138/204] fixed comment line break

---
 google/api_core/retry.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/google/api_core/retry.py b/google/api_core/retry.py
index 36882d11..143fa590 100644
--- a/google/api_core/retry.py
+++ b/google/api_core/retry.py
@@ -342,7 +342,11 @@ class Retry(object):
     expected to succeed (its errors are supposed to be handled by the retry
     logic). The decision as to whether a new polling attempt needs to be made is based
     not on the RPC status code but on the status of the returned
-    status of an operation.
In other words: we will poll a long-running operation until + the operation is done or the polling timeout expires. Each poll will inform us of + the status of the operation. The poll consists of an RPC to the server that may + itself be retried as per the poll-specific retry settings in case of errors. The + operation-level retry settings do NOT apply to polling-RPC retries. With the actual timeout types being defined above, the client libraries often refer to just Timeout without clarifying which type specifically From 02c12cc609096a635a23b7efa78e152ae1a00614 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 6 Oct 2023 13:50:37 -0700 Subject: [PATCH 139/204] use kwargs map --- google/api_core/retry.py | 11 ++++++++--- google/api_core/retry_async.py | 11 ++++++++--- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 143fa590..efb9e174 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -497,13 +497,18 @@ def retry_wrapped_func( sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_args = (self._predicate, sleep_generator, self._timeout, on_error) + retry_kwargs = { + "predicate": self._predicate, + "sleep_generator": sleep_generator, + "timeout": self._timeout, + "on_error": on_error, + } if self._is_stream: # when stream is enabled, assume target returns an iterable that yields _Y stream_target = cast(Callable[[], Iterable["_Y"]], target) - return retry_streaming.retry_target_stream(stream_target, *retry_args) + return retry_streaming.retry_target_stream(stream_target, **retry_kwargs) else: - return retry_target(target, *retry_args) + return retry_target(target, **retry_kwargs) return retry_wrapped_func diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 23d5f23d..94fe5f91 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -326,12 +326,17 @@ 
async def retry_wrapped_func( sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_args = (self._predicate, sleep_generator, self._timeout, on_error) + retry_kwargs = { + "predicate": self._predicate, + "sleep_generator": sleep_generator, + "timeout": self._timeout, + "on_error": on_error, + } if self._is_stream: stream_target = cast(Callable[[], AsyncIterable["_Y"]], target) - return retry_target_stream(stream_target, *retry_args) + return retry_target_stream(stream_target, **retry_kwargs) else: - return await retry_target(target, *retry_args) + return await retry_target(target, **retry_kwargs) return retry_wrapped_func From 03b16080b7e07fce772af446304a03fb385f7973 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 6 Oct 2023 14:00:06 -0700 Subject: [PATCH 140/204] fixed on_error docstrings --- google/api_core/retry.py | 22 ++++++++++++---------- google/api_core/retry_async.py | 21 ++++++++++++--------- google/api_core/retry_streaming.py | 6 +++--- google/api_core/retry_streaming_async.py | 6 +++--- 4 files changed, 30 insertions(+), 25 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index efb9e174..7bf75790 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -222,10 +222,10 @@ def retry_target( It should return True to retry or False otherwise. sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. - timeout (float): How long to keep retrying the target. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. + timeout (Optional[float]): How long to keep retrying the target. + on_error (Optional[Callable[Exception]]): If given, the on_error + callback will be called with each retryable exception raised by the + target. Any error raised by this function will *not* be caught. 
deadline (float): DEPRECATED: use ``timeout`` instead. For backward compatibility, if specified it will override ``timeout`` parameter. @@ -475,11 +475,11 @@ def __call__( Args: func (Callable): The callable to add retry behavior to. - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - If on_error was specified in the constructor, this value will - be ignored. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. Returns: Callable: A callable that will invoke ``func`` with retry @@ -506,7 +506,9 @@ def retry_wrapped_func( if self._is_stream: # when stream is enabled, assume target returns an iterable that yields _Y stream_target = cast(Callable[[], Iterable["_Y"]], target) - return retry_streaming.retry_target_stream(stream_target, **retry_kwargs) + return retry_streaming.retry_target_stream( + stream_target, **retry_kwargs + ) else: return retry_target(target, **retry_kwargs) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 94fe5f91..b2aed9c0 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -111,10 +111,10 @@ async def retry_target( It should return True to retry or False otherwise. sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. - timeout (float): How long to keep retrying the target, in seconds. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. + timeout (Optional[float]): How long to keep retrying the target, in seconds. 
+ on_error (Optional[Callable[Exception]]): If given, the on_error + callback will be called with each retryable exception raised by the + target. Any error raised by this function will *not* be caught. deadline (float): DEPRECATED use ``timeout`` instead. For backward compatibility, if set it will override the ``timeout`` parameter. @@ -258,8 +258,8 @@ def on_error(e): must be greater than 0. maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. - timeout (float): How long to keep retrying in seconds. - on_error (Callable[Exception]): A function to call while processing + timeout (Optional[float]): How long to keep retrying in seconds. + on_error (Optional[Callable[Exception]]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. is_stream (bool): Indicates whether the input function @@ -306,9 +306,12 @@ def __call__( Args: func (Callable): The callable or stream to add retry behavior to. - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + Returns: Callable: A callable that will invoke ``func`` with retry diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 0bd50e41..1e7a341c 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -67,9 +67,9 @@ def retry_target_stream( timeout: How long to keep retrying the target. Note: timeout is only checked before initiating a retry, so the target may run past the timeout value as long as it is healthy. 
-        on_error: A function to call while processing a
-            retryable exception. Any error raised by this function will *not*
-            be caught.
+        on_error: If given, the on_error callback will be called with each
+            retryable exception raised by the target. Any error raised by this
+            function will *not* be caught.
         exception_factory: A function that is called when the retryable reaches
             a terminal failure state, used to construct an exception to be raised.
             It is given a list of all exceptions encountered, a boolean indicating
diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py
index cd733e1c..01e44158 100644
--- a/google/api_core/retry_streaming_async.py
+++ b/google/api_core/retry_streaming_async.py
@@ -78,9 +78,9 @@ async def retry_target_stream(
         timeout: How long to keep retrying the target. Note: timeout is
             only checked before initiating a retry, so the target may
             run past the timeout value as long as it is healthy.
-        on_error: A function to call while processing a
-            retryable exception. Any error raised by this function will *not*
-            be caught.
+        on_error: If given, the on_error callback will be called with each
+            retryable exception raised by the target. Any error raised by this
+            function will *not* be caught.
         exception_factory: A function that is called when the retryable reaches
             a terminal failure state, used to construct an exception to be raised.
            It is given a list of all exceptions encountered, a boolean indicating

From b05b11f35e19201af92fcf0a5178a7d375b3550e Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 6 Oct 2023 14:09:11 -0700
Subject: [PATCH 141/204] renamed example lists

---
 google/api_core/retry.py       | 8 ++++----
 google/api_core/retry_async.py | 6 +++---
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/google/api_core/retry.py b/google/api_core/retry.py
index 7bf75790..0d61a5b5 100644
--- a/google/api_core/retry.py
+++ b/google/api_core/retry.py
@@ -411,12 +411,12 @@ def on_error(e):
             # build retryable
             retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target)
             # keep track of what has been yielded out of filter
-            yielded_items = []
+            seen_items = []
             for item in retryable_gen():
-                if stream_idx >= len(yielded_items):
-                    yielded_items.append(item)
+                if stream_idx >= len(seen_items):
+                    seen_items.append(item)
                     yield item
-                elif item != yielded_items[stream_idx]:
+                elif item != seen_items[stream_idx]:
                     raise ValueError("Stream differs from last attempt")
                 stream_idx += 1

diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py
index b2aed9c0..035fe185 100644
--- a/google/api_core/retry_async.py
+++ b/google/api_core/retry_async.py
@@ -240,11 +240,11 @@ def on_error(e):
             # build retryable
             retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target)
             # keep track of what has been yielded out of filter
-            yielded_items = []
+            seen_items = []
             async for item in retryable_gen:
-                if stream_idx >= len(yielded_items):
+                if stream_idx >= len(seen_items):
                     yield item
-                    yielded_items.append(item)
+                    seen_items.append(item)
                 elif item != previous_stream[stream_idx]:
                     raise ValueError("Stream differs from last attempt")"
                 stream_idx += 1

From 0b5d3a2551c244d237c998e22577e0820447c73e Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 6 Oct 2023 14:26:55 -0700
Subject: [PATCH 142/204] removed ignore_sent

---
 tests/asyncio/test_retry_async.py | 27
++++++++++++++------------- tests/unit/test_retry.py | 29 ++++++++++++++++------------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index c5b8be40..c4f03217 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -418,23 +418,18 @@ async def _generator_mock( error_on=None, exceptions_seen=None, sleep_time=0, - ignore_sent=False, ): """ Helper to create a mock generator that yields a number of values Generator can optionally raise an exception on a specific iteration """ try: - sent_in = None for i in range(num): if sleep_time: await asyncio.sleep(sleep_time) if error_on and i == error_on: raise ValueError("generator mock error") - - sent_in = yield (sent_in if sent_in else i) - if ignore_sent: - sent_in = None + yield i except (Exception, BaseException, GeneratorExit) as e: # keep track of exceptions seen by generator if exceptions_seen is not None: @@ -560,21 +555,27 @@ async def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ + async def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ retry_ = retry_async.AsyncRetry(is_stream=True) - decorated = retry_(self._generator_mock) + decorated = retry_(_mock_send_gen) - generator = await decorated(10) + generator = await decorated() result = await generator.__anext__() - assert result == 0 + # fist yield should be None + assert result is None in_messages = ["test_1", "hello", "world"] out_messages = [] for msg in in_messages: recv = await generator.asend(msg) out_messages.append(recv) assert in_messages == out_messages - assert await generator.__anext__() == 4 - assert await generator.__anext__() == 5 @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -589,14 +590,14 @@ async def test___call___generator_send_retry(self, sleep): is_stream=True, 
timeout=None, ) - generator = await retry_(self._generator_mock)(error_on=3, ignore_sent=True) + generator = await retry_(self._generator_mock)(error_on=3) with pytest.raises(TypeError) as exc_info: await generator.asend("can not send to fresh generator") assert exc_info.match("can't send non-None value") # error thrown on 3 # generator should contain 0, 1, 2 looping - generator = await retry_(self._generator_mock)(error_on=3, ignore_sent=True) + generator = await retry_(self._generator_mock)(error_on=3) assert await generator.__anext__() == 0 unpacked = [await generator.asend(i) for i in range(10)] assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index e48a3343..2a54bec9 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -497,20 +497,16 @@ def _generator_mock( error_on=None, return_val=None, exceptions_seen=None, - ignore_sent=False, ): """ Helper to create a mock generator that yields a number of values Generator can optionally raise an exception on a specific iteration """ try: - sent_in = None for i in range(num): if error_on and i == error_on: raise ValueError("generator mock error") - sent_in = yield (sent_in if sent_in else i) - if ignore_sent: - sent_in = None + yield i return return_val except (Exception, BaseException, GeneratorExit) as e: # keep track of exceptions seen by generator @@ -616,22 +612,29 @@ def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ + def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ + retry_ = retry.Retry(is_stream=True) - decorated = retry_(self._generator_mock) + decorated = retry_(_mock_send_gen) - generator = decorated(5) + generator = decorated() result = next(generator) - assert result == 0 + # first call should be None + assert result is None in_messages = ["test_1", "hello", "world"] out_messages = [] for 
msg in in_messages: recv = generator.send(msg) out_messages.append(recv) assert in_messages == out_messages - assert next(generator) == 4 - with pytest.raises(StopIteration): - generator.send("should be exhausted") + @mock.patch("time.sleep", autospec=True) def test___call___with_generator_send_retry(self, sleep): @@ -645,12 +648,12 @@ def test___call___with_generator_send_retry(self, sleep): is_stream=True, timeout=None, ) - result = retry_(self._generator_mock)(error_on=3, ignore_sent=True) + result = retry_(self._generator_mock)(error_on=3) with pytest.raises(TypeError) as exc_info: result.send("can not send to fresh generator") assert exc_info.match("can't send non-None value") # initiate iteration with None - result = retry_(self._generator_mock)(error_on=3, ignore_sent=True) + result = retry_(self._generator_mock)(error_on=3) assert result.send(None) == 0 # error thrown on 3 # generator should contain 0, 1, 2 looping From 03f2af52e1878fa2ea4c8e124df637ecdf3f73a2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 6 Oct 2023 14:37:26 -0700 Subject: [PATCH 143/204] fixed lint issues --- tests/asyncio/test_retry_async.py | 2 ++ tests/unit/test_retry.py | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index c4f03217..bbdc61f0 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -555,6 +555,7 @@ async def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ + async def _mock_send_gen(): """ always yield whatever was sent in @@ -562,6 +563,7 @@ async def _mock_send_gen(): in_ = yield while True: in_ = yield in_ + retry_ = retry_async.AsyncRetry(is_stream=True) decorated = retry_(_mock_send_gen) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 2a54bec9..94883bd1 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -539,7 +539,7 @@ 
def test___call___generator_success(self, sleep): # check yield contents unpacked = [i for i in result] assert len(unpacked) == num - for a, b in zip(decorated(num), self._generator_mock(num)): + for a, b in zip(unpacked, self._generator_mock(num)): assert a == b sleep.assert_not_called() @@ -612,6 +612,7 @@ def test___call___with_generator_send(self, sleep): """ Send should be passed through retry into target generator """ + def _mock_send_gen(): """ always yield whatever was sent in @@ -626,7 +627,7 @@ def _mock_send_gen(): generator = decorated() result = next(generator) - # first call should be None + # first yield should be None assert result is None in_messages = ["test_1", "hello", "world"] out_messages = [] @@ -635,7 +636,6 @@ def _mock_send_gen(): out_messages.append(recv) assert in_messages == out_messages - @mock.patch("time.sleep", autospec=True) def test___call___with_generator_send_retry(self, sleep): """ From 5fee88886310dc4053f925c1bef1b011722b14b7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 6 Oct 2023 14:51:34 -0700 Subject: [PATCH 144/204] fixed generator mock and added comments --- tests/asyncio/test_retry_async.py | 6 ++++++ tests/unit/test_retry.py | 10 +++++++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index bbdc61f0..3febb232 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -422,6 +422,12 @@ async def _generator_mock( """ Helper to create a mock generator that yields a number of values Generator can optionally raise an exception on a specific iteration + + Args: + - num (int): the number of values to yield + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising + - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before 
yielding each value """ try: for i in range(num): diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 94883bd1..08eb6b22 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -501,6 +501,12 @@ def _generator_mock( """ Helper to create a mock generator that yields a number of values Generator can optionally raise an exception on a specific iteration + + Args: + - num (int): the number of values to yield. After this, the generator will return `return_val` + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - return_val (any): if given, the generator will return this value after yielding num values + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising """ try: for i in range(num): @@ -510,9 +516,7 @@ def _generator_mock( return return_val except (Exception, BaseException, GeneratorExit) as e: # keep track of exceptions seen by generator - if not exceptions_seen: - exceptions_seen = [] - exceptions_seen.append(e) + if exceptions_seen is not None: exceptions_seen.append(e) raise From b0faa2ddb4f43f4a0e46aa0a3a996024a12df76b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 21:38:12 +0000 Subject: [PATCH 145/204] Apply suggestions from code review comments, typos, etc Co-authored-by: Victor Chudnovsky --- google/api_core/retry.py | 4 ++-- google/api_core/retry_async.py | 6 +++--- google/api_core/retry_streaming_async.py | 5 +++-- tests/asyncio/test_retry_async.py | 19 ++++++++++--------- tests/unit/test_retry.py | 6 ++++-- 5 files changed, 22 insertions(+), 18 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 28b6805c..8a15ffb3 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -415,7 +415,7 @@ def on_error(e): nonlocal stream_idx stream_idx = 0 # build retryable - retryable_gen = Retry(is_stream=True, on_error=on_error, ...)(target) + retryable_gen = 
Retry(is_stream=True,...)(target) # keep track of what has been yielded out of filter seen_items = [] for item in retryable_gen(): @@ -440,7 +440,7 @@ def on_error(e): a retryable exception. Any error raised by this function will *not* be caught. is_stream (bool): Indicates whether the input function - should be treated as an stream function (i.e. a Generator, + should be treated as a stream function (i.e. a Generator, or function that returns an Iterable). If True, the iterable will be wrapped with retry logic, and any failed outputs will restart the stream. If False, only the input function call itself diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 035fe185..5f03c17d 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -220,7 +220,7 @@ async def attempt_with_modified_request(target, request, seen_items=[]): yield item seen_items.append(item) - retry_wrapped = AsyncRetry(is_stream=True)(attempt_with_modified_request, target, request, []) + retry_wrapped = AsyncRetry(is_stream=True,...)(attempt_with_modified_request, target, request, []) 2. Wrap the retry generator Alternatively, you can wrap the retryable generator itself before @@ -238,7 +238,7 @@ def on_error(e): nonlocal stream_idx stream_idx = 0 # build retryable - retryable_gen = AsyncRetry(is_stream=True, on_error=on_error, ...)(target) + retryable_gen = AsyncRetry(is_stream=True, ...)(target) # keep track of what has been yielded out of filter seen_items = [] async for item in retryable_gen: @@ -263,7 +263,7 @@ def on_error(e): a retryable exception. Any error raised by this function will *not* be caught. is_stream (bool): Indicates whether the input function - should be treated as an stream function (i.e. an AsyncGenerator, + should be treated as a stream function (i.e. an AsyncGenerator, or function or coroutine that returns an AsyncIterable). 
If True, the iterable will be wrapped with retry logic, and any failed outputs will restart the stream. If False, only the input diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 01e44158..f9dd40ed 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """ Generator wrapper for retryable streaming RPCs. This function will be used when initilizing a retry with @@ -65,7 +66,7 @@ async def retry_target_stream( """Create a generator wrapper that retries the wrapped stream if it fails. This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. + higher-level retry helper :class:`AsyncRetry`. Args: target: The generator function to call and retry. This must be a @@ -91,7 +92,7 @@ async def retry_target_stream( on timeout, or the last exception encountered otherwise. Returns: - AssyncGenerator: A retryable generator that wraps the target generator function. + AsyncGenerator: A retryable generator that wraps the target generator function. Raises: ValueError: If the sleep generator stops yielding values. 
diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 3febb232..cac01f39 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -576,7 +576,7 @@ async def _mock_send_gen(): generator = await decorated() result = await generator.__anext__() - # fist yield should be None + # first yield should be None assert result is None in_messages = ["test_1", "hello", "world"] out_messages = [] @@ -600,7 +600,7 @@ async def test___call___generator_send_retry(self, sleep): ) generator = await retry_(self._generator_mock)(error_on=3) with pytest.raises(TypeError) as exc_info: - await generator.asend("can not send to fresh generator") + await generator.asend("cannot send to fresh generator") assert exc_info.match("can't send non-None value") # error thrown on 3 @@ -653,6 +653,8 @@ async def test___call___with_generator_throw(self, sleep): """ Throw should be passed through retry into target generator """ + + # The generator should not retry when it encounters a non-retryable error retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), is_stream=True, @@ -668,7 +670,8 @@ async def test___call___with_generator_throw(self, sleep): with pytest.raises(StopAsyncIteration): # calling next on closed generator should raise error await generator.__anext__() - # should retry if throw retryable exception + + # In contrast, the generator should retry if we throw a retryable exception exception_list = [] generator = await decorated(10, exceptions_seen=exception_list) for i in range(2): @@ -679,7 +682,7 @@ async def test___call___with_generator_throw(self, sleep): # calling next on closed generator should not raise error assert await generator.__anext__() == 1 - @pytest.mark.parametrize("awaitale_wrapped", [True, False]) + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_iterable_send(self, 
sleep, awaitale_wrapped): @@ -702,11 +705,9 @@ async def __anext__(self): return CustomIterable() - if awaitale_wrapped: - + if awaitable_wrapped: async def wrapper(): return iterable_fn() - decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) @@ -718,7 +719,7 @@ async def wrapper(): await retryable.asend("test2") == 2 await retryable.asend("test3") == 3 - @pytest.mark.parametrize("awaitale_wrapped", [True, False]) + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_iterable_close(self, sleep, awaitale_wrapped): @@ -762,7 +763,7 @@ async def wrapper(): with pytest.raises(StopAsyncIteration): await new_retryable.__anext__() - @pytest.mark.parametrize("awaitale_wrapped", [True, False]) + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___with_iterable_throw(self, sleep, awaitale_wrapped): diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 80ea0968..7e7d5897 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -721,7 +721,8 @@ def iterable_fn(n): retryable.close() with pytest.raises(StopIteration): next(retryable) - # try closing new generator + + # try closing a new generator retryable = decorated(10) retryable.close() with pytest.raises(StopIteration): @@ -751,7 +752,8 @@ def iterable_fn(n): retryable.throw(TypeError) with pytest.raises(StopIteration): next(retryable) - # try throwing with new generator + + # try throwing with a new generator retryable = decorated(10) with pytest.raises(ValueError): retryable.throw(ValueError) From 6c44298d97ec969a10d78f2068c6fb9aabe022dd Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 9 Nov 2023 21:40:05 +0000 Subject: [PATCH 146/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/asyncio/test_retry_async.py | 2 ++ tests/unit/test_retry.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index cac01f39..4190061d 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -706,8 +706,10 @@ async def __anext__(self): return CustomIterable() if awaitable_wrapped: + async def wrapper(): return iterable_fn() + decorated = retry_(wrapper) else: decorated = retry_(iterable_fn) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 7e7d5897..70e694f8 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -721,7 +721,7 @@ def iterable_fn(n): retryable.close() with pytest.raises(StopIteration): next(retryable) - + # try closing a new generator retryable = decorated(10) retryable.close() @@ -752,7 +752,7 @@ def iterable_fn(n): retryable.throw(TypeError) with pytest.raises(StopIteration): next(retryable) - + # try throwing with a new generator retryable = decorated(10) with pytest.raises(ValueError): From 51df6721dea06d870dbdbe5d17abb9fd28d826c2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 22:16:33 +0000 Subject: [PATCH 147/204] Update google/api_core/retry.py Co-authored-by: Victor Chudnovsky --- google/api_core/retry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 8a15ffb3..938c38e3 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -396,7 +396,7 @@ def attempt_with_modified_request(target, request, seen_items=[]): yield item seen_items.append(item) - retry_wrapped_fn = Retry(is_stream=True)(attempt_with_modified_request) + retry_wrapped_fn = Retry(is_stream=True,...)(attempt_with_modified_request) retryable_generator = retry_wrapped_fn(target, 
request) 2. Wrap the retry generator From e207376c406375b11600fe52a2e5e80003395bbb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 14:22:33 -0800 Subject: [PATCH 148/204] removed unneeded comments --- google/api_core/retry.py | 3 +-- google/api_core/retry_async.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 938c38e3..2af08cef 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -446,8 +446,7 @@ def on_error(e): restart the stream. If False, only the input function call itself will be retried. Defaults to False. To avoid duplicate values, retryable streams should typically be - wrapped in additional filter logic before use. For more details, see - ``google.api_core.retry_streaming.retry_target_stream``. + wrapped in additional filter logic before use. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. """ diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 5f03c17d..d406ac2a 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -269,8 +269,7 @@ def on_error(e): failed outputs will restart the stream. If False, only the input function call itself will be retried. Defaults to False. To avoid duplicate values, retryable streams should typically be - wrapped in additional filter logic before use. For more details, see - ``google.api_core.retry_streaming_async.retry_target_stream``. + wrapped in additional filter logic before use. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. 
""" From 39716a77453cdad244ce9305b1fd248aa1ce3fdc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 14:39:40 -0800 Subject: [PATCH 149/204] improved comments --- google/api_core/retry_streaming_async.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index f9dd40ed..d3dae741 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -113,14 +113,15 @@ async def retry_target_stream( for sleep in sleep_generator: # Start a new retry loop try: - # generator may be raw iterator, or wrapped in an awaitable gen_instance: Union[ AsyncIterable[_Y], Awaitable[AsyncIterable[_Y]] ] = target() try: + # gapic functions return the generator behind an awaitable + # unwrap the awaitable so we can work with the generator directly gen_instance = await gen_instance # type: ignore except TypeError: - # was not awaitable + # was not awaitable, continue pass subgenerator = cast(AsyncIterable["_Y"], gen_instance).__aiter__() From 2bbf33f5bdd3f9c8fbcc47da1294aea29f1d983b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 15:10:58 -0800 Subject: [PATCH 150/204] simplified generator detection --- google/api_core/retry_streaming_async.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index d3dae741..4d5556ad 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -109,6 +109,7 @@ async def retry_target_stream( error_list: List[Exception] = [] # override exception_factory to build a more complex exception exc_factory = partial(exception_factory or _build_retry_error, timeout_val=timeout) + target_is_generator: bool | None = None for sleep in sleep_generator: # Start a new retry loop @@ -125,14 +126,16 @@ async def retry_target_stream( pass 
subgenerator = cast(AsyncIterable["_Y"], gen_instance).__aiter__() - # if target is a generator, we will advance it using asend - # otherwise, we will use anext - supports_send = bool(getattr(subgenerator, "asend", None)) + if target_is_generator is None: + # Check if target supports generator features (asend, athrow, aclose) + target_is_generator = bool(getattr(subgenerator, "asend", None)) sent_in = None while True: ## Read from Subgenerator - if supports_send: + # If the target is a generator, we will advance it with `asend` + # otherwise, we will use `anext` + if target_is_generator: next_value = await subgenerator.asend(sent_in) # type: ignore else: next_value = await subgenerator.__anext__() @@ -143,7 +146,7 @@ async def retry_target_stream( sent_in = yield next_value except GeneratorExit: # if wrapper received `aclose`, pass to subgenerator and close - if bool(getattr(subgenerator, "aclose", None)): + if target_is_generator: await cast(AsyncGenerator["_Y", None], subgenerator).aclose() else: raise @@ -151,7 +154,7 @@ async def retry_target_stream( except: # noqa: E722 # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator - if getattr(subgenerator, "athrow", None): + if target_is_generator: await cast(AsyncGenerator["_Y", None], subgenerator).athrow( *sys.exc_info() ) @@ -174,7 +177,7 @@ async def retry_target_stream( if on_error is not None: on_error(exc) finally: - if subgenerator is not None and getattr(subgenerator, "aclose", None): + if target_is_generator: await cast(AsyncGenerator["_Y", None], subgenerator).aclose() # sleep and adjust timeout budget From 3b03bfa5e7d349695d88497b71b41d98b5dee9d4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 15:23:13 -0800 Subject: [PATCH 151/204] renamed variables --- google/api_core/retry_streaming_async.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/google/api_core/retry_streaming_async.py 
b/google/api_core/retry_streaming_async.py index 4d5556ad..be3a4fe3 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -102,33 +102,33 @@ async def retry_target_stream( Exception: If the target raises an error that isn't retryable. """ - subgenerator: Optional[AsyncIterator[_Y]] = None + async_iterator: Optional[AsyncIterator[_Y]] = None timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: List[Exception] = [] # override exception_factory to build a more complex exception exc_factory = partial(exception_factory or _build_retry_error, timeout_val=timeout) - target_is_generator: bool | None = None + target_is_generator: Optional[bool] = None for sleep in sleep_generator: # Start a new retry loop try: - gen_instance: Union[ + target_output: Union[ AsyncIterable[_Y], Awaitable[AsyncIterable[_Y]] ] = target() try: # gapic functions return the generator behind an awaitable # unwrap the awaitable so we can work with the generator directly - gen_instance = await gen_instance # type: ignore + target_output = await target_output # type: ignore except TypeError: # was not awaitable, continue pass - subgenerator = cast(AsyncIterable["_Y"], gen_instance).__aiter__() + async_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__() if target_is_generator is None: # Check if target supports generator features (asend, athrow, aclose) - target_is_generator = bool(getattr(subgenerator, "asend", None)) + target_is_generator = bool(getattr(async_iterator, "asend", None)) sent_in = None while True: @@ -136,9 +136,9 @@ async def retry_target_stream( # If the target is a generator, we will advance it with `asend` # otherwise, we will use `anext` if target_is_generator: - next_value = await subgenerator.asend(sent_in) # type: ignore + next_value = await 
async_iterator.asend(sent_in) # type: ignore else: - next_value = await subgenerator.__anext__() + next_value = await async_iterator.__anext__() ## Yield from Wrapper to caller try: # yield last value from subgenerator @@ -147,7 +147,7 @@ async def retry_target_stream( except GeneratorExit: # if wrapper received `aclose`, pass to subgenerator and close if target_is_generator: - await cast(AsyncGenerator["_Y", None], subgenerator).aclose() + await cast(AsyncGenerator["_Y", None], async_iterator).aclose() else: raise return @@ -155,7 +155,7 @@ async def retry_target_stream( # bare except catches any exception passed to `athrow` # delegate error handling to subgenerator if target_is_generator: - await cast(AsyncGenerator["_Y", None], subgenerator).athrow( + await cast(AsyncGenerator["_Y", None], async_iterator).athrow( *sys.exc_info() ) else: @@ -177,8 +177,8 @@ async def retry_target_stream( if on_error is not None: on_error(exc) finally: - if target_is_generator: - await cast(AsyncGenerator["_Y", None], subgenerator).aclose() + if target_is_generator and async_iterator is not None: + await cast(AsyncGenerator["_Y", None], async_iterator).aclose() # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: From e63701d54111d3214e551d5d3fc18a6cf40ea54d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 15:32:48 -0800 Subject: [PATCH 152/204] improved comments --- google/api_core/retry_streaming_async.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index be3a4fe3..4b55d4d6 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -132,7 +132,7 @@ async def retry_target_stream( sent_in = None while True: - ## Read from Subgenerator + ## Read from async_iterator # If the target is a generator, we will advance it with `asend` # otherwise, we will use 
`anext` if target_is_generator: @@ -141,20 +141,22 @@ async def retry_target_stream( next_value = await async_iterator.__anext__() ## Yield from Wrapper to caller try: - # yield last value from subgenerator + # yield latest value from target # exceptions from `athrow` and `aclose` are injected here sent_in = yield next_value except GeneratorExit: - # if wrapper received `aclose`, pass to subgenerator and close + # if wrapper received `aclose` while waiting on yield, + # it will raise GeneratorExit here if target_is_generator: + # pass to inner async_iterator for handling await cast(AsyncGenerator["_Y", None], async_iterator).aclose() else: raise return except: # noqa: E722 # bare except catches any exception passed to `athrow` - # delegate error handling to subgenerator if target_is_generator: + # delegate error handling to async_iterator await cast(AsyncGenerator["_Y", None], async_iterator).athrow( *sys.exc_info() ) @@ -162,9 +164,9 @@ async def retry_target_stream( raise return except StopAsyncIteration: - # if generator exhausted, return + # if iterator exhausted, return return - # handle exceptions raised by the subgenerator + # handle exceptions raised by the async_iterator # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except (Exception, asyncio.CancelledError) as exc: From c101ea6bc0a436f0f32f62d132da6da91795f30a Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 15:36:47 -0800 Subject: [PATCH 153/204] renamed variable --- google/api_core/retry_streaming_async.py | 26 ++++++++++++------------ 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 4b55d4d6..442981d9 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -102,7 +102,7 @@ async def retry_target_stream( Exception: If the target raises an error that isn't retryable. 
""" - async_iterator: Optional[AsyncIterator[_Y]] = None + target_iterator: Optional[AsyncIterator[_Y]] = None timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory @@ -124,21 +124,21 @@ async def retry_target_stream( except TypeError: # was not awaitable, continue pass - async_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__() + target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__() if target_is_generator is None: # Check if target supports generator features (asend, athrow, aclose) - target_is_generator = bool(getattr(async_iterator, "asend", None)) + target_is_generator = bool(getattr(target_iterator, "asend", None)) sent_in = None while True: - ## Read from async_iterator + ## Read from target_iterator # If the target is a generator, we will advance it with `asend` # otherwise, we will use `anext` if target_is_generator: - next_value = await async_iterator.asend(sent_in) # type: ignore + next_value = await target_iterator.asend(sent_in) # type: ignore else: - next_value = await async_iterator.__anext__() + next_value = await target_iterator.__anext__() ## Yield from Wrapper to caller try: # yield latest value from target @@ -148,16 +148,16 @@ async def retry_target_stream( # if wrapper received `aclose` while waiting on yield, # it will raise GeneratorExit here if target_is_generator: - # pass to inner async_iterator for handling - await cast(AsyncGenerator["_Y", None], async_iterator).aclose() + # pass to inner target_iterator for handling + await cast(AsyncGenerator["_Y", None], target_iterator).aclose() else: raise return except: # noqa: E722 # bare except catches any exception passed to `athrow` if target_is_generator: - # delegate error handling to async_iterator - await cast(AsyncGenerator["_Y", None], async_iterator).athrow( + # delegate error handling to target_iterator + await 
cast(AsyncGenerator["_Y", None], target_iterator).athrow( *sys.exc_info() ) else: @@ -166,7 +166,7 @@ async def retry_target_stream( except StopAsyncIteration: # if iterator exhausted, return return - # handle exceptions raised by the async_iterator + # handle exceptions raised by the target_iterator # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except (Exception, asyncio.CancelledError) as exc: @@ -179,8 +179,8 @@ async def retry_target_stream( if on_error is not None: on_error(exc) finally: - if target_is_generator and async_iterator is not None: - await cast(AsyncGenerator["_Y", None], async_iterator).aclose() + if target_is_generator and target_iterator is not None: + await cast(AsyncGenerator["_Y", None], target_iterator).aclose() # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: From 3642d740c20b096eb780b79715f4a8409c594a9d Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 15:42:25 -0800 Subject: [PATCH 154/204] fixed tests --- tests/asyncio/test_retry_async.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 4190061d..e221cae4 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -548,7 +548,6 @@ async def test___call___generator_cancellations(self): generator = await retry_(self._generator_mock)(sleep_time=0.2) await generator.__anext__() == 0 task = asyncio.create_task(generator.__anext__()) - await asyncio.sleep(0.1) task.cancel() with pytest.raises(asyncio.CancelledError): await task @@ -685,7 +684,7 @@ async def test___call___with_generator_throw(self, sleep): @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___with_iterable_send(self, sleep, awaitale_wrapped): + async def 
test___call___with_iterable_send(self, sleep, awaitable_wrapped): """ Send should work like next if the wrapped iterable does not support it """ @@ -724,7 +723,7 @@ async def wrapper(): @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___with_iterable_close(self, sleep, awaitale_wrapped): + async def test___call___with_iterable_close(self, sleep, awaitable_wrapped): """ close should be handled by wrapper if wrapped iterable does not support it """ @@ -744,7 +743,7 @@ async def __anext__(self): return CustomIterable() - if awaitale_wrapped: + if awaitable_wrapped: async def wrapper(): return iterable_fn() @@ -768,7 +767,7 @@ async def wrapper(): @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___with_iterable_throw(self, sleep, awaitale_wrapped): + async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped): """ Throw should work even if the wrapped iterable does not support it """ @@ -790,7 +789,7 @@ async def __anext__(self): return CustomIterable() - if awaitale_wrapped: + if awaitable_wrapped: async def wrapper(): return iterable_fn() From 34cfa0897aa368189d9e0645a741dea59e4d4fcf Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 9 Nov 2023 15:49:22 -0800 Subject: [PATCH 155/204] improved comments --- tests/asyncio/test_retry_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index e221cae4..ee86355d 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -678,7 +678,7 @@ async def test___call___with_generator_throw(self, sleep): throw_val = await generator.athrow(ValueError("test")) assert throw_val == 0 assert isinstance(exception_list[0], ValueError) - # calling next on closed generator should not raise error + # 
calling next on generator should not raise error, because it was retried assert await generator.__anext__() == 1 @pytest.mark.parametrize("awaitable_wrapped", [True, False]) From b311b87eefbaeeaa1a39ddb3bca05321007dffd4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 17 Nov 2023 16:16:21 -0800 Subject: [PATCH 156/204] fixed retry factory functionality --- google/api_core/retry.py | 13 +++--- google/api_core/retry_streaming.py | 21 +++++----- google/api_core/retry_streaming_async.py | 20 +++++---- tests/asyncio/test_retry_async.py | 16 ++++---- tests/unit/test_retry.py | 52 +++++++++++++++++++----- 5 files changed, 79 insertions(+), 43 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 2af08cef..6ddde37d 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -104,27 +104,28 @@ class RetryFailureReason(Enum): def _build_retry_error( exc_list: list[Exception], reason: RetryFailureReason, - timeout_val: float, + timeout_val: float | None, **kwargs: Any, ) -> tuple[Exception, Exception | None]: """ Default exception_factory implementation. Builds an exception after the retry fails Args: - - exc_list (list[Exception]): list of exceptions that occurred during the retry - - reason (google.api_core.retry.RetryFailureReason): reason for the retry failure. + - exc_list: list of exceptions that occurred during the retry + - reason: reason for the retry failure. 
Can be TIMEOUT or NON_RETRYABLE_ERROR - - timeout_val (float): the original timeout value for the retry, for use in the exception message + - timeout_val: the original timeout value for the retry, for use in the exception message Returns: - - tuple[Exception, Exception|None]: a tuple of the exception to be raised, and the cause exception if any + - tuple: a tuple of the exception to be raised, and the cause exception if any """ if reason == RetryFailureReason.TIMEOUT: # return RetryError with the most recent exception as the cause src_exc = exc_list[-1] if exc_list else None + timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else "" return ( exceptions.RetryError( - "Timeout of {:.1f}s exceeded".format(timeout_val), + f"Timeout {timeout_val_str}exceeded", src_exc, ), src_exc, diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 1e7a341c..20fafd08 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -31,7 +31,6 @@ import logging import time -from functools import partial import google.api_core.retry as retries @@ -47,7 +46,10 @@ def retry_target_stream( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] + Callable[ + [List[Exception], "retries.RetryFailureReason", Optional[float]], + Tuple[Exception, Optional[Exception]], + ] ] = None, **kwargs, ) -> Generator[_Y, Any, None]: @@ -72,8 +74,8 @@ def retry_target_stream( function will *not* be caught. exception_factory: A function that is called when the retryable reaches a terminal failure state, used to construct an exception to be raised. 
- It it given a list of all exceptions encountered, a boolean indicating - whether the failure was due to a timeout, and the original timeout value + It is given a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, along with the cause exception if any. If not provided, a default implementation will raise a RetryError @@ -93,9 +95,7 @@ def retry_target_stream( timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] - exc_factory = partial( - exception_factory or retries._build_retry_error, timeout_val=timeout - ) + exc_factory = exception_factory or retries._build_retry_error for sleep in sleep_generator: # Start a new retry loop @@ -110,8 +110,9 @@ def retry_target_stream( error_list.append(exc) if not predicate(exc): final_exc, source_exc = exc_factory( - exc_list=error_list, - reason=retries.RetryFailureReason.NON_RETRYABLE_ERROR, + error_list, + retries.RetryFailureReason.NON_RETRYABLE_ERROR, + timeout, ) raise final_exc from source_exc if on_error is not None: @@ -119,7 +120,7 @@ def retry_target_stream( if deadline is not None and time.monotonic() + sleep > deadline: final_exc, source_exc = exc_factory( - exc_list=error_list, reason=retries.RetryFailureReason.TIMEOUT + error_list, retries.RetryFailureReason.TIMEOUT, timeout ) raise final_exc from source_exc _LOGGER.debug( diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 442981d9..b8786633 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -38,10 +38,9 @@ import logging import time import sys -from functools import partial -from google.api_core.retry import _build_retry_error -from google.api_core.retry import RetryFailureReason +import google.api_core.retry as
retries + if TYPE_CHECKING: _Y = TypeVar("_Y") # yielded values @@ -59,7 +58,10 @@ async def retry_target_stream( timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ - Callable[[List[Exception], bool, float], Tuple[Exception, Optional[Exception]]] + Callable[ + [List[Exception], retries.RetryFailureReason, Optional[float]], + Tuple[Exception, Optional[Exception]], + ] ] = None, **kwargs, ) -> AsyncGenerator["_Y", None]: @@ -84,8 +86,8 @@ async def retry_target_stream( function will *not* be caught. exception_factory: A function that is called when the retryable reaches a terminal failure state, used to construct an exception to be raised. - It it given a list of all exceptions encountered, a boolean indicating - whether the failure was due to a timeout, and the original timeout value + It is given a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, along with the cause exception if any.
If not provided, a default implementation will raise a RetryError @@ -108,7 +110,7 @@ async def retry_target_stream( # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: List[Exception] = [] # override exception_factory to build a more complex exception - exc_factory = partial(exception_factory or _build_retry_error, timeout_val=timeout) + exc_factory = exception_factory or retries._build_retry_error target_is_generator: Optional[bool] = None for sleep in sleep_generator: @@ -173,7 +175,7 @@ async def retry_target_stream( error_list.append(exc) if not predicate(exc): exc, source_exc = exc_factory( - exc_list=error_list, reason=RetryFailureReason.NON_RETRYABLE_ERROR + error_list, retries.RetryFailureReason.NON_RETRYABLE_ERROR, timeout ) raise exc from source_exc if on_error is not None: @@ -185,7 +187,7 @@ async def retry_target_stream( # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: final_exc, source_exc = exc_factory( - exc_list=error_list, reason=RetryFailureReason.TIMEOUT + error_list, retries.RetryFailureReason.TIMEOUT, timeout ) raise final_exc from source_exc _LOGGER.debug( diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 91f04f42..ccd5532c 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -828,10 +828,10 @@ async def test_exc_factory_non_retryable_error(self): expected_source_err = ZeroDivisionError("test4") def factory(*args, **kwargs): - assert len(args) == 0 - assert kwargs["exc_list"] == sent_errors - assert kwargs["reason"] == RetryFailureReason.NON_RETRYABLE_ERROR - assert kwargs["timeout_val"] == timeout + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout return expected_final_err, expected_source_err generator = retry_target_stream( @@ -876,10 +876,10 @@ async def 
test_exc_factory_timeout(self): expected_source_err = ZeroDivisionError("test4") def factory(*args, **kwargs): - assert len(args) == 0 - assert kwargs["exc_list"] == sent_errors - assert kwargs["reason"] == RetryFailureReason.TIMEOUT - assert kwargs["timeout_val"] == timeout + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout return expected_final_err, expected_source_err generator = retry_target_stream( diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index da8714c0..967a137a 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -67,13 +67,45 @@ def test__build_retry_error_empty_list(): should return a generic RetryError """ from google.api_core.retry import _build_retry_error + from google.api_core.retry import RetryFailureReason - src, cause = _build_retry_error([], False, 10) + reason = RetryFailureReason.NON_RETRYABLE_ERROR + src, cause = _build_retry_error([], reason, 10) assert isinstance(src, exceptions.RetryError) assert cause is None assert src.message == "Unknown error" +def test__build_retry_error_timeout_message(): + """ + should provide helpful error message when timeout is reached + """ + from google.api_core.retry import _build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.TIMEOUT + cause = RuntimeError("timeout") + src, found_cause = _build_retry_error([ValueError(), cause], reason, 10) + assert isinstance(src, exceptions.RetryError) + assert src.message == "Timeout of 10.0s exceeded" + # should attach appropriate cause + assert found_cause is cause + + +def test__build_retry_error_empty_timeout(): + """ + attempt to build a retry error when timeout is None + should return a generic timeout error message + """ + from google.api_core.retry import _build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.TIMEOUT + src, _ =
_build_retry_error([], reason, None) + assert isinstance(src, exceptions.RetryError) + assert src.message == "Timeout exceeded" + + @mock.patch("time.sleep", autospec=True) @mock.patch( "google.api_core.datetime_helpers.utcnow", @@ -863,16 +895,16 @@ def test_exc_factory_non_retryable_error(self): from google.api_core.retry import RetryFailureReason from google.api_core.retry_streaming import retry_target_stream - timeout = 6 + timeout = None sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] expected_final_err = RuntimeError("done") expected_source_err = ZeroDivisionError("test4") def factory(*args, **kwargs): - assert len(args) == 0 - assert kwargs["exc_list"] == sent_errors - assert kwargs["reason"] == RetryFailureReason.NON_RETRYABLE_ERROR - assert kwargs["timeout_val"] == timeout + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout return expected_final_err, expected_source_err generator = retry_target_stream( @@ -916,10 +948,10 @@ def test_exc_factory_timeout(self): expected_source_err = ZeroDivisionError("test4") def factory(*args, **kwargs): - assert len(args) == 0 - assert kwargs["exc_list"] == sent_errors - assert kwargs["reason"] == RetryFailureReason.TIMEOUT - assert kwargs["timeout_val"] == timeout + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout return expected_final_err, expected_source_err generator = retry_target_stream( From 19a998d88b3bbfc9a7cdf9b35c572925b19e0000 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 15:01:16 -0800 Subject: [PATCH 157/204] created new objects for streaming retry config --- google/api_core/retry.py | 336 ++++------- google/api_core/retry_async.py | 210 +------ google/api_core/retry_streaming.py | 221 ++++++- google/api_core/retry_streaming_async.py | 162 ++++- tests/asyncio/test_retry_async.py | 491 
--------------- tests/asyncio/test_retry_streaming_async.py | 638 ++++++++++++++++++++ tests/unit/test_retry.py | 433 ------------- tests/unit/test_retry_streaming.py | 586 ++++++++++++++++++ 8 files changed, 1733 insertions(+), 1344 deletions(-) create mode 100644 tests/asyncio/test_retry_streaming_async.py create mode 100644 tests/unit/test_retry_streaming.py diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 6ddde37d..447285e9 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -71,7 +71,6 @@ def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions -import google.api_core.retry_streaming as retry_streaming from google.auth import exceptions as auth_exceptions if TYPE_CHECKING: @@ -287,8 +286,133 @@ def retry_target( raise ValueError("Sleep generator stopped yielding sleep values.") -class Retry(object): - """Exponential retry decorator. +class _BaseRetry(object): + """ + Base class for retry configuration objects. This class is intended to capture retry + and backoff configuration that is common to both synchronous and asynchronous retries, + for both unary and streaming RPCs. It is not intended to be instantiated directly, + but rather to be subclassed by the various retry configuration classes. 
+ """ + + def __init__( + self, + predicate: Callable[[BaseException], bool] = if_transient_error, + initial: float = _DEFAULT_INITIAL_DELAY, + maximum: float = _DEFAULT_MAXIMUM_DELAY, + multiplier: float = _DEFAULT_DELAY_MULTIPLIER, + timeout: float = _DEFAULT_DEADLINE, + on_error: Callable[[BaseException], Any] | None = None, + **kwargs: Any, + ) -> None: + self._predicate = predicate + self._initial = initial + self._multiplier = multiplier + self._maximum = maximum + self._timeout = kwargs.get("deadline", timeout) + self._deadline = self._timeout + self._on_error = on_error + + def __call__(self, *args, **kwargs) -> Any: + raise NotImplementedError("Not implemented in base class") + + @property + def deadline(self): + """ + DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class + documentation for details. + """ + return self._timeout + + @property + def timeout(self): + return self._timeout + + def _replace( + self, + predicate=None, + initial=None, + maximum=None, + multiplier=None, + timeout=None, + on_error=None, + ): + return type(self)( + predicate=predicate or self._predicate, + initial=initial or self._initial, + maximum=maximum or self._maximum, + multiplier=multiplier or self._multiplier, + timeout=timeout or self._timeout, + on_error=on_error or self._on_error, + ) + + def with_deadline(self, deadline): + """Return a copy of this retry with the given timeout. + + DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class + documentation for details. + + Args: + deadline (float): How long to keep retrying in seconds. + + Returns: + Retry: A new retry instance with the given timeout. + """ + return self._replace(timeout=deadline) + + def with_timeout(self, timeout): + """Return a copy of this retry with the given timeout. + + Args: + timeout (float): How long to keep retrying, in seconds. + + Returns: + Retry: A new retry instance with the given timeout. 
+ """ + return self._replace(timeout=timeout) + + def with_predicate(self, predicate): + """Return a copy of this retry with the given predicate. + + Args: + predicate (Callable[Exception]): A callable that should return + ``True`` if the given exception is retryable. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return self._replace(predicate=predicate) + + def with_delay(self, initial=None, maximum=None, multiplier=None): + """Return a copy of this retry with the given delay options. + + Args: + initial (float): The minimum amount of time to delay. This must + be greater than 0. + maximum (float): The maximum amount of time to delay. + multiplier (float): The multiplier applied to the delay. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) + + def __str__(self): + return ( + "<{} predicate={}, initial={:.1f}, maximum={:.1f}, " + "multiplier={:.1f}, timeout={}, on_error={}>".format( + type(self).__name__, + self._predicate, + self._initial, + self._maximum, + self._multiplier, + self._timeout, # timeout can be None, thus no {:.1f} + self._on_error, + ) + ) + + +class Retry(_BaseRetry): + """Exponential retry decorator for unary synchronous RPCs. This class is a decorator used to add retry or polling behavior to an RPC call. @@ -365,70 +489,6 @@ class Retry(object): ``Operation`` or ``PollingFuture`` in general Timeout stands for Polling Timeout. - When ``is_stream=False``, the target is treated as a callable, - and will retry when the callable returns an error. When ``is_stream=True``, - the target will be treated as a generator function. Instead of just wrapping - the initial call in retry logic, the entire output iterable is - wrapped, with each yield passing through the retryable generator. If any yield - in the stream raises a retryable exception, the entire stream will be - retried. 
- - NOTE: when a stream encounters a retryable error, it will - silently construct a fresh iterator instance in the background - and continue yielding (likely duplicate) values as if no error occurred. - This is the most general way to retry a stream, but it often is not the - desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - - There are two ways to build more advanced retry logic for streams: - - 1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - network call in a function that modifies the request based on what has - already been returned: - - .. code-block:: python - - def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = target(new_request) - for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped_fn = Retry(is_stream=True,...)(attempt_with_modified_request) - retryable_generator = retry_wrapped_fn(target, request) - - 2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - .. 
code-block:: python - - def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = Retry(is_stream=True,...)(target) - # keep track of what has been yielded out of filter - seen_items = [] - for item in retryable_gen(): - if stream_idx >= len(seen_items): - seen_items.append(item) - yield item - elif item != seen_items[stream_idx]: - raise ValueError("Stream differs from last attempt") - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - Args: predicate (Callable[Exception]): A callable that should return ``True`` if the given exception is retryable. @@ -440,43 +500,15 @@ def on_error(e): on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - is_stream (bool): Indicates whether the input function - should be treated as a stream function (i.e. a Generator, - or function that returns an Iterable). If True, the iterable - will be wrapped with retry logic, and any failed outputs will - restart the stream. If False, only the input function call itself - will be retried. Defaults to False. - To avoid duplicate values, retryable streams should typically be - wrapped in additional filter logic before use. deadline (float): DEPRECATED: use `timeout` instead. For backward compatibility, if specified it will override the ``timeout`` parameter. 
""" - def __init__( - self, - predicate: Callable[[BaseException], bool] = if_transient_error, - initial: float = _DEFAULT_INITIAL_DELAY, - maximum: float = _DEFAULT_MAXIMUM_DELAY, - multiplier: float = _DEFAULT_DELAY_MULTIPLIER, - timeout: float = _DEFAULT_DEADLINE, - on_error: Callable[[BaseException], Any] | None = None, - is_stream: bool = False, - **kwargs: Any, - ) -> None: - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._timeout = kwargs.get("deadline", timeout) - self._deadline = self._timeout - self._on_error = on_error - self._is_stream = is_stream - def __call__( self, - func: Callable[_P, _R | Iterable[_Y]], + func: Callable[_P, _R], on_error: Callable[[BaseException], Any] | None = None, - ) -> Callable[_P, _R | Generator[_Y, Any, None]]: + ) -> Callable[_P, _R]: """Wrap a callable with retry behavior. Args: @@ -495,9 +527,7 @@ def __call__( on_error = self._on_error @functools.wraps(func) - def retry_wrapped_func( - *args: _P.args, **kwargs: _P.kwargs - ) -> _R | Generator[_Y, Any, None]: + def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: """A wrapper that calls target function with retry.""" target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( @@ -509,110 +539,6 @@ def retry_wrapped_func( "timeout": self._timeout, "on_error": on_error, } - if self._is_stream: - # when stream is enabled, assume target returns an iterable that yields _Y - stream_target = cast(Callable[[], Iterable["_Y"]], target) - return retry_streaming.retry_target_stream( - stream_target, **retry_kwargs - ) - else: - return retry_target(target, **retry_kwargs) + return retry_target(target, **retry_kwargs) return retry_wrapped_func - - @property - def deadline(self): - """ - DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class - documentation for details. 
- """ - return self._timeout - - @property - def timeout(self): - return self._timeout - - def with_deadline(self, deadline): - """Return a copy of this retry with the given timeout. - - DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class - documentation for details. - - Args: - deadline (float): How long to keep retrying in seconds. - - Returns: - Retry: A new retry instance with the given timeout. - """ - return self.with_timeout(timeout=deadline) - - def with_timeout(self, timeout): - """Return a copy of this retry with the given timeout. - - Args: - timeout (float): How long to keep retrying, in seconds. - - Returns: - Retry: A new retry instance with the given timeout. - """ - return Retry( - predicate=self._predicate, - initial=self._initial, - maximum=self._maximum, - multiplier=self._multiplier, - timeout=timeout, - on_error=self._on_error, - ) - - def with_predicate(self, predicate): - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - Retry: A new retry instance with the given predicate. - """ - return Retry( - predicate=predicate, - initial=self._initial, - maximum=self._maximum, - multiplier=self._multiplier, - timeout=self._timeout, - on_error=self._on_error, - ) - - def with_delay(self, initial=None, maximum=None, multiplier=None): - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Returns: - Retry: A new retry instance with the given predicate. 
- """ - return Retry( - predicate=self._predicate, - initial=initial if initial is not None else self._initial, - maximum=maximum if maximum is not None else self._maximum, - multiplier=multiplier if multiplier is not None else self._multiplier, - timeout=self._timeout, - on_error=self._on_error, - ) - - def __str__(self): - return ( - "".format( - self._predicate, - self._initial, - self._maximum, - self._multiplier, - self._timeout, # timeout can be None, thus no {:.1f} - self._on_error, - ) - ) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index d406ac2a..3ec7ac46 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -70,10 +70,12 @@ async def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions +from google.api_core.retry import _BaseRetry from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 from google.api_core.retry import if_transient_error -from google.api_core.retry_streaming_async import retry_target_stream +from google.api_core.retry import _build_retry_error +from google.api_core.retry import RetryFailureReason if TYPE_CHECKING: import sys @@ -85,7 +87,6 @@ async def check_if_exists(): _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value - _Y = TypeVar("_Y") # target stream yielded values _LOGGER = logging.getLogger(__name__) _DEFAULT_INITIAL_DELAY = 1.0 # seconds @@ -179,7 +180,7 @@ async def retry_target( raise ValueError("Sleep generator stopped yielding sleep values.") -class AsyncRetry: +class AsyncRetry(_BaseRetry): """Exponential retry decorator for async coroutines. This class is a decorator used to add exponential back-off retry behavior @@ -188,69 +189,6 @@ class AsyncRetry: Although the default behavior is to retry transient API errors, a different predicate can be provided to retry other exceptions. 
- When ``is_stream=False``, the target is treated as a coroutine function, - and will retry when the coroutine returns an error. When ``is_stream=True``, - the target will be treated as an async generator function. Instead - of just wrapping the initial call in retry logic, the output iterable is - wrapped, with each yield passing through the retryable generator. If any yield - in the stream raises a retryable exception, the entire stream will be - retried. - - Important Note: when a stream is encounters a retryable error, it will - silently construct a fresh iterator instance in the background - and continue yielding (likely duplicate) values as if no error occurred. - This is the most general way to retry a stream, but it often is not the - desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] - - There are two ways to build more advanced retry logic for streams: - - 1. Wrap the target - Use a ``target`` that maintains state between retries, and creates a - different generator on each retry call. For example, you can wrap a - grpc call in a function that modifies the request based on what has - already been returned: - - .. code-block:: python - - async def attempt_with_modified_request(target, request, seen_items=[]): - # remove seen items from request on each attempt - new_request = modify_request(request, seen_items) - new_generator = await target(new_request) - async for item in new_generator: - yield item - seen_items.append(item) - - retry_wrapped = AsyncRetry(is_stream=True,...)(attempt_with_modified_request, target, request, []) - - 2. Wrap the retry generator - Alternatively, you can wrap the retryable generator itself before - passing it to the end-user to add a filter on the stream. For - example, you can keep track of the items that were successfully yielded - in previous retry attempts, and only yield new items when the - new attempt surpasses the previous ones: - - .. 
code-block:: python - - async def retryable_with_filter(target): - stream_idx = 0 - # reset stream_idx when the stream is retried - def on_error(e): - nonlocal stream_idx - stream_idx = 0 - # build retryable - retryable_gen = AsyncRetry(is_stream=True, ...)(target) - # keep track of what has been yielded out of filter - seen_items = [] - async for item in retryable_gen: - if stream_idx >= len(seen_items): - yield item - seen_items.append(item) - elif item != previous_stream[stream_idx]: - raise ValueError("Stream differs from last attempt")" - stream_idx += 1 - - filter_retry_wrapped = retryable_with_filter(target) - Args: predicate (Callable[Exception]): A callable that should return ``True`` if the given exception is retryable. @@ -262,45 +200,15 @@ def on_error(e): on_error (Optional[Callable[Exception]]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. - is_stream (bool): Indicates whether the input function - should be treated as a stream function (i.e. an AsyncGenerator, - or function or coroutine that returns an AsyncIterable). - If True, the iterable will be wrapped with retry logic, and any - failed outputs will restart the stream. If False, only the input - function call itself will be retried. Defaults to False. - To avoid duplicate values, retryable streams should typically be - wrapped in additional filter logic before use. deadline (float): DEPRECATED use ``timeout`` instead. If set it will override ``timeout`` parameter. 
""" - def __init__( - self, - predicate: Callable[[BaseException], bool] = if_transient_error, - initial: float = _DEFAULT_INITIAL_DELAY, - maximum: float = _DEFAULT_MAXIMUM_DELAY, - multiplier: float = _DEFAULT_DELAY_MULTIPLIER, - timeout: float = _DEFAULT_TIMEOUT, - on_error: Callable[[BaseException], Any] | None = None, - is_stream: bool = False, - **kwargs, - ): - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._timeout = kwargs.get("deadline", timeout) - self._deadline = self._timeout - self._on_error = on_error - self._is_stream = is_stream - def __call__( self, - func: Callable[ - ..., Awaitable[_R] | AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] - ], + func: Callable[..., Awaitable[_R]], on_error: Callable[[BaseException], Any] | None = None, - ) -> Callable[_P, Awaitable[_R | AsyncGenerator[_Y, None]]]: + ) -> Callable[_P, Awaitable[_R]]: """Wrap a callable with retry behavior. Args: @@ -311,7 +219,6 @@ def __call__( function will *not* be caught. If on_error was specified in the constructor, this value will be ignored. - Returns: Callable: A callable that will invoke ``func`` with retry behavior. 
@@ -319,105 +226,18 @@ def __call__( if self._on_error is not None: on_error = self._on_error - # @functools.wraps(func) - async def retry_wrapped_func( - *args: _P.args, **kwargs: _P.kwargs - ) -> _R | AsyncGenerator[_Y, None]: + @functools.wraps(func) + async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: """A wrapper that calls target function with retry.""" - target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_kwargs = { - "predicate": self._predicate, - "sleep_generator": sleep_generator, - "timeout": self._timeout, - "on_error": on_error, - } - if self._is_stream: - stream_target = cast(Callable[[], AsyncIterable["_Y"]], target) - return retry_target_stream(stream_target, **retry_kwargs) - else: - return await retry_target(target, **retry_kwargs) + return await retry_target( + functools.partial(func, *args, **kwargs), + predicate=self._predicate, + sleep_generator=sleep_generator, + timeout=self._timeout, + on_error=on_error, + ) return retry_wrapped_func - - def _replace( - self, - predicate=None, - initial=None, - maximum=None, - multiplier=None, - timeout=None, - on_error=None, - ): - return AsyncRetry( - predicate=predicate or self._predicate, - initial=initial or self._initial, - maximum=maximum or self._maximum, - multiplier=multiplier or self._multiplier, - timeout=timeout or self._timeout, - on_error=on_error or self._on_error, - ) - - def with_deadline(self, deadline): - """Return a copy of this retry with the given deadline. - DEPRECATED: use :meth:`with_timeout` instead. - - Args: - deadline (float): How long to keep retrying. - - Returns: - AsyncRetry: A new retry instance with the given deadline. - """ - return self._replace(timeout=deadline) - - def with_timeout(self, timeout): - """Return a copy of this retry with the given timeout. - - Args: - timeout (float): How long to keep retrying, in seconds. 
- - Returns: - AsyncRetry: A new retry instance with the given timeout. - """ - return self._replace(timeout=timeout) - - def with_predicate(self, predicate): - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - AsyncRetry: A new retry instance with the given predicate. - """ - return self._replace(predicate=predicate) - - def with_delay(self, initial=None, maximum=None, multiplier=None): - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Returns: - AsyncRetry: A new retry instance with the given predicate. - """ - return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) - - def __str__(self): - return ( - "".format( - self._predicate, - self._initial, - self._maximum, - self._multiplier, - self._timeout, - self._on_error, - ) - ) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 20fafd08..623049fc 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -14,9 +14,8 @@ """ Generator wrapper for retryable streaming RPCs. -This function will be used when initilizing a retry with -``Retry(is_stream=True)``. 
""" +from __future__ import annotations from typing import ( Callable, @@ -27,14 +26,29 @@ Generator, TypeVar, Any, + TYPE_CHECKING, ) +import sys import logging import time +import functools -import google.api_core.retry as retries +from google.api_core.retry import _BaseRetry +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import if_exception_type # noqa: F401 +from google.api_core.retry import if_transient_error +from google.api_core.retry import _build_retry_error +from google.api_core.retry import RetryFailureReason -_Y = TypeVar("_Y") # yielded values +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _Y = TypeVar("_Y") # yielded values _LOGGER = logging.getLogger(__name__) @@ -47,7 +61,7 @@ def retry_target_stream( on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ Callable[ - [List[Exception], "retries.RetryFailureReason", Optional[float]], + [List[Exception], RetryFailureReason, Optional[float]], Tuple[Exception, Optional[Exception]], ] ] = None, @@ -95,7 +109,7 @@ def retry_target_stream( timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None error_list: List[Exception] = [] - exc_factory = exception_factory or retries._build_retry_error + exc_factory = exception_factory or _build_retry_error for sleep in sleep_generator: # Start a new retry loop @@ -111,7 +125,7 @@ def retry_target_stream( if not predicate(exc): final_exc, source_exc = exc_factory( error_list, - retries.RetryFailureReason.NON_RETRYABLE_ERROR, + RetryFailureReason.NON_RETRYABLE_ERROR, timeout, ) raise final_exc from source_exc @@ -120,7 +134,7 @@ def retry_target_stream( if deadline is not None and time.monotonic() + sleep > deadline: final_exc, source_exc = exc_factory( - error_list, 
retries.RetryFailureReason.TIMEOUT, timeout
+                error_list, RetryFailureReason.TIMEOUT, timeout
             )
             raise final_exc from source_exc
         _LOGGER.debug(
@@ -129,3 +143,194 @@
     time.sleep(sleep)
 
     raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class StreamingRetry(_BaseRetry):
+    """Exponential retry decorator for streaming synchronous RPCs.
+
+    This class returns a Generator when called, which wraps the target
+    stream in retry logic. If any exception is raised by the target, the
+    entire stream will be retried within the wrapper.
+
+    Although the default behavior is to retry transient API errors, a
+    different predicate can be provided to retry other exceptions.
+
+    There are two important concepts that retry/polling behavior may operate on,
+    Deadline and Timeout, which need to be properly defined for the correct
+    usage of this class and the rest of the library.
+
+    Deadline: a fixed point in time by which a certain operation must
+    terminate. For example, if a certain operation has a deadline
+    "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an
+    error) by that time, regardless of when it was started or whether it
+    was started at all.
+
+    Timeout: the maximum duration of time after which a certain operation
+    must terminate (successfully or with an error). The countdown begins right
+    after an operation was started. For example, if an operation was started at
+    09:24:00 with timeout of 75 seconds, it must terminate no later than
+    09:25:15.
+
+    Unfortunately, in the past this class (and the api-core library as a whole) has not been
+    properly distinguishing the concepts of "timeout" and "deadline", and the
+    ``deadline`` parameter has meant ``timeout``. That is why
+    ``deadline`` has been deprecated and ``timeout`` should be used instead. If the
+    ``deadline`` parameter is set, it will override the ``timeout`` parameter.
In other words, + ``retry.deadline`` should be treated as just a deprecated alias for + ``retry.timeout``. + + Said another way, it is safe to assume that this class and the rest of this + library operate in terms of timeouts (not deadlines) unless explicitly + noted the usage of deadline semantics. + + It is also important to + understand the three most common applications of the Timeout concept in the + context of this library. + + Usually the generic Timeout term may stand for one of the following actual + timeouts: RPC Timeout, Retry Timeout, or Polling Timeout. + + RPC Timeout: a value supplied by the client to the server so + that the server side knows the maximum amount of time it is expected to + spend handling that specific RPC. For example, in the case of gRPC transport, + RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2 + request. The `timeout` property of this class normally never represents the + RPC Timeout as it is handled separately by the ``google.api_core.timeout`` + module of this library. + + Retry Timeout: this is the most common meaning of the ``timeout`` property + of this class, and defines how long a certain RPC may be retried in case + the server returns an error. + + Polling Timeout: defines how long the + client side is allowed to call the polling RPC repeatedly to check a status of a + long-running operation. Each polling RPC is + expected to succeed (its errors are supposed to be handled by the retry + logic). The decision as to whether a new polling attempt needs to be made is based + not on the RPC status code but on the status of the returned + status of an operation. In other words: we will poll a long-running operation until + the operation is done or the polling timeout expires. Each poll will inform us of + the status of the operation. The poll consists of an RPC to the server that may + itself be retried as per the poll-specific retry settings in case of errors. 
The + operation-level retry settings do NOT apply to polling-RPC retries. + + With the actual timeout types being defined above, the client libraries + often refer to just Timeout without clarifying which type specifically + that is. In that case the actual timeout type (sometimes also referred to as + Logical Timeout) can be determined from the context. If it is a unary rpc + call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if + provided directly as a standalone value) or Retry Timeout (if provided as + ``retry.timeout`` property of the unary RPC's retry config). For + ``Operation`` or ``PollingFuture`` in general Timeout stands for + Polling Timeout. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + .. code-block:: python + + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. 
For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + .. code-block:: python + + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = StreamingRetry(...)(target) + # keep track of what has been yielded out of filter + seen_items = [] + for item in retryable_gen(): + if stream_idx >= len(seen_items): + seen_items.append(item) + yield item + elif item != seen_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum amount of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum amount of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + timeout (float): How long to keep retrying, in seconds. + on_error (Callable[Exception]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + deadline (float): DEPRECATED: use `timeout` instead. For backward + compatibility, if specified it will override the ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[_P, Iterable[_Y]], + on_error: Callable[[BaseException], Any] | None = None, + ) -> Callable[_P, Generator[_Y, Any, None]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. 
If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. + """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> Generator[_Y, Any, None]: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target_stream( + functools.partial(func, *args, **kwargs), + predicate=self._predicate, + sleep_generator=sleep_generator, + timeout=self._timeout, + on_error=on_error, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index b8786633..be276894 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -13,13 +13,13 @@ # limitations under the License. """ -Generator wrapper for retryable streaming RPCs. -This function will be used when initilizing a retry with -``AsyncRetry(is_stream=True)``. +Generator wrapper for retryable async streaming RPCs. 
""" +from __future__ import annotations from typing import ( cast, + Any, Callable, Optional, Iterable, @@ -38,28 +38,37 @@ import logging import time import sys +import functools -import google.api_core.retry as retries +from google.api_core.retry import _BaseRetry +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import if_exception_type # noqa: F401 +from google.api_core.retry import if_transient_error +from google.api_core.retry import _build_retry_error +from google.api_core.retry import RetryFailureReason if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters _Y = TypeVar("_Y") # yielded values _LOGGER = logging.getLogger(__name__) async def retry_target_stream( - target: Union[ - Callable[[], AsyncIterable["_Y"]], - Callable[[], Awaitable[AsyncIterable["_Y"]]], - ], + target: Callable[[], AsyncIterable["_Y"] | Awaitable[AsyncIterable["_Y"]]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, exception_factory: Optional[ Callable[ - [List[Exception], retries.RetryFailureReason, Optional[float]], + [List[Exception], RetryFailureReason, Optional[float]], Tuple[Exception, Optional[Exception]], ] ] = None, @@ -110,7 +119,7 @@ async def retry_target_stream( # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: List[Exception] = [] # override exception_factory to build a more complex exception - exc_factory = exception_factory or retries._build_retry_error + exc_factory = exception_factory or _build_retry_error target_is_generator: Optional[bool] = None for sleep in sleep_generator: @@ -175,7 +184,7 @@ async def retry_target_stream( error_list.append(exc) if not predicate(exc): exc, source_exc = exc_factory( - error_list, 
retries.RetryFailureReason.NON_RETRYABLE_ERROR, timeout + error_list, RetryFailureReason.NON_RETRYABLE_ERROR, timeout ) raise exc from source_exc if on_error is not None: @@ -187,7 +196,7 @@ async def retry_target_stream( # sleep and adjust timeout budget if deadline is not None and time.monotonic() + sleep > deadline: final_exc, source_exc = exc_factory( - error_list, retries.RetryFailureReason.TIMEOUT, timeout + error_list, RetryFailureReason.TIMEOUT, timeout ) raise final_exc from source_exc _LOGGER.debug( @@ -195,3 +204,132 @@ async def retry_target_stream( ) await asyncio.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") + + +class AsyncStreamingRetry(_BaseRetry): + """Exponential retry decorator for async streaming rpcs. + + This class returns an AsyncGenerator when called, which wraps the target + stream in retry logic. If any exception is raised by the target, the + entire stream will be retried within the wrapper. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + Important Note: when a stream is encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + grpc call in a function that modifies the request based on what has + already been returned: + + .. 
code-block:: python
+
+        async def attempt_with_modified_request(target, request, seen_items=[]):
+            # remove seen items from request on each attempt
+            new_request = modify_request(request, seen_items)
+            new_generator = await target(new_request)
+            async for item in new_generator:
+                yield item
+                seen_items.append(item)
+
+        retry_wrapped = AsyncStreamingRetry(...)(attempt_with_modified_request, target, request, [])
+
+    2. Wrap the retry generator
+        Alternatively, you can wrap the retryable generator itself before
+        passing it to the end-user to add a filter on the stream. For
+        example, you can keep track of the items that were successfully yielded
+        in previous retry attempts, and only yield new items when the
+        new attempt surpasses the previous ones:
+
+        .. code-block:: python
+
+        async def retryable_with_filter(target):
+            stream_idx = 0
+            # reset stream_idx when the stream is retried
+            def on_error(e):
+                nonlocal stream_idx
+                stream_idx = 0
+            # build retryable
+            retryable_gen = AsyncStreamingRetry(...)(target)
+            # keep track of what has been yielded out of filter
+            seen_items = []
+            async for item in retryable_gen:
+                if stream_idx >= len(seen_items):
+                    yield item
+                    seen_items.append(item)
+                elif item != seen_items[stream_idx]:
+                    raise ValueError("Stream differs from last attempt")
+                stream_idx += 1
+
+        filter_retry_wrapped = retryable_with_filter(target)
+
+    Args:
+        predicate (Callable[Exception]): A callable that should return ``True``
+            if the given exception is retryable.
+        initial (float): The minimum amount of time to delay in seconds. This
+            must be greater than 0.
+        maximum (float): The maximum amount of time to delay in seconds.
+        multiplier (float): The multiplier applied to the delay.
+        timeout (Optional[float]): How long to keep retrying in seconds.
+        on_error (Optional[Callable[Exception]]): A function to call while processing
+            a retryable exception. Any error raised by this function will
+            *not* be caught.
+ is_stream (bool): Indicates whether the input function + should be treated as a stream function (i.e. an AsyncGenerator, + or function or coroutine that returns an AsyncIterable). + If True, the iterable will be wrapped with retry logic, and any + failed outputs will restart the stream. If False, only the input + function call itself will be retried. Defaults to False. + To avoid duplicate values, retryable streams should typically be + wrapped in additional filter logic before use. + deadline (float): DEPRECATED use ``timeout`` instead. If set it will + override ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], + on_error: Callable[[BaseException], Any] | None = None, + ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable or stream to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
+ """ + if self._on_error is not None: + on_error = self._on_error + + # @functools.wraps(func) + async def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> AsyncGenerator[_Y, None]: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target_stream( + functools.partial(func, *args, **kwargs), + predicate=self._predicate, + sleep_generator=sleep_generator, + timeout=self._timeout, + on_error=on_error, + ) + + return retry_wrapped_func diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index ccd5532c..7c86d0e4 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -409,494 +409,3 @@ async def test___init___when_retry_is_executed(self, sleep, uniform): assert _some_function.call_count == 2 target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) - - async def _generator_mock( - self, - num=5, - error_on=None, - exceptions_seen=None, - sleep_time=0, - ): - """ - Helper to create a mock generator that yields a number of values - Generator can optionally raise an exception on a specific iteration - - Args: - - num (int): the number of values to yield - - error_on (int): if given, the generator will raise a ValueError on the specified iteration - - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising - - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value - """ - try: - for i in range(num): - if sleep_time: - await asyncio.sleep(sleep_time) - if error_on and i == error_on: - raise ValueError("generator mock error") - yield i - except (Exception, BaseException, GeneratorExit) as e: - # keep track of exceptions seen by generator - if exceptions_seen is not None: - exceptions_seen.append(e) - raise - - 
@mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___generator_success(self, sleep): - """ - Test that a retry-decorated generator yields values as expected - This test checks a generator with no issues - """ - from collections.abc import AsyncGenerator - - retry_ = retry_async.AsyncRetry(is_stream=True) - decorated = retry_(self._generator_mock) - - num = 10 - generator = await decorated(num) - # check types - assert isinstance(generator, AsyncGenerator) - assert isinstance(self._generator_mock(num), AsyncGenerator) - # check yield contents - unpacked = [i async for i in generator] - assert len(unpacked) == num - expected = [i async for i in self._generator_mock(num)] - for a, b in zip(unpacked, expected): - assert a == b - sleep.assert_not_called() - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___generator_retry(self, sleep): - """ - Tests that a retry-decorated generator will retry on errors - """ - on_error = mock.Mock(return_value=None) - retry_ = retry_async.AsyncRetry( - on_error=on_error, - predicate=retry_async.if_exception_type(ValueError), - is_stream=True, - timeout=None, - ) - generator = await retry_(self._generator_mock)(error_on=3) - # error thrown on 3 - # generator should contain 0, 1, 2 looping - unpacked = [await generator.__anext__() for i in range(10)] - assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] - assert on_error.call_count == 3 - - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): - """ - Tests that a retry-decorated generator will throw a RetryError - after using the time budget - """ - import time - - on_error = mock.Mock() - retry_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(ValueError), - initial=1.0, - maximum=1024.0, - multiplier=2.0, - 
deadline=9.9, - is_stream=True, - ) - - time_now = time.monotonic() - now_patcher = mock.patch( - "time.monotonic", - return_value=time_now, - ) - - decorated = retry_(self._generator_mock, on_error=on_error) - generator = await decorated(error_on=1) - - with now_patcher as patched_now: - # Make sure that calls to fake asyncio.sleep() also advance the mocked - # time clock. - def increase_time(sleep_delay): - patched_now.return_value += sleep_delay - - sleep.side_effect = increase_time - - with pytest.raises(exceptions.RetryError): - [i async for i in generator] - - assert on_error.call_count == 4 - # check the delays - assert sleep.call_count == 3 # once between each successive target calls - last_wait = sleep.call_args.args[0] - total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - # next wait would have put us over, so ended early - assert last_wait == 4 - assert total_wait == 7 - - @pytest.mark.asyncio - async def test___call___generator_cancellations(self): - """ - cancel calls should propagate to the generator - """ - # test without cancel as retryable - retry_ = retry_async.AsyncRetry(is_stream=True) - utcnow = datetime.datetime.utcnow() - mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) - generator = await retry_(self._generator_mock)(sleep_time=0.2) - await generator.__anext__() == 0 - task = asyncio.create_task(generator.__anext__()) - task.cancel() - with pytest.raises(asyncio.CancelledError): - await task - with pytest.raises(StopAsyncIteration): - await generator.__anext__() - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_generator_send(self, sleep): - """ - Send should be passed through retry into target generator - """ - - async def _mock_send_gen(): - """ - always yield whatever was sent in - """ - in_ = yield - while True: - in_ = yield in_ - - retry_ = retry_async.AsyncRetry(is_stream=True) - - decorated = retry_(_mock_send_gen) - - generator = 
await decorated() - result = await generator.__anext__() - # first yield should be None - assert result is None - in_messages = ["test_1", "hello", "world"] - out_messages = [] - for msg in in_messages: - recv = await generator.asend(msg) - out_messages.append(recv) - assert in_messages == out_messages - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___generator_send_retry(self, sleep): - """ - Send should be retried if target generator raises an error - """ - on_error = mock.Mock(return_value=None) - retry_ = retry_async.AsyncRetry( - on_error=on_error, - predicate=retry_async.if_exception_type(ValueError), - is_stream=True, - timeout=None, - ) - generator = await retry_(self._generator_mock)(error_on=3) - with pytest.raises(TypeError) as exc_info: - await generator.asend("cannot send to fresh generator") - assert exc_info.match("can't send non-None value") - - # error thrown on 3 - # generator should contain 0, 1, 2 looping - generator = await retry_(self._generator_mock)(error_on=3) - assert await generator.__anext__() == 0 - unpacked = [await generator.asend(i) for i in range(10)] - assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] - assert on_error.call_count == 3 - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_generator_close(self, sleep): - """ - Close should be passed through retry into target generator - """ - retry_ = retry_async.AsyncRetry(is_stream=True) - decorated = retry_(self._generator_mock) - exception_list = [] - generator = await decorated(10, exceptions_seen=exception_list) - for i in range(2): - await generator.__anext__() - await generator.aclose() - - assert isinstance(exception_list[0], GeneratorExit) - with pytest.raises(StopAsyncIteration): - # calling next on closed generator should raise error - await generator.__anext__() - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def 
test___call___with_new_generator_close(self, sleep): - """ - Close should be passed through retry into target generator, - even when it hasn't been iterated yet - """ - retry_ = retry_async.AsyncRetry(is_stream=True) - decorated = retry_(self._generator_mock) - exception_list = [] - generator = await decorated(10, exceptions_seen=exception_list) - await generator.aclose() - - with pytest.raises(StopAsyncIteration): - # calling next on closed generator should raise error - await generator.__anext__() - - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_generator_throw(self, sleep): - """ - Throw should be passed through retry into target generator - """ - - # The generator should not retry when it encounters a non-retryable error - retry_ = retry_async.AsyncRetry( - predicate=retry_async.if_exception_type(ValueError), - is_stream=True, - ) - decorated = retry_(self._generator_mock) - exception_list = [] - generator = await decorated(10, exceptions_seen=exception_list) - for i in range(2): - await generator.__anext__() - with pytest.raises(BufferError): - await generator.athrow(BufferError("test")) - assert isinstance(exception_list[0], BufferError) - with pytest.raises(StopAsyncIteration): - # calling next on closed generator should raise error - await generator.__anext__() - - # In contrast, the generator should retry if we throw a retryable exception - exception_list = [] - generator = await decorated(10, exceptions_seen=exception_list) - for i in range(2): - await generator.__anext__() - throw_val = await generator.athrow(ValueError("test")) - assert throw_val == 0 - assert isinstance(exception_list[0], ValueError) - # calling next on generator should not raise error, because it was retried - assert await generator.__anext__() == 1 - - @pytest.mark.parametrize("awaitable_wrapped", [True, False]) - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_iterable_send(self, 
sleep, awaitable_wrapped): - """ - Send should work like next if the wrapped iterable does not support it - """ - retry_ = retry_async.AsyncRetry(is_stream=True) - - def iterable_fn(): - class CustomIterable: - def __init__(self): - self.i = -1 - - def __aiter__(self): - return self - - async def __anext__(self): - self.i += 1 - return self.i - - return CustomIterable() - - if awaitable_wrapped: - - async def wrapper(): - return iterable_fn() - - decorated = retry_(wrapper) - else: - decorated = retry_(iterable_fn) - - retryable = await decorated() - result = await retryable.__anext__() - assert result == 0 - await retryable.asend("test") == 1 - await retryable.asend("test2") == 2 - await retryable.asend("test3") == 3 - - @pytest.mark.parametrize("awaitable_wrapped", [True, False]) - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def test___call___with_iterable_close(self, sleep, awaitable_wrapped): - """ - close should be handled by wrapper if wrapped iterable does not support it - """ - retry_ = retry_async.AsyncRetry(is_stream=True) - - def iterable_fn(): - class CustomIterable: - def __init__(self): - self.i = -1 - - def __aiter__(self): - return self - - async def __anext__(self): - self.i += 1 - return self.i - - return CustomIterable() - - if awaitable_wrapped: - - async def wrapper(): - return iterable_fn() - - decorated = retry_(wrapper) - else: - decorated = retry_(iterable_fn) - - # try closing active generator - retryable = await decorated() - assert await retryable.__anext__() == 0 - await retryable.aclose() - with pytest.raises(StopAsyncIteration): - await retryable.__anext__() - # try closing new generator - new_retryable = await decorated() - await new_retryable.aclose() - with pytest.raises(StopAsyncIteration): - await new_retryable.__anext__() - - @pytest.mark.parametrize("awaitable_wrapped", [True, False]) - @mock.patch("asyncio.sleep", autospec=True) - @pytest.mark.asyncio - async def 
test___call___with_iterable_throw(self, sleep, awaitable_wrapped): - """ - Throw should work even if the wrapped iterable does not support it - """ - - predicate = retry_async.if_exception_type(ValueError) - retry_ = retry_async.AsyncRetry(is_stream=True, predicate=predicate) - - def iterable_fn(): - class CustomIterable: - def __init__(self): - self.i = -1 - - def __aiter__(self): - return self - - async def __anext__(self): - self.i += 1 - return self.i - - return CustomIterable() - - if awaitable_wrapped: - - async def wrapper(): - return iterable_fn() - - decorated = retry_(wrapper) - else: - decorated = retry_(iterable_fn) - - # try throwing with active generator - retryable = await decorated() - assert await retryable.__anext__() == 0 - # should swallow errors in predicate - await retryable.athrow(ValueError("test")) - # should raise errors not in predicate - with pytest.raises(BufferError): - await retryable.athrow(BufferError("test")) - with pytest.raises(StopAsyncIteration): - await retryable.__anext__() - # try throwing with new generator - new_retryable = await decorated() - with pytest.raises(BufferError): - await new_retryable.athrow(BufferError("test")) - with pytest.raises(StopAsyncIteration): - await new_retryable.__anext__() - - @pytest.mark.asyncio - async def test_exc_factory_non_retryable_error(self): - """ - generator should give the option to override exception creation logic - test when non-retryable error is thrown - """ - from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming_async import retry_target_stream - - timeout = 6 - sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] - expected_final_err = RuntimeError("done") - expected_source_err = ZeroDivisionError("test4") - - def factory(*args, **kwargs): - assert len(kwargs) == 0 - assert args[0] == sent_errors - assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR - assert args[2] == timeout - return expected_final_err, 
expected_source_err - - generator = retry_target_stream( - self._generator_mock, - retry_async.if_exception_type(ValueError), - [0] * 3, - timeout=timeout, - exception_factory=factory, - ) - # initialize the generator - await generator.__anext__() - # trigger some retryable errors - await generator.athrow(sent_errors[0]) - await generator.athrow(sent_errors[1]) - # trigger a non-retryable error - with pytest.raises(expected_final_err.__class__) as exc_info: - await generator.athrow(sent_errors[2]) - assert exc_info.value == expected_final_err - assert exc_info.value.__cause__ == expected_source_err - - @pytest.mark.asyncio - async def test_exc_factory_timeout(self): - """ - generator should give the option to override exception creation logic - test when timeout is exceeded - """ - import time - from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming_async import retry_target_stream - - timeout = 2 - time_now = time.monotonic() - now_patcher = mock.patch( - "time.monotonic", - return_value=time_now, - ) - - with now_patcher as patched_now: - timeout = 2 - sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] - expected_final_err = RuntimeError("done") - expected_source_err = ZeroDivisionError("test4") - - def factory(*args, **kwargs): - assert len(kwargs) == 0 - assert args[0] == sent_errors - assert args[1] == RetryFailureReason.TIMEOUT - assert args[2] == timeout - return expected_final_err, expected_source_err - - generator = retry_target_stream( - self._generator_mock, - retry_async.if_exception_type(ValueError), - [0] * 3, - timeout=timeout, - exception_factory=factory, - ) - # initialize the generator - await generator.__anext__() - # trigger some retryable errors - await generator.athrow(sent_errors[0]) - await generator.athrow(sent_errors[1]) - # trigger a timeout - patched_now.return_value += timeout + 1 - with pytest.raises(expected_final_err.__class__) as exc_info: - await 
generator.athrow(sent_errors[2]) - assert exc_info.value == expected_final_err - assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/asyncio/test_retry_streaming_async.py b/tests/asyncio/test_retry_streaming_async.py new file mode 100644 index 00000000..3eaeb27a --- /dev/null +++ b/tests/asyncio/test_retry_streaming_async.py @@ -0,0 +1,638 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import re +import asyncio + +import mock +import pytest + +from google.api_core import exceptions +from google.api_core import retry_async +from google.api_core import retry_streaming_async + + +class TestAsyncStreamingRetry: + def test_constructor_defaults(self): + retry_ = retry_streaming_async.AsyncStreamingRetry() + assert retry_._predicate == retry_async.if_transient_error + assert retry_._initial == 1 + assert retry_._maximum == 60 + assert retry_._multiplier == 2 + assert retry_._deadline == 120 + assert retry_._on_error is None + + def test_constructor_options(self): + _some_function = mock.Mock() + + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=_some_function, + ) + assert retry_._predicate == mock.sentinel.predicate + assert retry_._initial == 1 + assert retry_._maximum == 2 + assert retry_._multiplier == 3 + assert retry_._deadline == 4 + assert retry_._on_error is _some_function + + def 
test_with_deadline(self): + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_deadline(42) + assert retry_ is not new_retry + assert new_retry._deadline == 42 + + # the rest of the attributes should remain the same + assert new_retry._predicate is retry_._predicate + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_predicate(self): + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_predicate(mock.sentinel.predicate) + assert retry_ is not new_retry + assert new_retry._predicate == mock.sentinel.predicate + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_delay_noop(self): + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay() + assert retry_ is not new_retry + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + + def test_with_delay(self): + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay(initial=1, 
maximum=2, multiplier=3) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 3 + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + deadline=120.0, + on_error=None, + ) + assert re.match( + ( + r"<AsyncStreamingRetry predicate=<function if_exception_type at 0x[^>]+>, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) + + async def _generator_mock( + self, + num=5, + error_on=None, + exceptions_seen=None, + sleep_time=0, + ): + """ + Helper to create a mock generator that yields a number of values + Generator can optionally raise an exception on a specific iteration + + Args: + - num (int): the number of values to yield + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising + - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value + """ + try: + for i in range(num): + if sleep_time: + await asyncio.sleep(sleep_time) + if error_on and i == error_on: + raise ValueError("generator mock error") + yield i + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_success(self, sleep): + """ + Test 
that a retry-decorated generator yields values as expected + This test checks a generator with no issues + """ + from collections.abc import AsyncGenerator + + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + + num = 10 + generator = await decorated(num) + # check types + assert isinstance(generator, AsyncGenerator) + assert isinstance(self._generator_mock(num), AsyncGenerator) + # check yield contents + unpacked = [i async for i in generator] + assert len(unpacked) == num + expected = [i async for i in self._generator_mock(num)] + for a, b in zip(unpacked, expected): + assert a == b + sleep.assert_not_called() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming_async.AsyncStreamingRetry( + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + timeout=None, + ) + generator = await retry_(self._generator_mock)(error_on=3) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [await generator.__anext__() for i in range(10)] + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ + import time + + on_error = mock.Mock() + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=retry_async.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + deadline=9.9, + ) + + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + 
) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = await decorated(error_on=1) + + with now_patcher as patched_now: + # Make sure that calls to fake asyncio.sleep() also advance the mocked + # time clock. + def increase_time(sleep_delay): + patched_now.return_value += sleep_delay + + sleep.side_effect = increase_time + + with pytest.raises(exceptions.RetryError): + [i async for i in generator] + + assert on_error.call_count == 4 + # check the delays + assert sleep.call_count == 3 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + # next wait would have put us over, so ended early + assert last_wait == 4 + assert total_wait == 7 + + @pytest.mark.asyncio + async def test___call___generator_cancellations(self): + """ + cancel calls should propagate to the generator + """ + # test without cancel as retryable + retry_ = retry_streaming_async.AsyncStreamingRetry() + utcnow = datetime.datetime.utcnow() + mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) + generator = await retry_(self._generator_mock)(sleep_time=0.2) + await generator.__anext__() == 0 + task = asyncio.create_task(generator.__anext__()) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + with pytest.raises(StopAsyncIteration): + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + + async def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ + + retry_ = retry_streaming_async.AsyncStreamingRetry() + + decorated = retry_(_mock_send_gen) + + generator = await decorated() + result = await generator.__anext__() + # first yield should be None + assert result is None + in_messages = 
["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = await generator.asend(msg) + out_messages.append(recv) + assert in_messages == out_messages + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_send_retry(self, sleep): + """ + Send should be retried if target generator raises an error + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming_async.AsyncStreamingRetry( + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + timeout=None, + ) + generator = await retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + await generator.asend("cannot send to fresh generator") + assert exc_info.match("can't send non-None value") + + # error thrown on 3 + # generator should contain 0, 1, 2 looping + generator = await retry_(self._generator_mock)(error_on=3) + assert await generator.__anext__() == 0 + unpacked = [await generator.asend(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + await generator.aclose() + + assert isinstance(exception_list[0], GeneratorExit) + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_new_generator_close(self, sleep): + """ + Close should be passed through retry into target generator, + even when it hasn't been iterated 
yet + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + await generator.aclose() + + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ + + # The generator should not retry when it encounters a non-retryable error + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=retry_async.if_exception_type(ValueError), + ) + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + with pytest.raises(BufferError): + await generator.athrow(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + # In contrast, the generator should retry if we throw a retryable exception + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + throw_val = await generator.athrow(ValueError("test")) + assert throw_val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on generator should not raise error, because it was retried + assert await generator.__anext__() == 1 + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_send(self, sleep, awaitable_wrapped): + """ + Send should work like next if the wrapped iterable does not support it + """ + retry_ = 
retry_streaming_async.AsyncStreamingRetry() + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + retryable = await decorated() + result = await retryable.__anext__() + assert result == 0 + await retryable.asend("test") == 1 + await retryable.asend("test2") == 2 + await retryable.asend("test3") == 3 + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_close(self, sleep, awaitable_wrapped): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = await decorated() + assert await retryable.__anext__() == 0 + await retryable.aclose() + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() + # try closing new generator + new_retryable = await decorated() + await new_retryable.aclose() + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped): + """ + Throw should work even if the wrapped iterable does not support it + 
""" + + predicate = retry_async.if_exception_type(ValueError) + retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate) + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try throwing with active generator + retryable = await decorated() + assert await retryable.__anext__() == 0 + # should swallow errors in predicate + await retryable.athrow(ValueError("test")) + # should raise errors not in predicate + with pytest.raises(BufferError): + await retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() + # try throwing with new generator + new_retryable = await decorated() + with pytest.raises(BufferError): + await new_retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + + @pytest.mark.asyncio + async def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry import RetryFailureReason + from google.api_core.retry_streaming_async import retry_target_stream + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + 
timeout=timeout, + exception_factory=factory, + ) + # initialize the generator + await generator.__anext__() + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + @pytest.mark.asyncio + async def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry import RetryFailureReason + from google.api_core.retry_streaming_async import retry_target_stream + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize the generator + await generator.__anext__() + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git 
a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 967a137a..5c65b448 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -540,436 +540,3 @@ def test___init___when_retry_is_executed(self, sleep, uniform): assert _some_function.call_count == 2 target.assert_has_calls([mock.call("meep"), mock.call("meep")]) sleep.assert_any_call(retry_._initial) - - def _generator_mock( - self, - num=5, - error_on=None, - return_val=None, - exceptions_seen=None, - ): - """ - Helper to create a mock generator that yields a number of values - Generator can optionally raise an exception on a specific iteration - - Args: - - num (int): the number of values to yield. After this, the generator will return `return_val` - - error_on (int): if given, the generator will raise a ValueError on the specified iteration - - return_val (any): if given, the generator will return this value after yielding num values - - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising - """ - try: - for i in range(num): - if error_on and i == error_on: - raise ValueError("generator mock error") - yield i - return return_val - except (Exception, BaseException, GeneratorExit) as e: - # keep track of exceptions seen by generator - if exceptions_seen is not None: - exceptions_seen.append(e) - raise - - @mock.patch("time.sleep", autospec=True) - def test___call___generator_success(self, sleep): - """ - Test that a retry-decorated generator yields values as expected - This test checks a generator with no issues - """ - import types - import collections - - retry_ = retry.Retry() - - decorated = retry_(self._generator_mock) - - num = 10 - result = decorated(num) - # check types - assert isinstance(decorated(num), collections.abc.Iterable) - assert isinstance(decorated(num), types.GeneratorType) - assert isinstance(self._generator_mock(num), collections.abc.Iterable) - assert isinstance(self._generator_mock(num), types.GeneratorType) - # check 
yield contents - unpacked = [i for i in result] - assert len(unpacked) == num - for a, b in zip(unpacked, self._generator_mock(num)): - assert a == b - sleep.assert_not_called() - - @mock.patch("time.sleep", autospec=True) - def test___call___generator_retry(self, sleep): - """ - Tests that a retry-decorated generator will retry on errors - """ - on_error = mock.Mock(return_value=None) - retry_ = retry.Retry( - on_error=on_error, - predicate=retry.if_exception_type(ValueError), - is_stream=True, - timeout=None, - ) - result = retry_(self._generator_mock)(error_on=3) - # error thrown on 3 - # generator should contain 0, 1, 2 looping - unpacked = [next(result) for i in range(10)] - assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] - assert on_error.call_count == 3 - - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) - @mock.patch("time.sleep", autospec=True) - def test___call___generator_retry_hitting_deadline(self, sleep, uniform): - """ - Tests that a retry-decorated generator will throw a RetryError - after using the time budget - """ - import time - - on_error = mock.Mock(return_value=None) - retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), - initial=1.0, - maximum=1024.0, - multiplier=2.0, - deadline=30.9, - is_stream=True, - ) - - timenow = time.monotonic() - now_patcher = mock.patch( - "time.monotonic", - return_value=timenow, - ) - - decorated = retry_(self._generator_mock, on_error=on_error) - generator = decorated(error_on=1) - with now_patcher as patched_now: - # Make sure that calls to fake time.sleep() also advance the mocked - # time clock. 
- def increase_time(sleep_delay): - patched_now.return_value += sleep_delay - - sleep.side_effect = increase_time - with pytest.raises(exceptions.RetryError): - [i for i in generator] - - assert on_error.call_count == 5 - # check the delays - assert sleep.call_count == 4 # once between each successive target calls - last_wait = sleep.call_args.args[0] - total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - assert last_wait == 8.0 - assert total_wait == 15.0 - - @mock.patch("time.sleep", autospec=True) - def test___call___with_generator_send(self, sleep): - """ - Send should be passed through retry into target generator - """ - - def _mock_send_gen(): - """ - always yield whatever was sent in - """ - in_ = yield - while True: - in_ = yield in_ - - retry_ = retry.Retry(is_stream=True) - - decorated = retry_(_mock_send_gen) - - generator = decorated() - result = next(generator) - # first yield should be None - assert result is None - in_messages = ["test_1", "hello", "world"] - out_messages = [] - for msg in in_messages: - recv = generator.send(msg) - out_messages.append(recv) - assert in_messages == out_messages - - @mock.patch("time.sleep", autospec=True) - def test___call___with_generator_send_retry(self, sleep): - """ - Send should support retries like next - """ - on_error = mock.Mock(return_value=None) - retry_ = retry.Retry( - on_error=on_error, - predicate=retry.if_exception_type(ValueError), - is_stream=True, - timeout=None, - ) - result = retry_(self._generator_mock)(error_on=3) - with pytest.raises(TypeError) as exc_info: - result.send("can not send to fresh generator") - assert exc_info.match("can't send non-None value") - # initiate iteration with None - result = retry_(self._generator_mock)(error_on=3) - assert result.send(None) == 0 - # error thrown on 3 - # generator should contain 0, 1, 2 looping - unpacked = [result.send(i) for i in range(10)] - assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] - assert on_error.call_count == 3 
- - @mock.patch("time.sleep", autospec=True) - def test___call___with_iterable_send(self, sleep): - """ - send should raise attribute error if wrapped iterator does not support it - """ - retry_ = retry.Retry(is_stream=True) - - def iterable_fn(n): - return iter(range(n)) - - decorated = retry_(iterable_fn) - generator = decorated(5) - # initialize - next(generator) - # call send - with pytest.raises(AttributeError): - generator.send("test") - - @mock.patch("time.sleep", autospec=True) - def test___call___with_iterable_close(self, sleep): - """ - close should be handled by wrapper if wrapped iterable does not support it - """ - retry_ = retry.Retry(is_stream=True) - - def iterable_fn(n): - return iter(range(n)) - - decorated = retry_(iterable_fn) - - # try closing active generator - retryable = decorated(10) - assert next(retryable) == 0 - retryable.close() - with pytest.raises(StopIteration): - next(retryable) - - # try closing a new generator - retryable = decorated(10) - retryable.close() - with pytest.raises(StopIteration): - next(retryable) - - @mock.patch("time.sleep", autospec=True) - def test___call___with_iterable_throw(self, sleep): - """ - Throw should work even if the wrapped iterable does not support it - """ - predicate = retry.if_exception_type(ValueError) - retry_ = retry.Retry(is_stream=True, predicate=predicate) - - def iterable_fn(n): - return iter(range(n)) - - decorated = retry_(iterable_fn) - - # try throwing with active generator - retryable = decorated(10) - assert next(retryable) == 0 - # should swallow errors in predicate - retryable.throw(ValueError) - assert next(retryable) == 1 - # should raise on other errors - with pytest.raises(TypeError): - retryable.throw(TypeError) - with pytest.raises(StopIteration): - next(retryable) - - # try throwing with a new generator - retryable = decorated(10) - with pytest.raises(ValueError): - retryable.throw(ValueError) - with pytest.raises(StopIteration): - next(retryable) - - 
@mock.patch("time.sleep", autospec=True) - def test___call___with_generator_return(self, sleep): - """ - Generator return value should be passed through retry decorator - """ - retry_ = retry.Retry() - - decorated = retry_(self._generator_mock) - - expected_value = "done" - generator = decorated(5, return_val=expected_value) - found_value = None - try: - while True: - next(generator) - except StopIteration as e: - found_value = e.value - assert found_value == expected_value - - @mock.patch("time.sleep", autospec=True) - def test___call___with_generator_close(self, sleep): - """ - Close should be passed through retry into target generator - """ - retry_ = retry.Retry(is_stream=True) - - decorated = retry_(self._generator_mock) - - exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) - for i in range(2): - next(generator) - generator.close() - assert isinstance(exception_list[0], GeneratorExit) - with pytest.raises(StopIteration): - # calling next on closed generator should raise error - next(generator) - - @mock.patch("time.sleep", autospec=True) - def test___call___with_generator_throw(self, sleep): - """ - Throw should be passed through retry into target generator - """ - retry_ = retry.Retry( - predicate=retry.if_exception_type(ValueError), is_stream=True - ) - decorated = retry_(self._generator_mock) - - exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) - for i in range(2): - next(generator) - with pytest.raises(BufferError): - generator.throw(BufferError("test")) - assert isinstance(exception_list[0], BufferError) - with pytest.raises(StopIteration): - # calling next on closed generator should raise error - next(generator) - # should retry if throw retryable exception - exception_list = [] - generator = decorated(10, exceptions_seen=exception_list) - for i in range(2): - next(generator) - val = generator.throw(ValueError("test")) - assert val == 0 - assert isinstance(exception_list[0], ValueError) - # 
calling next on closed generator should not raise error - assert next(generator) == 1 - - @mock.patch("time.sleep", autospec=True) - def test___call___with_is_stream(self, sleep): - """ - is_stream should determine if the target is wrapped as a - generator or as a callable - """ - gen_retry_ = retry.Retry( - is_stream=True, predicate=retry.if_exception_type(ValueError) - ) - not_gen_retry_ = retry.Retry( - is_stream=False, predicate=retry.if_exception_type(ValueError) - ) - auto_retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) - # force generator to act as non-generator - with pytest.raises(ValueError): - # generator should not retry if error is thrown on yield - gen = not_gen_retry_(self._generator_mock)(10, error_on=3) - unpacked = [next(gen) for i in range(10)] - wrapped = functools.partial(self._generator_mock, 10, error_on=6) - with pytest.raises(ValueError): - # generator should not retry if error is thrown on yield - gen = auto_retry_(wrapped)() - [next(gen) for i in range(10)] - # force non-detected to be accepted as generator - gen = gen_retry_(wrapped)() - unpacked = [next(gen) for i in range(10)] - assert unpacked == [0, 1, 2, 3, 4, 5, 0, 1, 2, 3] - - def test_exc_factory_non_retryable_error(self): - """ - generator should give the option to override exception creation logic - test when non-retryable error is thrown - """ - from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming import retry_target_stream - - timeout = None - sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] - expected_final_err = RuntimeError("done") - expected_source_err = ZeroDivisionError("test4") - - def factory(*args, **kwargs): - assert len(kwargs) == 0 - assert args[0] == sent_errors - assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR - assert args[2] == timeout - return expected_final_err, expected_source_err - - generator = retry_target_stream( - self._generator_mock, - 
retry.if_exception_type(ValueError), - [0] * 3, - timeout=timeout, - exception_factory=factory, - ) - # initialize generator - next(generator) - # trigger some retryable errors - generator.throw(sent_errors[0]) - generator.throw(sent_errors[1]) - # trigger a non-retryable error - with pytest.raises(expected_final_err.__class__) as exc_info: - generator.throw(sent_errors[2]) - assert exc_info.value == expected_final_err - assert exc_info.value.__cause__ == expected_source_err - - def test_exc_factory_timeout(self): - """ - generator should give the option to override exception creation logic - test when timeout is exceeded - """ - import time - from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming import retry_target_stream - - timeout = 2 - time_now = time.monotonic() - now_patcher = mock.patch( - "time.monotonic", - return_value=time_now, - ) - - with now_patcher as patched_now: - timeout = 2 - sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] - expected_final_err = RuntimeError("done") - expected_source_err = ZeroDivisionError("test4") - - def factory(*args, **kwargs): - assert len(kwargs) == 0 - assert args[0] == sent_errors - assert args[1] == RetryFailureReason.TIMEOUT - assert args[2] == timeout - return expected_final_err, expected_source_err - - generator = retry_target_stream( - self._generator_mock, - retry.if_exception_type(ValueError), - [0] * 3, - timeout=timeout, - exception_factory=factory, - check_timeout_on_yield=True, - ) - # initialize generator - next(generator) - # trigger some retryable errors - generator.throw(sent_errors[0]) - generator.throw(sent_errors[1]) - # trigger a timeout - patched_now.return_value += timeout + 1 - with pytest.raises(expected_final_err.__class__) as exc_info: - generator.throw(sent_errors[2]) - assert exc_info.value == expected_final_err - assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/unit/test_retry_streaming.py 
b/tests/unit/test_retry_streaming.py new file mode 100644 index 00000000..3bee8ace --- /dev/null +++ b/tests/unit/test_retry_streaming.py @@ -0,0 +1,586 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import re + +import mock +import pytest + +from google.api_core import exceptions +from google.api_core import retry +from google.api_core import retry_streaming + + +class TestStreamingRetry(object): + def test_constructor_defaults(self): + retry_ = retry_streaming.StreamingRetry() + assert retry_._predicate == retry.if_transient_error + assert retry_._initial == 1 + assert retry_._maximum == 60 + assert retry_._multiplier == 2 + assert retry_._deadline == 120 + assert retry_._on_error is None + assert retry_.deadline == 120 + assert retry_.timeout == 120 + + def test_constructor_options(self): + _some_function = mock.Mock() + + retry_ = retry_streaming.StreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=_some_function, + ) + assert retry_._predicate == mock.sentinel.predicate + assert retry_._initial == 1 + assert retry_._maximum == 2 + assert retry_._multiplier == 3 + assert retry_._deadline == 4 + assert retry_._on_error is _some_function + + def test_with_deadline(self): + retry_ = retry_streaming.StreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + 
new_retry = retry_.with_deadline(42) + assert retry_ is not new_retry + assert new_retry._deadline == 42 + + # the rest of the attributes should remain the same + assert new_retry._predicate is retry_._predicate + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_predicate(self): + retry_ = retry_streaming.StreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_predicate(mock.sentinel.predicate) + assert retry_ is not new_retry + assert new_retry._predicate == mock.sentinel.predicate + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_delay_noop(self): + retry_ = retry_streaming.StreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay() + assert retry_ is not new_retry + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + + def test_with_delay(self): + retry_ = retry_streaming.StreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) + assert retry_ is not new_retry + assert new_retry._initial == 5 + assert new_retry._maximum == 6 + assert new_retry._multiplier == 7 + + # the rest of the attributes should remain the same + assert new_retry._deadline == 
retry_._deadline + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test_with_delay_partial_options(self): + retry_ = retry_streaming.StreamingRetry( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay(initial=4) + assert retry_ is not new_retry + assert new_retry._initial == 4 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 3 + + new_retry = retry_.with_delay(maximum=4) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 4 + assert new_retry._multiplier == 3 + + new_retry = retry_.with_delay(multiplier=4) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 4 + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. + retry_ = retry_streaming.StreamingRetry( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + deadline=120.0, + on_error=None, + ) + assert re.match( + ( + r", " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) + + def _generator_mock( + self, + num=5, + error_on=None, + return_val=None, + exceptions_seen=None, + ): + """ + Helper to create a mock generator that yields a number of values + Generator can optionally raise an exception on a specific iteration + + Args: + - num (int): the number of values to yield. 
After this, the generator will return `return_val` + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - return_val (any): if given, the generator will return this value after yielding num values + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising + """ + try: + for i in range(num): + if error_on and i == error_on: + raise ValueError("generator mock error") + yield i + return return_val + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise + + @mock.patch("time.sleep", autospec=True) + def test___call___success(self, sleep): + """ + Test that a retry-decorated generator yields values as expected + This test checks a generator with no issues + """ + import types + import collections + + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + num = 10 + result = decorated(num) + # check types + assert isinstance(decorated(num), collections.abc.Iterable) + assert isinstance(decorated(num), types.GeneratorType) + assert isinstance(self._generator_mock(num), collections.abc.Iterable) + assert isinstance(self._generator_mock(num), types.GeneratorType) + # check yield contents + unpacked = [i for i in result] + assert len(unpacked) == num + for a, b in zip(unpacked, self._generator_mock(num)): + assert a == b + sleep.assert_not_called() + + @mock.patch("time.sleep", autospec=True) + def test___call___retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [next(result) for i in 
range(10)] + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("time.sleep", autospec=True) + def test___call___retry_hitting_deadline(self, sleep, uniform): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ + import time + + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + predicate=retry.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + deadline=30.9, + ) + + timenow = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=timenow, + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = decorated(error_on=1) + with now_patcher as patched_now: + # Make sure that calls to fake time.sleep() also advance the mocked + # time clock. + def increase_time(sleep_delay): + patched_now.return_value += sleep_delay + + sleep.side_effect = increase_time + with pytest.raises(exceptions.RetryError): + [i for i in generator] + + assert on_error.call_count == 5 + # check the delays + assert sleep.call_count == 4 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + assert last_wait == 8.0 + assert total_wait == 15.0 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + + def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ + + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(_mock_send_gen) + + generator = decorated() + result = next(generator) + # first yield should be None + assert result is None + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in 
in_messages: + recv = generator.send(msg) + out_messages.append(recv) + assert in_messages == out_messages + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send_retry(self, sleep): + """ + Send should support retries like next + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + result.send("can not send to fresh generator") + assert exc_info.match("can't send non-None value") + # initiate iteration with None + result = retry_(self._generator_mock)(error_on=3) + assert result.send(None) == 0 + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [result.send(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_send(self, sleep): + """ + send should raise attribute error if wrapped iterator does not support it + """ + retry_ = retry_streaming.StreamingRetry() + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + generator = decorated(5) + # initialize + next(generator) + # call send + with pytest.raises(AttributeError): + generator.send("test") + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_close(self, sleep): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_streaming.StreamingRetry() + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = decorated(10) + assert next(retryable) == 0 + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + + # try closing a new generator + retryable = decorated(10) + retryable.close() + with 
pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_throw(self, sleep): + """ + Throw should work even if the wrapped iterable does not support it + """ + predicate = retry.if_exception_type(ValueError) + retry_ = retry_streaming.StreamingRetry(predicate=predicate) + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try throwing with active generator + retryable = decorated(10) + assert next(retryable) == 0 + # should swallow errors in predicate + retryable.throw(ValueError) + assert next(retryable) == 1 + # should raise on other errors + with pytest.raises(TypeError): + retryable.throw(TypeError) + with pytest.raises(StopIteration): + next(retryable) + + # try throwing with a new generator + retryable = decorated(10) + with pytest.raises(ValueError): + retryable.throw(ValueError) + with pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_return(self, sleep): + """ + Generator return value should be passed through retry decorator + """ + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + expected_value = "done" + generator = decorated(5, return_val=expected_value) + found_value = None + try: + while True: + next(generator) + except StopIteration as e: + found_value = e.value + assert found_value == expected_value + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + generator.close() + assert isinstance(exception_list[0], GeneratorExit) + with pytest.raises(StopIteration): + # calling next on closed generator 
should raise error + next(generator) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ + retry_ = retry_streaming.StreamingRetry( + predicate=retry.if_exception_type(ValueError), + ) + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + with pytest.raises(BufferError): + generator.throw(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + # should retry if throw retryable exception + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + val = generator.throw(ValueError("test")) + assert val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on closed generator should not raise error + assert next(generator) == 1 + + def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry import RetryFailureReason + from google.api_core.retry_streaming import retry_target_stream + + timeout = None + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize generator + 
next(generator) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry import RetryFailureReason + from google.api_core.retry_streaming import retry_target_stream + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # initialize generator + next(generator) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err From 5637e88328b90e2043db3ba727f28a09557def03 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 15:03:25 -0800 Subject: [PATCH 158/204] added typing to base retry --- 
google/api_core/retry.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 447285e9..aa1c0be8 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -78,6 +78,10 @@ def check_if_exists(): from typing import ParamSpec else: from typing_extensions import ParamSpec + if sys.version_info >= (3, 11): + from typing import Self + else: + from typing_extensions import Self _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value @@ -316,7 +320,7 @@ def __call__(self, *args, **kwargs) -> Any: raise NotImplementedError("Not implemented in base class") @property - def deadline(self): + def deadline(self) -> float | None: """ DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class documentation for details. @@ -324,7 +328,7 @@ def deadline(self): return self._timeout @property - def timeout(self): + def timeout(self) -> float | None: return self._timeout def _replace( @@ -335,7 +339,7 @@ def _replace( multiplier=None, timeout=None, on_error=None, - ): + ) -> Self: return type(self)( predicate=predicate or self._predicate, initial=initial or self._initial, @@ -345,7 +349,7 @@ def _replace( on_error=on_error or self._on_error, ) - def with_deadline(self, deadline): + def with_deadline(self, deadline) -> Self: """Return a copy of this retry with the given timeout. DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class @@ -359,7 +363,7 @@ def with_deadline(self, deadline): """ return self._replace(timeout=deadline) - def with_timeout(self, timeout): + def with_timeout(self, timeout) -> Self: """Return a copy of this retry with the given timeout. Args: @@ -370,7 +374,7 @@ def with_timeout(self, timeout): """ return self._replace(timeout=timeout) - def with_predicate(self, predicate): + def with_predicate(self, predicate) -> Self: """Return a copy of this retry with the given predicate. 
Args: @@ -382,7 +386,7 @@ def with_predicate(self, predicate): """ return self._replace(predicate=predicate) - def with_delay(self, initial=None, maximum=None, multiplier=None): + def with_delay(self, initial=None, maximum=None, multiplier=None) -> Self: """Return a copy of this retry with the given delay options. Args: @@ -396,7 +400,7 @@ def with_delay(self, initial=None, maximum=None, multiplier=None): """ return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) - def __str__(self): + def __str__(self) -> str: return ( "<{} predicate={}, initial={:.1f}, maximum={:.1f}, " "multiplier={:.1f}, timeout={}, on_error={}>".format( From c4be5f203f4e8fea3f151e9cdabda418bf8a9b56 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 15:16:10 -0800 Subject: [PATCH 159/204] share base retry logic --- tests/asyncio/test_retry_async.py | 106 +-------------- tests/asyncio/test_retry_streaming_async.py | 110 ++-------------- tests/unit/test_retry.py | 49 +++++-- tests/unit/test_retry_streaming.py | 138 +------------------- 4 files changed, 59 insertions(+), 344 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index 7c86d0e4..eafec3c9 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -22,6 +22,8 @@ from google.api_core import exceptions from google.api_core import retry_async +from ..unit.test_retry import Test_BaseRetry + @mock.patch("asyncio.sleep", autospec=True) @mock.patch( @@ -142,108 +144,10 @@ async def test_retry_streaming_target_bad_sleep_generator(): await retry_target_stream(None, None, [], None).__anext__() -class TestAsyncRetry: - def test_constructor_defaults(self): - retry_ = retry_async.AsyncRetry() - assert retry_._predicate == retry_async.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - - def 
test_constructor_options(self): - _some_function = mock.Mock() - - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, - ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function - - def test_with_deadline(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert 
new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier +class TestAsyncRetry(Test_BaseRetry): - def test_with_delay(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error + def _make_one(self, *args, **kwargs): + return retry_async.AsyncRetry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): diff --git a/tests/asyncio/test_retry_streaming_async.py b/tests/asyncio/test_retry_streaming_async.py index 3eaeb27a..bc444bcf 100644 --- a/tests/asyncio/test_retry_streaming_async.py +++ b/tests/asyncio/test_retry_streaming_async.py @@ -23,109 +23,13 @@ from google.api_core import retry_async from google.api_core import retry_streaming_async +from ..unit.test_retry import Test_BaseRetry -class TestAsyncStreamingRetry: - def test_constructor_defaults(self): - retry_ = retry_streaming_async.AsyncStreamingRetry() - assert retry_._predicate == retry_async.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - - def test_constructor_options(self): - _some_function = mock.Mock() - retry_ = retry_streaming_async.AsyncStreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, - ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - 
assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function - - def test_with_deadline(self): - retry_ = retry_streaming_async.AsyncStreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry_streaming_async.AsyncStreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = retry_streaming_async.AsyncStreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - - def test_with_delay(self): - retry_ = retry_streaming_async.AsyncStreamingRetry( - predicate=mock.sentinel.predicate, - 
initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 +class TestAsyncStreamingRetry(Test_BaseRetry): - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error + def _make_one(self, *args, **kwargs): + return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): @@ -215,6 +119,7 @@ async def test___call___generator_retry(self, sleep): retry_ = retry_streaming_async.AsyncStreamingRetry( on_error=on_error, predicate=retry_async.if_exception_type(ValueError), + timeout=None, ) generator = await retry_(self._generator_mock)(error_on=3) @@ -241,6 +146,7 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): maximum=1024.0, multiplier=2.0, deadline=9.9, + ) time_now = time.monotonic() @@ -330,6 +236,7 @@ async def test___call___generator_send_retry(self, sleep): retry_ = retry_streaming_async.AsyncStreamingRetry( on_error=on_error, predicate=retry_async.if_exception_type(ValueError), + timeout=None, ) generator = await retry_(self._generator_mock)(error_on=3) @@ -391,6 +298,7 @@ async def test___call___with_generator_throw(self, sleep): # The generator should not retry when it encounters a non-retryable error retry_ = retry_streaming_async.AsyncStreamingRetry( predicate=retry_async.if_exception_type(ValueError), + ) decorated = retry_(self._generator_mock) exception_list = [] @@ -507,7 +415,7 @@ async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped): """ predicate = retry_async.if_exception_type(ValueError) - retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate) + 
retry_ = retry_streaming_async.AsyncStreamingRetry( predicate=predicate) def iterable_fn(): class CustomIterable: diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 5c65b448..61d6c153 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -237,9 +237,13 @@ def test_retry_streaming_target_bad_sleep_generator(): next(retry_target_stream(None, None, [], None)) -class TestRetry(object): +class Test_BaseRetry(object): + + def _make_one(self, *args, **kwargs): + return retry._BaseRetry(*args, **kwargs) + def test_constructor_defaults(self): - retry_ = retry.Retry() + retry_ = self._make_one() assert retry_._predicate == retry.if_transient_error assert retry_._initial == 1 assert retry_._maximum == 60 @@ -252,7 +256,7 @@ def test_constructor_defaults(self): def test_constructor_options(self): _some_function = mock.Mock() - retry_ = retry.Retry( + retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, @@ -268,7 +272,7 @@ def test_constructor_options(self): assert retry_._on_error is _some_function def test_with_deadline(self): - retry_ = retry.Retry( + retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, @@ -288,7 +292,7 @@ def test_with_deadline(self): assert new_retry._on_error is retry_._on_error def test_with_predicate(self): - retry_ = retry.Retry( + retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, @@ -308,7 +312,7 @@ def test_with_predicate(self): assert new_retry._on_error is retry_._on_error def test_with_delay_noop(self): - retry_ = retry.Retry( + retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, @@ -323,7 +327,7 @@ def test_with_delay_noop(self): assert new_retry._multiplier == retry_._multiplier def test_with_delay(self): - retry_ = retry.Retry( + retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, @@ -343,7 +347,7 @@ def test_with_delay(self): assert new_retry._on_error is 
retry_._on_error def test_with_delay_partial_options(self): - retry_ = retry.Retry( + retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, @@ -374,6 +378,35 @@ def test_with_delay_partial_options(self): assert new_retry._predicate is retry_._predicate assert new_retry._on_error is retry_._on_error + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. + retry_ = self._make_one( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + deadline=120.0, + on_error=None, + ) + assert re.match( + ( + r"<_BaseRetry predicate=, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) + + +class TestRetry(Test_BaseRetry): + + def _make_one(self, *args, **kwargs): + return retry.Retry(*args, **kwargs) + def test___str__(self): def if_exception_type(exc): return bool(exc) # pragma: NO COVER diff --git a/tests/unit/test_retry_streaming.py b/tests/unit/test_retry_streaming.py index 3bee8ace..61642394 100644 --- a/tests/unit/test_retry_streaming.py +++ b/tests/unit/test_retry_streaming.py @@ -22,143 +22,13 @@ from google.api_core import retry from google.api_core import retry_streaming +from .test_retry import Test_BaseRetry -class TestStreamingRetry(object): - def test_constructor_defaults(self): - retry_ = retry_streaming.StreamingRetry() - assert retry_._predicate == retry.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - assert retry_.deadline == 120 - assert retry_.timeout == 120 - - def test_constructor_options(self): - _some_function = mock.Mock() - retry_ = retry_streaming.StreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - 
on_error=_some_function, - ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function - - def test_with_deadline(self): - retry_ = retry_streaming.StreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry_streaming.StreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = retry_streaming.StreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier +class TestStreamingRetry(Test_BaseRetry): 
- def test_with_delay(self): - retry_ = retry_streaming.StreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) - assert retry_ is not new_retry - assert new_retry._initial == 5 - assert new_retry._maximum == 6 - assert new_retry._multiplier == 7 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error - - def test_with_delay_partial_options(self): - retry_ = retry_streaming.StreamingRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=4) - assert retry_ is not new_retry - assert new_retry._initial == 4 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(maximum=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 4 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(multiplier=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 4 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error + def _make_one(self, *args, **kwargs): + return retry_streaming.StreamingRetry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): From 4d9e762a01f7de9f336db4dad2dd35effc997ca9 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 20 Nov 2023 23:18:04 +0000 Subject: [PATCH 160/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/asyncio/test_retry_async.py | 1 - tests/asyncio/test_retry_streaming_async.py | 7 +------ tests/unit/test_retry.py | 2 -- tests/unit/test_retry_streaming.py | 1 - 4 files changed, 1 insertion(+), 10 deletions(-) diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index eafec3c9..c286e1a9 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -145,7 +145,6 @@ async def test_retry_streaming_target_bad_sleep_generator(): class TestAsyncRetry(Test_BaseRetry): - def _make_one(self, *args, **kwargs): return retry_async.AsyncRetry(*args, **kwargs) diff --git a/tests/asyncio/test_retry_streaming_async.py b/tests/asyncio/test_retry_streaming_async.py index bc444bcf..301e0607 100644 --- a/tests/asyncio/test_retry_streaming_async.py +++ b/tests/asyncio/test_retry_streaming_async.py @@ -27,7 +27,6 @@ class TestAsyncStreamingRetry(Test_BaseRetry): - def _make_one(self, *args, **kwargs): return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs) @@ -119,7 +118,6 @@ async def test___call___generator_retry(self, sleep): retry_ = retry_streaming_async.AsyncStreamingRetry( on_error=on_error, predicate=retry_async.if_exception_type(ValueError), - timeout=None, ) generator = await retry_(self._generator_mock)(error_on=3) @@ -146,7 +144,6 @@ async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): maximum=1024.0, multiplier=2.0, deadline=9.9, - ) time_now = time.monotonic() @@ -236,7 +233,6 @@ async def test___call___generator_send_retry(self, sleep): retry_ = retry_streaming_async.AsyncStreamingRetry( on_error=on_error, predicate=retry_async.if_exception_type(ValueError), - timeout=None, ) generator = await retry_(self._generator_mock)(error_on=3) @@ -298,7 +294,6 @@ async def test___call___with_generator_throw(self, sleep): # The 
generator should not retry when it encounters a non-retryable error retry_ = retry_streaming_async.AsyncStreamingRetry( predicate=retry_async.if_exception_type(ValueError), - ) decorated = retry_(self._generator_mock) exception_list = [] @@ -415,7 +410,7 @@ async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped): """ predicate = retry_async.if_exception_type(ValueError) - retry_ = retry_streaming_async.AsyncStreamingRetry( predicate=predicate) + retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate) def iterable_fn(): class CustomIterable: diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 61d6c153..3855da97 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -238,7 +238,6 @@ def test_retry_streaming_target_bad_sleep_generator(): class Test_BaseRetry(object): - def _make_one(self, *args, **kwargs): return retry._BaseRetry(*args, **kwargs) @@ -403,7 +402,6 @@ def if_exception_type(exc): class TestRetry(Test_BaseRetry): - def _make_one(self, *args, **kwargs): return retry.Retry(*args, **kwargs) diff --git a/tests/unit/test_retry_streaming.py b/tests/unit/test_retry_streaming.py index 61642394..7b1ca149 100644 --- a/tests/unit/test_retry_streaming.py +++ b/tests/unit/test_retry_streaming.py @@ -26,7 +26,6 @@ class TestStreamingRetry(Test_BaseRetry): - def _make_one(self, *args, **kwargs): return retry_streaming.StreamingRetry(*args, **kwargs) From 2e9e84baedcbb7a3a3655fc18917ee46e92bccb7 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 15:31:04 -0800 Subject: [PATCH 161/204] lint and mypy cleanup --- google/api_core/retry.py | 28 ++++++++++++------------ google/api_core/retry_async.py | 9 ++------ google/api_core/retry_streaming.py | 4 ++-- google/api_core/retry_streaming_async.py | 6 ++--- tests/asyncio/test_retry_async.py | 1 - tests/unit/test_retry.py | 1 - tests/unit/test_retry_streaming.py | 1 - 7 files changed, 21 insertions(+), 29 deletions(-) diff --git 
a/google/api_core/retry.py b/google/api_core/retry.py index aa1c0be8..04fb0274 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -65,7 +65,7 @@ def check_if_exists(): from enum import Enum import inspect import warnings -from typing import Any, Callable, TypeVar, Generator, Iterable, cast, TYPE_CHECKING +from typing import Any, Callable, TypeVar, TYPE_CHECKING import requests.exceptions @@ -142,8 +142,8 @@ def _build_retry_error( def if_exception_type( - *exception_types: type[BaseException], -) -> Callable[[BaseException], bool]: + *exception_types: type[Exception], +) -> Callable[[Exception], bool]: """Creates a predicate to check if the exception is of a given type. Args: @@ -155,7 +155,7 @@ def if_exception_type( exception is of the given type(s). """ - def if_exception_type_predicate(exception: BaseException) -> bool: + def if_exception_type_predicate(exception: Exception) -> bool: """Bound predicate for checking an exception type.""" return isinstance(exception, exception_types) @@ -300,12 +300,12 @@ class _BaseRetry(object): def __init__( self, - predicate: Callable[[BaseException], bool] = if_transient_error, + predicate: Callable[[Exception], bool] = if_transient_error, initial: float = _DEFAULT_INITIAL_DELAY, maximum: float = _DEFAULT_MAXIMUM_DELAY, multiplier: float = _DEFAULT_DELAY_MULTIPLIER, timeout: float = _DEFAULT_DEADLINE, - on_error: Callable[[BaseException], Any] | None = None, + on_error: Callable[[Exception], Any] | None = None, **kwargs: Any, ) -> None: self._predicate = predicate @@ -511,7 +511,7 @@ class Retry(_BaseRetry): def __call__( self, func: Callable[_P, _R], - on_error: Callable[[BaseException], Any] | None = None, + on_error: Callable[[Exception], Any] | None = None, ) -> Callable[_P, _R]: """Wrap a callable with retry behavior. 
@@ -537,12 +537,12 @@ def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) - retry_kwargs = { - "predicate": self._predicate, - "sleep_generator": sleep_generator, - "timeout": self._timeout, - "on_error": on_error, - } - return retry_target(target, **retry_kwargs) + return retry_target( + target, + self._predicate, + sleep_generator, + timeout=self._timeout, + on_error=on_error, + ) return retry_wrapped_func diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 3ec7ac46..a38d4ea4 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -62,9 +62,6 @@ async def check_if_exists(): Any, Callable, TypeVar, - AsyncGenerator, - AsyncIterable, - cast, TYPE_CHECKING, ) @@ -73,9 +70,7 @@ async def check_if_exists(): from google.api_core.retry import _BaseRetry from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error -from google.api_core.retry import _build_retry_error -from google.api_core.retry import RetryFailureReason +from google.api_core.retry import if_transient_error # noqa: F401 if TYPE_CHECKING: import sys @@ -207,7 +202,7 @@ class AsyncRetry(_BaseRetry): def __call__( self, func: Callable[..., Awaitable[_R]], - on_error: Callable[[BaseException], Any] | None = None, + on_error: Callable[[Exception], Any] | None = None, ) -> Callable[_P, Awaitable[_R]]: """Wrap a callable with retry behavior. 
diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 623049fc..df5be73c 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -37,7 +37,7 @@ from google.api_core.retry import _BaseRetry from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error +from google.api_core.retry import if_transient_error # noqa: F401 from google.api_core.retry import _build_retry_error from google.api_core.retry import RetryFailureReason @@ -298,7 +298,7 @@ def on_error(e): def __call__( self, func: Callable[_P, Iterable[_Y]], - on_error: Callable[[BaseException], Any] | None = None, + on_error: Callable[[Exception], Any] | None = None, ) -> Callable[_P, Generator[_Y, Any, None]]: """Wrap a callable with retry behavior. diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index be276894..2ae6983a 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -43,7 +43,7 @@ from google.api_core.retry import _BaseRetry from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error +from google.api_core.retry import if_transient_error # noqa: F401 from google.api_core.retry import _build_retry_error from google.api_core.retry import RetryFailureReason @@ -180,7 +180,7 @@ async def retry_target_stream( # handle exceptions raised by the target_iterator # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
- except (Exception, asyncio.CancelledError) as exc: + except Exception as exc: error_list.append(exc) if not predicate(exc): exc, source_exc = exc_factory( @@ -297,7 +297,7 @@ def on_error(e): def __call__( self, func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], - on_error: Callable[[BaseException], Any] | None = None, + on_error: Callable[[Exception], Any] | None = None, ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]: """Wrap a callable with retry behavior. diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/test_retry_async.py index c286e1a9..b50cddf2 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -14,7 +14,6 @@ import datetime import re -import asyncio import mock import pytest diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 3855da97..6976ce0d 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -15,7 +15,6 @@ import datetime import itertools import re -import functools import mock import pytest diff --git a/tests/unit/test_retry_streaming.py b/tests/unit/test_retry_streaming.py index 7b1ca149..a0257b68 100644 --- a/tests/unit/test_retry_streaming.py +++ b/tests/unit/test_retry_streaming.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import datetime import re import mock From d183a7e920ec8f56bf46d9d882bad1fef8399038 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 17:32:09 -0800 Subject: [PATCH 162/204] removed unneeded changes --- google/api_core/retry.py | 1 - google/api_core/retry_streaming_async.py | 11 ++++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 04fb0274..e9ec1070 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -85,7 +85,6 @@ def check_if_exists(): _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value - _Y = TypeVar("_Y") # target stream yielded values _LOGGER = logging.getLogger(__name__) _DEFAULT_INITIAL_DELAY = 1.0 # seconds diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 2ae6983a..06432178 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -321,15 +321,16 @@ async def retry_wrapped_func( *args: _P.args, **kwargs: _P.kwargs ) -> AsyncGenerator[_Y, None]: """A wrapper that calls target function with retry.""" + target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) return retry_target_stream( - functools.partial(func, *args, **kwargs), - predicate=self._predicate, - sleep_generator=sleep_generator, - timeout=self._timeout, - on_error=on_error, + target, + self._predicate, + sleep_generator, + self._timeout, + on_error, ) return retry_wrapped_func From e2d9c9cc2526f0305d137a5858658a2e26bbda80 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 18:41:30 -0800 Subject: [PATCH 163/204] pass in initial args and kwargs to retry_target_stream --- google/api_core/retry_streaming.py | 19 +++++++++++++------ google/api_core/retry_streaming_async.py | 23 +++++++++++++++-------- 2 files changed, 
28 insertions(+), 14 deletions(-) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index df5be73c..5695c67f 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -54,7 +54,7 @@ def retry_target_stream( - target: Callable[[], Iterable[_Y]], + target: Callable[_P, Iterable[_Y]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, @@ -65,6 +65,8 @@ def retry_target_stream( Tuple[Exception, Optional[Exception]], ] ] = None, + init_args: _P.args = (), + init_kwargs: _P.kwargs = {}, **kwargs, ) -> Generator[_Y, Any, None]: """Create a generator wrapper that retries the wrapped stream if it fails. @@ -73,8 +75,7 @@ def retry_target_stream( higher-level retry helper :class:`Retry`. Args: - target: The generator function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. + target: The generator function to call and retry. predicate: A callable used to determine if an exception raised by the target should be considered retryable. It should return True to retry or False otherwise. @@ -94,6 +95,8 @@ def retry_target_stream( along with the cause exception if any. If not provided, a default implementation will raise a RetryError on timeout, or the last exception encountered otherwise. + init_args: Positional arguments to pass to the target function. + init_kwargs: Keyword arguments to pass to the target function. Returns: Generator: A retryable generator that wraps the target generator function. @@ -114,8 +117,10 @@ def retry_target_stream( for sleep in sleep_generator: # Start a new retry loop try: - # create and yield from a new instance of the generator from input generator function - subgenerator = target() + # Note: in the future, we can add a ResumptionStrategy object + # to generate new args between calls. For now, use the same args + # for each attempt. 
+ subgenerator = target(*init_args, **init_kwargs) return (yield from subgenerator) # handle exceptions raised by the subgenerator # pylint: disable=broad-except @@ -326,11 +331,13 @@ def retry_wrapped_func( self._initial, self._maximum, multiplier=self._multiplier ) return retry_target_stream( - functools.partial(func, *args, **kwargs), + func, predicate=self._predicate, sleep_generator=sleep_generator, timeout=self._timeout, on_error=on_error, + init_args=args, + init_kwargs=kwargs, ) return retry_wrapped_func diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 06432178..0365e287 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -38,7 +38,6 @@ import logging import time import sys -import functools from google.api_core.retry import _BaseRetry from google.api_core.retry import exponential_sleep_generator @@ -61,7 +60,7 @@ async def retry_target_stream( - target: Callable[[], AsyncIterable["_Y"] | Awaitable[AsyncIterable["_Y"]]], + target: Callable[_P, AsyncIterable["_Y"] | Awaitable[AsyncIterable["_Y"]]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], timeout: Optional[float] = None, @@ -72,6 +71,8 @@ async def retry_target_stream( Tuple[Exception, Optional[Exception]], ] ] = None, + init_args: _P.args = (), + init_kwargs: _P.kwargs = {}, **kwargs, ) -> AsyncGenerator["_Y", None]: """Create a generator wrapper that retries the wrapped stream if it fails. @@ -80,8 +81,7 @@ async def retry_target_stream( higher-level retry helper :class:`AsyncRetry`. Args: - target: The generator function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. + target: The generator function to call and retry. predicate: A callable used to determine if an exception raised by the target should be considered retryable. It should return True to retry or False otherwise. 
@@ -101,6 +101,8 @@ async def retry_target_stream( along with the cause exception if any. If not provided, a default implementation will raise a RetryError on timeout, or the last exception encountered otherwise. + init_args: Positional arguments to pass to the target function. + init_kwargs: Keyword arguments to pass to the target function. Returns: AsyncGenerator: A retryable generator that wraps the target generator function. @@ -112,7 +114,8 @@ async def retry_target_stream( google.api_core.RetryError: If the deadline is exceeded while retrying. Exception: If the target raises an error that isn't retryable. """ - + # create frozen partial from original call args + # In the future, we can add a ResumptionStrategy object that creates new kwargs between calls target_iterator: Optional[AsyncIterator[_Y]] = None timeout = kwargs.get("deadline", timeout) deadline: Optional[float] = time.monotonic() + timeout if timeout else None @@ -125,9 +128,12 @@ async def retry_target_stream( for sleep in sleep_generator: # Start a new retry loop try: + # Note: in the future, we can add a ResumptionStrategy object + # to generate new args between calls. For now, use the same args + # for each attempt. 
target_output: Union[ AsyncIterable[_Y], Awaitable[AsyncIterable[_Y]] - ] = target() + ] = target(*init_args, **init_kwargs) try: # gapic functions return the generator behind an awaitable # unwrap the awaitable so we can work with the generator directly @@ -321,16 +327,17 @@ async def retry_wrapped_func( *args: _P.args, **kwargs: _P.kwargs ) -> AsyncGenerator[_Y, None]: """A wrapper that calls target function with retry.""" - target = functools.partial(func, *args, **kwargs) sleep_generator = exponential_sleep_generator( self._initial, self._maximum, multiplier=self._multiplier ) return retry_target_stream( - target, + func, self._predicate, sleep_generator, self._timeout, on_error, + init_args=args, + init_kwargs=kwargs, ) return retry_wrapped_func From 45431067341dde06556f48278e02c2df5a234d2b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 20 Nov 2023 18:41:59 -0800 Subject: [PATCH 164/204] uncommented functools.wraps --- google/api_core/retry_streaming_async.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 0365e287..5e2fb0a2 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -38,6 +38,7 @@ import logging import time import sys +import functools from google.api_core.retry import _BaseRetry from google.api_core.retry import exponential_sleep_generator @@ -322,7 +323,7 @@ def __call__( if self._on_error is not None: on_error = self._on_error - # @functools.wraps(func) + @functools.wraps(func) async def retry_wrapped_func( *args: _P.args, **kwargs: _P.kwargs ) -> AsyncGenerator[_Y, None]: From 638cc6863421daaa62b4ca3da1a5473b25661d40 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 13:16:46 -0800 Subject: [PATCH 165/204] change enum encoding --- google/api_core/retry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry.py 
b/google/api_core/retry.py index e9ec1070..3b8816dc 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -99,8 +99,8 @@ class RetryFailureReason(Enum): The cause of a failed retry, used when building exceptions """ - TIMEOUT = "TIMEOUT" - NON_RETRYABLE_ERROR = "NON_RETRYABLE_ERROR" + TIMEOUT = 0 + NON_RETRYABLE_ERROR = 1 def _build_retry_error( From f7b1e141a4b165354140f5a35acb43306a0af1b4 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 13:37:18 -0800 Subject: [PATCH 166/204] moved base retry into own file --- google/api_core/retry.py | 263 +---------------------- google/api_core/retry_async.py | 8 +- google/api_core/retry_streaming.py | 12 +- google/api_core/retry_streaming_async.py | 12 +- 4 files changed, 23 insertions(+), 272 deletions(-) diff --git a/google/api_core/retry.py b/google/api_core/retry.py index 3b8816dc..ff360d79 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -59,159 +59,35 @@ def check_if_exists(): import datetime import functools import logging -import random import sys import time -from enum import Enum import inspect import warnings from typing import Any, Callable, TypeVar, TYPE_CHECKING -import requests.exceptions - from google.api_core import datetime_helpers from google.api_core import exceptions -from google.auth import exceptions as auth_exceptions + +from google.api_core.retry_base import _BaseRetry +from google.api_core.retry_base import exponential_sleep_generator +from google.api_core.retry_base import if_exception_type # noqa: F401 +from google.api_core.retry_base import if_transient_error # noqa: F401 +from google.api_core.retry_base import _build_retry_error +from google.api_core.retry_base import RetryFailureReason if TYPE_CHECKING: if sys.version_info >= (3, 10): from typing import ParamSpec else: from typing_extensions import ParamSpec - if sys.version_info >= (3, 11): - from typing import Self - else: - from typing_extensions import Self _P = ParamSpec("_P") # 
target function call parameters _R = TypeVar("_R") # target function returned value _LOGGER = logging.getLogger(__name__) -_DEFAULT_INITIAL_DELAY = 1.0 # seconds -_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds -_DEFAULT_DELAY_MULTIPLIER = 2.0 -_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds _ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." -class RetryFailureReason(Enum): - """ - The cause of a failed retry, used when building exceptions - """ - - TIMEOUT = 0 - NON_RETRYABLE_ERROR = 1 - - -def _build_retry_error( - exc_list: list[Exception], - reason: RetryFailureReason, - timeout_val: float | None, - **kwargs: Any, -) -> tuple[Exception, Exception | None]: - """ - Default exception_factory implementation. Builds an exception after the retry fails - - Args: - - exc_list: list of exceptions that occurred during the retry - - reason: reason for the retry failure. - Can be TIMEOUT or NON_RETRYABLE_ERROR - - timeout_val: the original timeout value for the retry, for use in the exception message - - Returns: - - tuple: a tuple of the exception to be raised, and the cause exception if any - """ - if reason == RetryFailureReason.TIMEOUT: - # return RetryError with the most recent exception as the cause - src_exc = exc_list[-1] if exc_list else None - timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else "" - return ( - exceptions.RetryError( - f"Timeout {timeout_val_str}exceeded", - src_exc, - ), - src_exc, - ) - elif exc_list: - # return most recent exception encountered - return exc_list[-1], None - else: - # no exceptions were given in exc_list. Raise generic RetryError - return exceptions.RetryError("Unknown error", None), None - - -def if_exception_type( - *exception_types: type[Exception], -) -> Callable[[Exception], bool]: - """Creates a predicate to check if the exception is of a given type. 
- - Args: - exception_types (Sequence[:func:`type`]): The exception types to check - for. - - Returns: - Callable[Exception]: A predicate that returns True if the provided - exception is of the given type(s). - """ - - def if_exception_type_predicate(exception: Exception) -> bool: - """Bound predicate for checking an exception type.""" - return isinstance(exception, exception_types) - - return if_exception_type_predicate - - -# pylint: disable=invalid-name -# Pylint sees this as a constant, but it is also an alias that should be -# considered a function. -if_transient_error = if_exception_type( - exceptions.InternalServerError, - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - requests.exceptions.ConnectionError, - requests.exceptions.ChunkedEncodingError, - auth_exceptions.TransportError, -) -"""A predicate that checks if an exception is a transient API error. - -The following server errors are considered transient: - -- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC - ``INTERNAL(13)`` and its subclasses. -- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429 -- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503 -- :class:`requests.exceptions.ConnectionError` -- :class:`requests.exceptions.ChunkedEncodingError` - The server declared - chunked encoding but sent an invalid chunk. -- :class:`google.auth.exceptions.TransportError` - Used to indicate an - error occurred during an HTTP request. -""" -# pylint: enable=invalid-name - - -def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER): - """Generates sleep intervals based on the exponential back-off algorithm. - - This implements the `Truncated Exponential Back-off`_ algorithm. - - .. _Truncated Exponential Back-off: - https://cloud.google.com/storage/docs/exponential-backoff - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. 
- maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Yields: - float: successive sleep intervals. - """ - delay = min(initial, maximum) - while True: - yield random.uniform(0.0, delay) - delay = min(delay * multiplier, maximum) - - def retry_target( target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs ): @@ -289,131 +165,6 @@ def retry_target( raise ValueError("Sleep generator stopped yielding sleep values.") -class _BaseRetry(object): - """ - Base class for retry configuration objects. This class is intended to capture retry - and backoff configuration that is common to both synchronous and asynchronous retries, - for both unary and streaming RPCs. It is not intended to be instantiated directly, - but rather to be subclassed by the various retry configuration classes. - """ - - def __init__( - self, - predicate: Callable[[Exception], bool] = if_transient_error, - initial: float = _DEFAULT_INITIAL_DELAY, - maximum: float = _DEFAULT_MAXIMUM_DELAY, - multiplier: float = _DEFAULT_DELAY_MULTIPLIER, - timeout: float = _DEFAULT_DEADLINE, - on_error: Callable[[Exception], Any] | None = None, - **kwargs: Any, - ) -> None: - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._timeout = kwargs.get("deadline", timeout) - self._deadline = self._timeout - self._on_error = on_error - - def __call__(self, *args, **kwargs) -> Any: - raise NotImplementedError("Not implemented in base class") - - @property - def deadline(self) -> float | None: - """ - DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class - documentation for details. 
- """ - return self._timeout - - @property - def timeout(self) -> float | None: - return self._timeout - - def _replace( - self, - predicate=None, - initial=None, - maximum=None, - multiplier=None, - timeout=None, - on_error=None, - ) -> Self: - return type(self)( - predicate=predicate or self._predicate, - initial=initial or self._initial, - maximum=maximum or self._maximum, - multiplier=multiplier or self._multiplier, - timeout=timeout or self._timeout, - on_error=on_error or self._on_error, - ) - - def with_deadline(self, deadline) -> Self: - """Return a copy of this retry with the given timeout. - - DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class - documentation for details. - - Args: - deadline (float): How long to keep retrying in seconds. - - Returns: - Retry: A new retry instance with the given timeout. - """ - return self._replace(timeout=deadline) - - def with_timeout(self, timeout) -> Self: - """Return a copy of this retry with the given timeout. - - Args: - timeout (float): How long to keep retrying, in seconds. - - Returns: - Retry: A new retry instance with the given timeout. - """ - return self._replace(timeout=timeout) - - def with_predicate(self, predicate) -> Self: - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - Retry: A new retry instance with the given predicate. - """ - return self._replace(predicate=predicate) - - def with_delay(self, initial=None, maximum=None, multiplier=None) -> Self: - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Returns: - Retry: A new retry instance with the given predicate. 
- """ - return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) - - def __str__(self) -> str: - return ( - "<{} predicate={}, initial={:.1f}, maximum={:.1f}, " - "multiplier={:.1f}, timeout={}, on_error={}>".format( - type(self).__name__, - self._predicate, - self._initial, - self._maximum, - self._multiplier, - self._timeout, # timeout can be None, thus no {:.1f} - self._on_error, - ) - ) - - class Retry(_BaseRetry): """Exponential retry decorator for unary synchronous RPCs. diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index a38d4ea4..9e2ad148 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -67,10 +67,10 @@ async def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions -from google.api_core.retry import _BaseRetry -from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error # noqa: F401 +from google.api_core.retry_base import _BaseRetry +from google.api_core.retry_base import exponential_sleep_generator +from google.api_core.retry_base import if_exception_type # noqa: F401 +from google.api_core.retry_base import if_transient_error # noqa: F401 if TYPE_CHECKING: import sys diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry_streaming.py index 5695c67f..9f9a548c 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry_streaming.py @@ -34,12 +34,12 @@ import time import functools -from google.api_core.retry import _BaseRetry -from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error # noqa: F401 -from google.api_core.retry import _build_retry_error -from google.api_core.retry import RetryFailureReason +from google.api_core.retry_base import 
_BaseRetry +from google.api_core.retry_base import exponential_sleep_generator +from google.api_core.retry_base import if_exception_type # noqa: F401 +from google.api_core.retry_base import if_transient_error # noqa: F401 +from google.api_core.retry_base import _build_retry_error +from google.api_core.retry_base import RetryFailureReason if TYPE_CHECKING: if sys.version_info >= (3, 10): diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry_streaming_async.py index 5e2fb0a2..8af85afc 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry_streaming_async.py @@ -40,12 +40,12 @@ import sys import functools -from google.api_core.retry import _BaseRetry -from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error # noqa: F401 -from google.api_core.retry import _build_retry_error -from google.api_core.retry import RetryFailureReason +from google.api_core.retry_base import _BaseRetry +from google.api_core.retry_base import exponential_sleep_generator +from google.api_core.retry_base import if_exception_type # noqa: F401 +from google.api_core.retry_base import if_transient_error # noqa: F401 +from google.api_core.retry_base import _build_retry_error +from google.api_core.retry_base import RetryFailureReason if TYPE_CHECKING: From 07db4c260525092b64c68e8937bd172f508ba1a8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 15:07:53 -0800 Subject: [PATCH 167/204] restructured files --- google/api_core/__init__.py | 3 + google/api_core/retry/__init__.py | 39 +++ google/api_core/retry/retry_base.py | 286 ++++++++++++++++++ .../api_core/{ => retry}/retry_streaming.py | 10 +- .../{ => retry}/retry_streaming_async.py | 10 +- .../{retry.py => retry/retry_unary.py} | 9 +- .../retry_unary_async.py} | 9 +- tests/asyncio/retry/__init__.py | 0 .../{ => retry}/test_retry_streaming_async.py | 8 +- 
.../test_retry_unary_async.py} | 4 +- tests/unit/retry/__init__.py | 0 tests/unit/retry/test_retry_base.py | 267 ++++++++++++++++ .../unit/{ => retry}/test_retry_streaming.py | 15 +- .../test_retry_unary.py} | 258 +--------------- 14 files changed, 630 insertions(+), 288 deletions(-) create mode 100644 google/api_core/retry/__init__.py create mode 100644 google/api_core/retry/retry_base.py rename google/api_core/{ => retry}/retry_streaming.py (97%) rename google/api_core/{ => retry}/retry_streaming_async.py (97%) rename google/api_core/{retry.py => retry/retry_unary.py} (97%) rename google/api_core/{retry_async.py => retry/retry_unary_async.py} (97%) create mode 100644 tests/asyncio/retry/__init__.py rename tests/asyncio/{ => retry}/test_retry_streaming_async.py (98%) rename tests/asyncio/{test_retry_async.py => retry/test_retry_unary_async.py} (98%) create mode 100644 tests/unit/retry/__init__.py create mode 100644 tests/unit/retry/test_retry_base.py rename tests/unit/{ => retry}/test_retry_streaming.py (97%) rename tests/unit/{test_retry.py => retry/test_retry_unary.py} (54%) diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py index b80ea372..89ce7510 100644 --- a/google/api_core/__init__.py +++ b/google/api_core/__init__.py @@ -20,3 +20,6 @@ from google.api_core import version as api_core_version __version__ = api_core_version.__version__ + +# for backwards compatibility, expose async unary retries as google.api_core.retry_async +from .retry import retry_unary_async as retry_async # noqa: F401 diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py new file mode 100644 index 00000000..7f8b363e --- /dev/null +++ b/google/api_core/retry/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Retry implementation for Google API client libraries.""" + +from .retry_base import exponential_sleep_generator +from .retry_base import if_exception_type +from .retry_base import if_transient_error +from .retry_base import _build_retry_error +from .retry_base import RetryFailureReason +from .retry_unary import Retry +from .retry_unary import retry_target +from .retry_unary_async import AsyncRetry +from .retry_streaming import StreamingRetry +from .retry_streaming_async import AsyncStreamingRetry + +__all__ = ( + "exponential_sleep_generator", + "if_exception_type", + "if_transient_error", + "_build_retry_error", + "RetryFailureReason", + "Retry", + "AsyncRetry", + "StreamingRetry", + "AsyncStreamingRetry", + "retry_target", +) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py new file mode 100644 index 00000000..62c6099d --- /dev/null +++ b/google/api_core/retry/retry_base.py @@ -0,0 +1,286 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Shared classes and functions for retrying requests. + +:class:`_BaseRetry` is the base class for :class:`Retry`, +:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`. +""" + +from __future__ import annotations + +import random +from enum import Enum +from typing import Any, Callable, TYPE_CHECKING + +import requests.exceptions + +from google.api_core import exceptions +from google.auth import exceptions as auth_exceptions + +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 11): + from typing import Self + else: + from typing_extensions import Self + +_DEFAULT_INITIAL_DELAY = 1.0 # seconds +_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds +_DEFAULT_DELAY_MULTIPLIER = 2.0 +_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds + + +def if_exception_type( + *exception_types: type[Exception], +) -> Callable[[Exception], bool]: + """Creates a predicate to check if the exception is of a given type. + + Args: + exception_types (Sequence[:func:`type`]): The exception types to check + for. + + Returns: + Callable[Exception]: A predicate that returns True if the provided + exception is of the given type(s). + """ + + def if_exception_type_predicate(exception: Exception) -> bool: + """Bound predicate for checking an exception type.""" + return isinstance(exception, exception_types) + + return if_exception_type_predicate + + +# pylint: disable=invalid-name +# Pylint sees this as a constant, but it is also an alias that should be +# considered a function. +if_transient_error = if_exception_type( + exceptions.InternalServerError, + exceptions.TooManyRequests, + exceptions.ServiceUnavailable, + requests.exceptions.ConnectionError, + requests.exceptions.ChunkedEncodingError, + auth_exceptions.TransportError, +) +"""A predicate that checks if an exception is a transient API error. + +The following server errors are considered transient: + +- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC + ``INTERNAL(13)`` and its subclasses. 
+- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429 +- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503 +- :class:`requests.exceptions.ConnectionError` +- :class:`requests.exceptions.ChunkedEncodingError` - The server declared + chunked encoding but sent an invalid chunk. +- :class:`google.auth.exceptions.TransportError` - Used to indicate an + error occurred during an HTTP request. +""" +# pylint: enable=invalid-name + + +def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER): + """Generates sleep intervals based on the exponential back-off algorithm. + + This implements the `Truncated Exponential Back-off`_ algorithm. + + .. _Truncated Exponential Back-off: + https://cloud.google.com/storage/docs/exponential-backoff + + Args: + initial (float): The minimum amount of time to delay. This must + be greater than 0. + maximum (float): The maximum amount of time to delay. + multiplier (float): The multiplier applied to the delay. + + Yields: + float: successive sleep intervals. + """ + delay = min(initial, maximum) + while True: + yield random.uniform(0.0, delay) + delay = min(delay * multiplier, maximum) + + +class RetryFailureReason(Enum): + """ + The cause of a failed retry, used when building exceptions + """ + + TIMEOUT = 0 + NON_RETRYABLE_ERROR = 1 + + +def _build_retry_error( + exc_list: list[Exception], + reason: RetryFailureReason, + timeout_val: float | None, + **kwargs: Any, +) -> tuple[Exception, Exception | None]: + """ + Default exception_factory implementation. Builds an exception after the retry fails + + Args: + - exc_list: list of exceptions that occurred during the retry + - reason: reason for the retry failure. 
+ Can be TIMEOUT or NON_RETRYABLE_ERROR + - timeout_val: the original timeout value for the retry, for use in the exception message + + Returns: + - tuple: a tuple of the exception to be raised, and the cause exception if any + """ + if reason == RetryFailureReason.TIMEOUT: + # return RetryError with the most recent exception as the cause + src_exc = exc_list[-1] if exc_list else None + timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else "" + return ( + exceptions.RetryError( + f"Timeout {timeout_val_str}exceeded", + src_exc, + ), + src_exc, + ) + elif exc_list: + # return most recent exception encountered + return exc_list[-1], None + else: + # no exceptions were given in exc_list. Raise generic RetryError + return exceptions.RetryError("Unknown error", None), None + + +class _BaseRetry(object): + """ + Base class for retry configuration objects. This class is intended to capture retry + and backoff configuration that is common to both synchronous and asynchronous retries, + for both unary and streaming RPCs. It is not intended to be instantiated directly, + but rather to be subclassed by the various retry configuration classes. + """ + + def __init__( + self, + predicate: Callable[[Exception], bool] = if_transient_error, + initial: float = _DEFAULT_INITIAL_DELAY, + maximum: float = _DEFAULT_MAXIMUM_DELAY, + multiplier: float = _DEFAULT_DELAY_MULTIPLIER, + timeout: float = _DEFAULT_DEADLINE, + on_error: Callable[[Exception], Any] | None = None, + **kwargs: Any, + ) -> None: + self._predicate = predicate + self._initial = initial + self._multiplier = multiplier + self._maximum = maximum + self._timeout = kwargs.get("deadline", timeout) + self._deadline = self._timeout + self._on_error = on_error + + def __call__(self, *args, **kwargs) -> Any: + raise NotImplementedError("Not implemented in base class") + + @property + def deadline(self) -> float | None: + """ + DEPRECATED: use ``timeout`` instead. 
Refer to the ``Retry`` class + documentation for details. + """ + return self._timeout + + @property + def timeout(self) -> float | None: + return self._timeout + + def _replace( + self, + predicate=None, + initial=None, + maximum=None, + multiplier=None, + timeout=None, + on_error=None, + ) -> Self: + return type(self)( + predicate=predicate or self._predicate, + initial=initial or self._initial, + maximum=maximum or self._maximum, + multiplier=multiplier or self._multiplier, + timeout=timeout or self._timeout, + on_error=on_error or self._on_error, + ) + + def with_deadline(self, deadline) -> Self: + """Return a copy of this retry with the given timeout. + + DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class + documentation for details. + + Args: + deadline (float): How long to keep retrying in seconds. + + Returns: + Retry: A new retry instance with the given timeout. + """ + return self._replace(timeout=deadline) + + def with_timeout(self, timeout) -> Self: + """Return a copy of this retry with the given timeout. + + Args: + timeout (float): How long to keep retrying, in seconds. + + Returns: + Retry: A new retry instance with the given timeout. + """ + return self._replace(timeout=timeout) + + def with_predicate(self, predicate) -> Self: + """Return a copy of this retry with the given predicate. + + Args: + predicate (Callable[Exception]): A callable that should return + ``True`` if the given exception is retryable. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return self._replace(predicate=predicate) + + def with_delay(self, initial=None, maximum=None, multiplier=None) -> Self: + """Return a copy of this retry with the given delay options. + + Args: + initial (float): The minimum amount of time to delay. This must + be greater than 0. + maximum (float): The maximum amount of time to delay. + multiplier (float): The multiplier applied to the delay. 
+ + Returns: + Retry: A new retry instance with the given predicate. + """ + return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) + + def __str__(self) -> str: + return ( + "<{} predicate={}, initial={:.1f}, maximum={:.1f}, " + "multiplier={:.1f}, timeout={}, on_error={}>".format( + type(self).__name__, + self._predicate, + self._initial, + self._maximum, + self._multiplier, + self._timeout, # timeout can be None, thus no {:.1f} + self._on_error, + ) + ) diff --git a/google/api_core/retry_streaming.py b/google/api_core/retry/retry_streaming.py similarity index 97% rename from google/api_core/retry_streaming.py rename to google/api_core/retry/retry_streaming.py index 9f9a548c..1c866698 100644 --- a/google/api_core/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -34,12 +34,10 @@ import time import functools -from google.api_core.retry_base import _BaseRetry -from google.api_core.retry_base import exponential_sleep_generator -from google.api_core.retry_base import if_exception_type # noqa: F401 -from google.api_core.retry_base import if_transient_error # noqa: F401 -from google.api_core.retry_base import _build_retry_error -from google.api_core.retry_base import RetryFailureReason +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import _build_retry_error +from google.api_core.retry import RetryFailureReason if TYPE_CHECKING: if sys.version_info >= (3, 10): diff --git a/google/api_core/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py similarity index 97% rename from google/api_core/retry_streaming_async.py rename to google/api_core/retry/retry_streaming_async.py index 8af85afc..7f860d3d 100644 --- a/google/api_core/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -40,12 +40,10 @@ import sys import functools -from google.api_core.retry_base import _BaseRetry -from 
google.api_core.retry_base import exponential_sleep_generator -from google.api_core.retry_base import if_exception_type # noqa: F401 -from google.api_core.retry_base import if_transient_error # noqa: F401 -from google.api_core.retry_base import _build_retry_error -from google.api_core.retry_base import RetryFailureReason +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import _build_retry_error +from google.api_core.retry import RetryFailureReason if TYPE_CHECKING: diff --git a/google/api_core/retry.py b/google/api_core/retry/retry_unary.py similarity index 97% rename from google/api_core/retry.py rename to google/api_core/retry/retry_unary.py index ff360d79..93642a14 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry/retry_unary.py @@ -68,12 +68,9 @@ def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions -from google.api_core.retry_base import _BaseRetry -from google.api_core.retry_base import exponential_sleep_generator -from google.api_core.retry_base import if_exception_type # noqa: F401 -from google.api_core.retry_base import if_transient_error # noqa: F401 -from google.api_core.retry_base import _build_retry_error -from google.api_core.retry_base import RetryFailureReason +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry import exponential_sleep_generator + if TYPE_CHECKING: if sys.version_info >= (3, 10): diff --git a/google/api_core/retry_async.py b/google/api_core/retry/retry_unary_async.py similarity index 97% rename from google/api_core/retry_async.py rename to google/api_core/retry/retry_unary_async.py index 9e2ad148..c6d3ac42 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -67,10 +67,11 @@ async def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions -from 
google.api_core.retry_base import _BaseRetry -from google.api_core.retry_base import exponential_sleep_generator -from google.api_core.retry_base import if_exception_type # noqa: F401 -from google.api_core.retry_base import if_transient_error # noqa: F401 +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry import exponential_sleep_generator + +# for backwards compatibility, expose helpers in this module +from google.api_core.retry.retry_base import if_exception_type # noqa if TYPE_CHECKING: import sys diff --git a/tests/asyncio/retry/__init__.py b/tests/asyncio/retry/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/asyncio/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py similarity index 98% rename from tests/asyncio/test_retry_streaming_async.py rename to tests/asyncio/retry/test_retry_streaming_async.py index 301e0607..7e08dcb0 100644 --- a/tests/asyncio/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -21,9 +21,9 @@ from google.api_core import exceptions from google.api_core import retry_async -from google.api_core import retry_streaming_async +from google.api_core.retry import retry_streaming_async -from ..unit.test_retry import Test_BaseRetry +from ...unit.retry.test_retry_base import Test_BaseRetry class TestAsyncStreamingRetry(Test_BaseRetry): @@ -459,7 +459,7 @@ async def test_exc_factory_non_retryable_error(self): test when non-retryable error is thrown """ from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming_async import retry_target_stream + from google.api_core.retry.retry_streaming_async import retry_target_stream timeout = 6 sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] @@ -499,7 +499,7 @@ async def test_exc_factory_timeout(self): """ import time from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming_async import 
retry_target_stream + from google.api_core.retry.retry_streaming_async import retry_target_stream timeout = 2 time_now = time.monotonic() diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/retry/test_retry_unary_async.py similarity index 98% rename from tests/asyncio/test_retry_async.py rename to tests/asyncio/retry/test_retry_unary_async.py index b50cddf2..0938e14d 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -21,7 +21,7 @@ from google.api_core import exceptions from google.api_core import retry_async -from ..unit.test_retry import Test_BaseRetry +from ...unit.retry.test_retry_base import Test_BaseRetry @mock.patch("asyncio.sleep", autospec=True) @@ -137,7 +137,7 @@ async def test_retry_target_bad_sleep_generator(): @pytest.mark.asyncio async def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry_streaming_async import retry_target_stream + from google.api_core.retry.retry_streaming_async import retry_target_stream with pytest.raises(ValueError, match="Sleep generator"): await retry_target_stream(None, None, [], None).__anext__() diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py new file mode 100644 index 00000000..06488839 --- /dev/null +++ b/tests/unit/retry/test_retry_base.py @@ -0,0 +1,267 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import itertools +import re + +import mock +import requests.exceptions + +from google.api_core import exceptions +from google.api_core import retry +from google.auth import exceptions as auth_exceptions + + +def test_if_exception_type(): + predicate = retry.if_exception_type(ValueError) + + assert predicate(ValueError()) + assert not predicate(TypeError()) + + +def test_if_exception_type_multiple(): + predicate = retry.if_exception_type(ValueError, TypeError) + + assert predicate(ValueError()) + assert predicate(TypeError()) + assert not predicate(RuntimeError()) + + +def test_if_transient_error(): + assert retry.if_transient_error(exceptions.InternalServerError("")) + assert retry.if_transient_error(exceptions.TooManyRequests("")) + assert retry.if_transient_error(exceptions.ServiceUnavailable("")) + assert retry.if_transient_error(requests.exceptions.ConnectionError("")) + assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError("")) + assert retry.if_transient_error(auth_exceptions.TransportError("")) + assert not retry.if_transient_error(exceptions.InvalidArgument("")) + + +# Make uniform return half of its maximum, which will be the calculated +# sleep time. 
+@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) +def test_exponential_sleep_generator_base_2(uniform): + gen = retry.exponential_sleep_generator(1, 60, multiplier=2) + + result = list(itertools.islice(gen, 8)) + assert result == [1, 2, 4, 8, 16, 32, 60, 60] + + +def test__build_retry_error_empty_list(): + """ + attempt to build a retry error with no errors encountered + should return a generic RetryError + """ + from google.api_core.retry import _build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.NON_RETRYABLE_ERROR + src, cause = _build_retry_error([], reason, 10) + assert isinstance(src, exceptions.RetryError) + assert cause is None + assert src.message == "Unknown error" + + +def test__build_retry_error_timeout_message(): + """ + should provide helpful error message when timeout is reached + """ + from google.api_core.retry import _build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.TIMEOUT + cause = RuntimeError("timeout") + src, found_cause = _build_retry_error([ValueError(), cause], reason, 10) + assert isinstance(src, exceptions.RetryError) + assert src.message == "Timeout of 10.0s exceeded" + # should attach appropriate cause + assert found_cause is cause + + +def test__build_retry_error_empty_timeout(): + """ + attempt to build a retry error with timout is None + should return a generic timeout error message + """ + from google.api_core.retry import _build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.TIMEOUT + src, _ = _build_retry_error([], reason, None) + assert isinstance(src, exceptions.RetryError) + assert src.message == "Timeout exceeded" + + +class Test_BaseRetry(object): + def _make_one(self, *args, **kwargs): + return retry.retry_base._BaseRetry(*args, **kwargs) + + def test_constructor_defaults(self): + retry_ = self._make_one() + assert retry_._predicate == 
retry.if_transient_error + assert retry_._initial == 1 + assert retry_._maximum == 60 + assert retry_._multiplier == 2 + assert retry_._deadline == 120 + assert retry_._on_error is None + assert retry_.deadline == 120 + assert retry_.timeout == 120 + + def test_constructor_options(self): + _some_function = mock.Mock() + + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=_some_function, + ) + assert retry_._predicate == mock.sentinel.predicate + assert retry_._initial == 1 + assert retry_._maximum == 2 + assert retry_._multiplier == 3 + assert retry_._deadline == 4 + assert retry_._on_error is _some_function + + def test_with_deadline(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_deadline(42) + assert retry_ is not new_retry + assert new_retry._deadline == 42 + + # the rest of the attributes should remain the same + assert new_retry._predicate is retry_._predicate + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_predicate(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_predicate(mock.sentinel.predicate) + assert retry_ is not new_retry + assert new_retry._predicate == mock.sentinel.predicate + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_delay_noop(self): + retry_ = 
self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay() + assert retry_ is not new_retry + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + + def test_with_delay(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) + assert retry_ is not new_retry + assert new_retry._initial == 5 + assert new_retry._maximum == 6 + assert new_retry._multiplier == 7 + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test_with_delay_partial_options(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + deadline=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay(initial=4) + assert retry_ is not new_retry + assert new_retry._initial == 4 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 3 + + new_retry = retry_.with_delay(maximum=4) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 4 + assert new_retry._multiplier == 3 + + new_retry = retry_.with_delay(multiplier=4) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 4 + + # the rest of the attributes should remain the same + assert new_retry._deadline == retry_._deadline + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: 
NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. + retry_ = self._make_one( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + deadline=120.0, + on_error=None, + ) + assert re.match( + ( + r"<_BaseRetry predicate=, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) diff --git a/tests/unit/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py similarity index 97% rename from tests/unit/test_retry_streaming.py rename to tests/unit/retry/test_retry_streaming.py index a0257b68..e2b3861d 100644 --- a/tests/unit/test_retry_streaming.py +++ b/tests/unit/retry/test_retry_streaming.py @@ -19,9 +19,16 @@ from google.api_core import exceptions from google.api_core import retry -from google.api_core import retry_streaming +from google.api_core.retry import retry_streaming -from .test_retry import Test_BaseRetry +from .test_retry_base import Test_BaseRetry + + +def test_retry_streaming_target_bad_sleep_generator(): + with pytest.raises( + ValueError, match="Sleep generator stopped yielding sleep values" + ): + next(retry_streaming.retry_target_stream(None, None, [], None)) class TestStreamingRetry(Test_BaseRetry): @@ -372,7 +379,7 @@ def test_exc_factory_non_retryable_error(self): test when non-retryable error is thrown """ from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming import retry_target_stream + from google.api_core.retry.retry_streaming import retry_target_stream timeout = None sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] @@ -411,7 +418,7 @@ def test_exc_factory_timeout(self): """ import time from google.api_core.retry import RetryFailureReason - from google.api_core.retry_streaming import retry_target_stream + from google.api_core.retry.retry_streaming import retry_target_stream timeout = 2 time_now = time.monotonic() diff 
--git a/tests/unit/test_retry.py b/tests/unit/retry/test_retry_unary.py similarity index 54% rename from tests/unit/test_retry.py rename to tests/unit/retry/test_retry_unary.py index 6976ce0d..cdc4e18f 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/retry/test_retry_unary.py @@ -13,96 +13,15 @@ # limitations under the License. import datetime -import itertools import re import mock import pytest -import requests.exceptions from google.api_core import exceptions from google.api_core import retry -from google.auth import exceptions as auth_exceptions - -def test_if_exception_type(): - predicate = retry.if_exception_type(ValueError) - - assert predicate(ValueError()) - assert not predicate(TypeError()) - - -def test_if_exception_type_multiple(): - predicate = retry.if_exception_type(ValueError, TypeError) - - assert predicate(ValueError()) - assert predicate(TypeError()) - assert not predicate(RuntimeError()) - - -def test_if_transient_error(): - assert retry.if_transient_error(exceptions.InternalServerError("")) - assert retry.if_transient_error(exceptions.TooManyRequests("")) - assert retry.if_transient_error(exceptions.ServiceUnavailable("")) - assert retry.if_transient_error(requests.exceptions.ConnectionError("")) - assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError("")) - assert retry.if_transient_error(auth_exceptions.TransportError("")) - assert not retry.if_transient_error(exceptions.InvalidArgument("")) - - -# Make uniform return half of its maximum, which will be the calculated -# sleep time. 
-@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) -def test_exponential_sleep_generator_base_2(uniform): - gen = retry.exponential_sleep_generator(1, 60, multiplier=2) - - result = list(itertools.islice(gen, 8)) - assert result == [1, 2, 4, 8, 16, 32, 60, 60] - - -def test__build_retry_error_empty_list(): - """ - attempt to build a retry error with no errors encountered - should return a generic RetryError - """ - from google.api_core.retry import _build_retry_error - from google.api_core.retry import RetryFailureReason - - reason = RetryFailureReason.NON_RETRYABLE_ERROR - src, cause = _build_retry_error([], reason, 10) - assert isinstance(src, exceptions.RetryError) - assert cause is None - assert src.message == "Unknown error" - - -def test__build_retry_error_timeout_message(): - """ - should provide helpful error message when timeout is reached - """ - from google.api_core.retry import _build_retry_error - from google.api_core.retry import RetryFailureReason - - reason = RetryFailureReason.TIMEOUT - cause = RuntimeError("timeout") - src, found_cause = _build_retry_error([ValueError(), cause], reason, 10) - assert isinstance(src, exceptions.RetryError) - assert src.message == "Timeout of 10.0s exceeded" - # should attach appropriate cause - assert found_cause is cause - - -def test__build_retry_error_empty_timeout(): - """ - attempt to build a retry error with timout is None - should return a generic timeout error message - """ - from google.api_core.retry import _build_retry_error - from google.api_core.retry import RetryFailureReason - - reason = RetryFailureReason.TIMEOUT - src, _ = _build_retry_error([], reason, None) - assert isinstance(src, exceptions.RetryError) - assert src.message == "Timeout exceeded" +from .test_retry_base import Test_BaseRetry @mock.patch("time.sleep", autospec=True) @@ -190,7 +109,7 @@ async def test_retry_target_warning_for_retry(utcnow, sleep): retry.retry_target(target, predicate, range(10), None) assert 
len(exc_info) == 2 - assert str(exc_info[0].message) == retry._ASYNC_RETRY_WARNING + assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING sleep.assert_not_called() @@ -227,179 +146,6 @@ def test_retry_target_bad_sleep_generator(): retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None) -def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry_streaming import retry_target_stream - - with pytest.raises( - ValueError, match="Sleep generator stopped yielding sleep values" - ): - next(retry_target_stream(None, None, [], None)) - - -class Test_BaseRetry(object): - def _make_one(self, *args, **kwargs): - return retry._BaseRetry(*args, **kwargs) - - def test_constructor_defaults(self): - retry_ = self._make_one() - assert retry_._predicate == retry.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - assert retry_.deadline == 120 - assert retry_.timeout == 120 - - def test_constructor_options(self): - _some_function = mock.Mock() - - retry_ = self._make_one( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, - ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function - - def test_with_deadline(self): - retry_ = self._make_one( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == 
retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = self._make_one( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = self._make_one( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - - def test_with_delay(self): - retry_ = self._make_one( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) - assert retry_ is not new_retry - assert new_retry._initial == 5 - assert new_retry._maximum == 6 - assert new_retry._multiplier == 7 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error - - def test_with_delay_partial_options(self): - retry_ = self._make_one( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = 
retry_.with_delay(initial=4) - assert retry_ is not new_retry - assert new_retry._initial == 4 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(maximum=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 4 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(multiplier=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 4 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error - - def test___str__(self): - def if_exception_type(exc): - return bool(exc) # pragma: NO COVER - - # Explicitly set all attributes as changed Retry defaults should not - # cause this test to start failing. - retry_ = self._make_one( - predicate=if_exception_type, - initial=1.0, - maximum=60.0, - multiplier=2.0, - deadline=120.0, - on_error=None, - ) - assert re.match( - ( - r"<_BaseRetry predicate=, " - r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " - r"on_error=None>" - ), - str(retry_), - ) - - class TestRetry(Test_BaseRetry): def _make_one(self, *args, **kwargs): return retry.Retry(*args, **kwargs) From d448a52d0af497c7299dbd619df6ce0d5af8aa72 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 15:10:08 -0800 Subject: [PATCH 168/204] expose other retry target functions in retry __init__ --- google/api_core/retry/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py index 7f8b363e..92a9c767 100644 --- a/google/api_core/retry/__init__.py +++ b/google/api_core/retry/__init__.py @@ -22,8 +22,11 @@ from .retry_unary import Retry from .retry_unary import retry_target from .retry_unary_async import AsyncRetry +from .retry_unary_async 
import retry_target as retry_target_async from .retry_streaming import StreamingRetry +from .retry_streaming import retry_target_stream from .retry_streaming_async import AsyncStreamingRetry +from .retry_streaming_async import retry_target_stream as retry_target_stream_async __all__ = ( "exponential_sleep_generator", @@ -36,4 +39,7 @@ "StreamingRetry", "AsyncStreamingRetry", "retry_target", + "retry_target_async", + "retry_target_stream", + "retry_target_stream_async", ) From 781426a6ba1ecc914af520155efb73543d89cfdb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 15:35:11 -0800 Subject: [PATCH 169/204] share a logger --- google/api_core/retry/retry_base.py | 3 +++ google/api_core/retry/retry_streaming_async.py | 3 +-- google/api_core/retry/retry_unary.py | 2 +- google/api_core/retry/retry_unary_async.py | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index 62c6099d..62364138 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -20,6 +20,7 @@ from __future__ import annotations +import logging import random from enum import Enum from typing import Any, Callable, TYPE_CHECKING @@ -42,6 +43,8 @@ _DEFAULT_DELAY_MULTIPLIER = 2.0 _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds +_LOGGER = logging.getLogger("google.api_core.retry") + def if_exception_type( *exception_types: type[Exception], diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index 7f860d3d..fe36afbb 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -41,6 +41,7 @@ import functools from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _LOGGER from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import _build_retry_error from google.api_core.retry import 
RetryFailureReason @@ -55,8 +56,6 @@ _P = ParamSpec("_P") # target function call parameters _Y = TypeVar("_Y") # yielded values -_LOGGER = logging.getLogger(__name__) - async def retry_target_stream( target: Callable[_P, AsyncIterable["_Y"] | Awaitable[AsyncIterable["_Y"]]], diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 93642a14..c14df667 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -69,6 +69,7 @@ def check_if_exists(): from google.api_core import exceptions from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _LOGGER from google.api_core.retry import exponential_sleep_generator @@ -81,7 +82,6 @@ def check_if_exists(): _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value -_LOGGER = logging.getLogger(__name__) _ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." 
diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index c6d3ac42..1d5ebc13 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -68,6 +68,7 @@ async def check_if_exists(): from google.api_core import datetime_helpers from google.api_core import exceptions from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _LOGGER from google.api_core.retry import exponential_sleep_generator # for backwards compatibility, expose helpers in this module @@ -84,7 +85,6 @@ async def check_if_exists(): _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value -_LOGGER = logging.getLogger(__name__) _DEFAULT_INITIAL_DELAY = 1.0 # seconds _DEFAULT_MAXIMUM_DELAY = 60.0 # seconds _DEFAULT_DELAY_MULTIPLIER = 2.0 From 4a05404e82389971540e683e5de8d8edb921bc64 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 17:05:14 -0800 Subject: [PATCH 170/204] extracted shared error handling logic --- google/api_core/retry/retry_base.py | 41 +++++++++++ google/api_core/retry/retry_streaming.py | 39 ++++------- .../api_core/retry/retry_streaming_async.py | 34 +++------ google/api_core/retry/retry_unary.py | 54 ++++++--------- google/api_core/retry/retry_unary_async.py | 69 +++++++------------ tests/asyncio/retry/test_retry_unary_async.py | 36 +++++----- tests/unit/retry/test_retry_unary.py | 22 ++---- 7 files changed, 135 insertions(+), 160 deletions(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index 62364138..f25b9f6c 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -22,6 +22,8 @@ import logging import random +import time + from enum import Enum from typing import Any, Callable, TYPE_CHECKING @@ -164,6 +166,45 @@ def _build_retry_error( return exceptions.RetryError("Unknown error", None), None +def 
_retry_error_helper( + exc, deadline, next_sleep, error_list, predicate_fn, on_error_fn, exc_factory_fn +): + """ + Shared logic for handling an error for all retry implementations + + - Raises an error on timeout or non-retryable error + - Calls on_error_fn if provided + - Logs the error + + Args: + - exc: the exception that was raised + - deadline: the deadline for the retry, calculated as a diff from time.monotonic() + - next_sleep: the calculated next sleep interval + - error_list: the list of exceptions that have been raised so far + - predicate_fn: the predicate that was used to determine if the exception should be retried + - on_error_fn: the callback that was called when the exception was raised + - exc_factory_fn: the callback that was called to build the exception to be raised on terminal failure + """ + error_list.append(exc) + if not predicate_fn(exc): + final_exc, source_exc = exc_factory_fn( + error_list, + RetryFailureReason.NON_RETRYABLE_ERROR, + ) + raise final_exc from source_exc + if on_error_fn is not None: + on_error_fn(exc) + if deadline is not None and time.monotonic() + next_sleep > deadline: + final_exc, source_exc = exc_factory_fn( + error_list, + RetryFailureReason.TIMEOUT, + ) + raise final_exc from source_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep) + ) + + class _BaseRetry(object): """ Base class for retry configuration objects. 
This class is intended to capture retry diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index 1c866698..e849781a 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -30,11 +30,11 @@ ) import sys -import logging import time import functools from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import _build_retry_error from google.api_core.retry import RetryFailureReason @@ -48,8 +48,6 @@ _P = ParamSpec("_P") # target function call parameters _Y = TypeVar("_Y") # yielded values -_LOGGER = logging.getLogger(__name__) - def retry_target_stream( target: Callable[_P, Iterable[_Y]], @@ -108,9 +106,14 @@ def retry_target_stream( """ timeout = kwargs.get("deadline", timeout) - deadline: Optional[float] = time.monotonic() + timeout if timeout else None - error_list: List[Exception] = [] - exc_factory = exception_factory or _build_retry_error + deadline: Optional[float] = ( + time.monotonic() + timeout if timeout is not None else None + ) + error_list: list[Exception] = [] + # make a partial with timeout applied + exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 + e, t, timeout + ) for sleep in sleep_generator: # Start a new retry loop @@ -124,26 +127,12 @@ def retry_target_stream( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: - error_list.append(exc) - if not predicate(exc): - final_exc, source_exc = exc_factory( - error_list, - RetryFailureReason.NON_RETRYABLE_ERROR, - timeout, - ) - raise final_exc from source_exc - if on_error is not None: - on_error(exc) - - if deadline is not None and time.monotonic() + sleep > deadline: - final_exc, source_exc = exc_factory( - error_list, RetryFailureReason.TIMEOUT, timeout + # defer to shared logic for handling errors + _retry_error_helper( + exc, deadline, sleep, error_list, predicate, on_error, exc_factory ) - raise final_exc from source_exc - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) - ) - time.sleep(sleep) + # if exception not raised, sleep before next attempt + time.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index fe36afbb..74741b59 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -35,13 +35,12 @@ ) import asyncio -import logging import time import sys import functools from google.api_core.retry.retry_base import _BaseRetry -from google.api_core.retry.retry_base import _LOGGER +from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry import exponential_sleep_generator from google.api_core.retry import _build_retry_error from google.api_core.retry import RetryFailureReason @@ -119,8 +118,10 @@ async def retry_target_stream( deadline: Optional[float] = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: List[Exception] = [] - # override exception_factory to build a more complex exception - exc_factory = exception_factory or _build_retry_error + # make a partial with timeout applied + exc_factory = lambda e, t: (exception_factory or 
_build_retry_error)( # noqa: E731 + e, t, timeout + ) target_is_generator: Optional[bool] = None for sleep in sleep_generator: @@ -185,28 +186,15 @@ async def retry_target_stream( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: - error_list.append(exc) - if not predicate(exc): - exc, source_exc = exc_factory( - error_list, RetryFailureReason.NON_RETRYABLE_ERROR, timeout - ) - raise exc from source_exc - if on_error is not None: - on_error(exc) + # defer to shared logic for handling errors + _retry_error_helper( + exc, deadline, sleep, error_list, predicate, on_error, exc_factory + ) + # if exception not raised, sleep before next attempt + await asyncio.sleep(sleep) finally: if target_is_generator and target_iterator is not None: await cast(AsyncGenerator["_Y", None], target_iterator).aclose() - - # sleep and adjust timeout budget - if deadline is not None and time.monotonic() + sleep > deadline: - final_exc, source_exc = exc_factory( - error_list, RetryFailureReason.TIMEOUT, timeout - ) - raise final_exc from source_exc - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], sleep) - ) - await asyncio.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index c14df667..5106e132 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -56,21 +56,17 @@ def check_if_exists(): from __future__ import annotations -import datetime import functools -import logging import sys import time import inspect import warnings from typing import Any, Callable, TypeVar, TYPE_CHECKING -from google.api_core import datetime_helpers -from google.api_core import exceptions - from google.api_core.retry.retry_base import _BaseRetry -from google.api_core.retry.retry_base import _LOGGER +from google.api_core.retry.retry_base import 
_retry_error_helper from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import _build_retry_error if TYPE_CHECKING: @@ -86,7 +82,13 @@ def check_if_exists(): def retry_target( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs + target, + predicate, + sleep_generator, + timeout=None, + on_error=None, + exception_factory=None, + **kwargs, ): """Call a function and retry if it fails. @@ -119,12 +121,12 @@ def retry_target( timeout = kwargs.get("deadline", timeout) - if timeout is not None: - deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout) - else: - deadline = None - - last_exc = None + deadline = time.monotonic() + timeout if timeout is not None else None + error_list = [] + # make a partial with timeout applied + exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 + e, t, timeout + ) for sleep in sleep_generator: try: @@ -136,28 +138,12 @@ def retry_target( # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
except Exception as exc: - if not predicate(exc): - raise - last_exc = exc - if on_error is not None: - on_error(exc) - - if deadline is not None: - next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep + # defer to shared logic for handling errors + _retry_error_helper( + exc, deadline, sleep, error_list, predicate, on_error, exc_factory ) - if deadline < next_attempt_time: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - time.sleep(sleep) + # if exception not raised, sleep before next attempt + time.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 1d5ebc13..2e79b783 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -54,9 +54,8 @@ async def check_if_exists(): from __future__ import annotations import asyncio -import datetime +import time import functools -import logging from typing import ( Awaitable, Any, @@ -65,11 +64,10 @@ async def check_if_exists(): TYPE_CHECKING, ) -from google.api_core import datetime_helpers -from google.api_core import exceptions from google.api_core.retry.retry_base import _BaseRetry -from google.api_core.retry.retry_base import _LOGGER +from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import _build_retry_error # for backwards compatibility, expose helpers in this module from google.api_core.retry.retry_base import if_exception_type # noqa @@ -93,7 +91,13 @@ async def check_if_exists(): async def retry_target( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs + target, + predicate, + 
sleep_generator, + timeout=None, + on_error=None, + exception_factory=None, + **kwargs, ): """Await a coroutine and retry if it fails. @@ -113,7 +117,7 @@ async def retry_target( callback will be called with each retryable exception raised by the target. Any error raised by this function will *not* be caught. deadline (float): DEPRECATED use ``timeout`` instead. For backward - compatibility, if set it will override the ``timeout`` parameter. + compatibility, if set it will override the ``timeout`` parameter. Returns: Any: the return value of the target function. @@ -126,52 +130,25 @@ async def retry_target( timeout = kwargs.get("deadline", timeout) - deadline_dt = ( - (datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout)) - if timeout - else None + deadline = time.monotonic() + timeout if timeout is not None else None + error_list: list[Exception] = [] + # make a partial with timeout applied + exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 + e, t, timeout ) - last_exc = None - for sleep in sleep_generator: try: - if not deadline_dt: - return await target() - else: - return await asyncio.wait_for( - target(), - timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), - ) + return await target() # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. except Exception as exc: - if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise - last_exc = exc - if on_error is not None: - on_error(exc) - - now = datetime_helpers.utcnow() - - if deadline_dt: - if deadline_dt <= now: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. 
- raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_dt - now).total_seconds() - sleep = min(time_to_deadline, sleep) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - await asyncio.sleep(sleep) + # defer to shared logic for handling errors + _retry_error_helper( + exc, deadline, sleep, error_list, predicate, on_error, exc_factory + ) + # if exception not raised, sleep before next attempt + await asyncio.sleep(sleep) raise ValueError("Sleep generator stopped yielding sleep values.") diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py index 0938e14d..cbc9d3e0 100644 --- a/tests/asyncio/retry/test_retry_unary_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -99,20 +99,15 @@ async def test_retry_target_non_retryable_error(utcnow, sleep): @mock.patch("asyncio.sleep", autospec=True) -@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True) +@mock.patch("time.monotonic", autospec=True) @pytest.mark.asyncio -async def test_retry_target_deadline_exceeded(utcnow, sleep): +async def test_retry_target_deadline_exceeded(monotonic, sleep): predicate = retry_async.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second # call takes 6, which puts the retry over the deadline. - utcnow.side_effect = [ - # The first call to utcnow establishes the start of the timeline. 
- datetime.datetime.min, - datetime.datetime.min + datetime.timedelta(seconds=5), - datetime.datetime.min + datetime.timedelta(seconds=11), - ] + monotonic.side_effect = [0, 5, 11] with pytest.raises(exceptions.RetryError) as exc_info: await retry_async.retry_target(target, predicate, range(10), deadline=10) @@ -221,13 +216,10 @@ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform) initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=9.9, + deadline=30.9, ) - utcnow = datetime.datetime.now(tz=datetime.timezone.utc) - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + monotonic_patcher = mock.patch("time.monotonic", return_value=0) target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10) # __name__ is needed by functools.partial. @@ -236,11 +228,12 @@ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform) decorated = retry_(target, on_error=on_error) target.assert_not_called() - with utcnow_patcher as patched_utcnow: + with monotonic_patcher as patched_monotonic: # Make sure that calls to fake asyncio.sleep() also advance the mocked # time clock. def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + print(sleep_delay) + patched_monotonic.return_value += sleep_delay sleep.side_effect = increase_time @@ -256,8 +249,17 @@ def increase_time(sleep_delay): last_wait = sleep.call_args.args[0] total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - assert last_wait == 2.9 # and not 8.0, because the last delay was shortened - assert total_wait == 9.9 # the same as the deadline + assert last_wait == 8.0 + # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus + # we do not even wait for it to be scheduled (30.9 is configured timeout). + # This changes the previous logic of shortening the last attempt to fit + # in the deadline. 
The previous logic was removed to make Python retry + # logic consistent with the other languages and to not disrupt the + # randomized retry delays distribution by artificially increasing a + # probability of scheduling two (instead of one) last attempts with very + # short delay between them, while the second retry having very low chance + # of succeeding anyways. + assert total_wait == 15.0 @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py index cdc4e18f..e344edb8 100644 --- a/tests/unit/retry/test_retry_unary.py +++ b/tests/unit/retry/test_retry_unary.py @@ -114,25 +114,20 @@ async def test_retry_target_warning_for_retry(utcnow, sleep): @mock.patch("time.sleep", autospec=True) -@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True) -def test_retry_target_deadline_exceeded(utcnow, sleep): +@mock.patch("time.monotonic", autospec=True) +def test_retry_target_deadline_exceeded(monotonic, sleep): predicate = retry.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second # call takes 6, which puts the retry over the deadline. - utcnow.side_effect = [ - # The first call to utcnow establishes the start of the timeline. 
- datetime.datetime.min, - datetime.datetime.min + datetime.timedelta(seconds=5), - datetime.datetime.min + datetime.timedelta(seconds=11), - ] + monotonic.side_effect = [0, 5, 11] with pytest.raises(exceptions.RetryError) as exc_info: retry.retry_target(target, predicate, range(10), deadline=10) assert exc_info.value.cause == exception - assert exc_info.match("Deadline of 10.0s exceeded") + assert exc_info.match("Timeout of 10.0s exceeded") assert exc_info.match("last exception: meep") assert target.call_count == 2 @@ -222,10 +217,7 @@ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): deadline=30.9, ) - utcnow = datetime.datetime.now(tz=datetime.timezone.utc) - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + monotonic_patcher = mock.patch("time.monotonic", return_value=0) target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10) # __name__ is needed by functools.partial. @@ -234,11 +226,11 @@ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): decorated = retry_(target, on_error=on_error) target.assert_not_called() - with utcnow_patcher as patched_utcnow: + with monotonic_patcher as patched_monotonic: # Make sure that calls to fake time.sleep() also advance the mocked # time clock. 
def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + patched_monotonic.return_value += sleep_delay sleep.side_effect = increase_time From b221c8d36e541a438dab73d5601166c1e0f72e0b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 17:18:14 -0800 Subject: [PATCH 171/204] added type hints --- google/api_core/retry/retry_base.py | 40 +++++++++++++------ .../api_core/retry/retry_streaming_async.py | 37 ++++++++--------- google/api_core/retry/retry_unary.py | 25 +++++++----- google/api_core/retry/retry_unary_async.py | 22 ++++++---- 4 files changed, 73 insertions(+), 51 deletions(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index f25b9f6c..c31cc1ba 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -97,7 +97,9 @@ def if_exception_type_predicate(exception: Exception) -> bool: # pylint: enable=invalid-name -def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER): +def exponential_sleep_generator( + initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER +): """Generates sleep intervals based on the exponential back-off algorithm. This implements the `Truncated Exponential Back-off`_ algorithm. 
@@ -167,7 +169,16 @@ def _build_retry_error( def _retry_error_helper( - exc, deadline, next_sleep, error_list, predicate_fn, on_error_fn, exc_factory_fn + exc: Exception, + deadline: float | None, + next_sleep: float, + error_list: list[Exception], + predicate_fn: Callable[[Exception], bool], + on_error_fn: Callable[[Exception], None] | None, + exc_factory_fn: Callable[ + [list[Exception], RetryFailureReason], + tuple[Exception, Exception | None], + ], ): """ Shared logic for handling an error for all retry implementations @@ -248,12 +259,12 @@ def timeout(self) -> float | None: def _replace( self, - predicate=None, - initial=None, - maximum=None, - multiplier=None, - timeout=None, - on_error=None, + predicate: Callable[[Exception], bool] | None = None, + initial: float | None = None, + maximum: float | None = None, + multiplier: float | None = None, + timeout: float | None = None, + on_error: Callable[[Exception], Any] | None = None, ) -> Self: return type(self)( predicate=predicate or self._predicate, @@ -264,7 +275,7 @@ def _replace( on_error=on_error or self._on_error, ) - def with_deadline(self, deadline) -> Self: + def with_deadline(self, deadline: float | None) -> Self: """Return a copy of this retry with the given timeout. DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class @@ -278,7 +289,7 @@ def with_deadline(self, deadline) -> Self: """ return self._replace(timeout=deadline) - def with_timeout(self, timeout) -> Self: + def with_timeout(self, timeout: float) -> Self: """Return a copy of this retry with the given timeout. Args: @@ -289,7 +300,7 @@ def with_timeout(self, timeout) -> Self: """ return self._replace(timeout=timeout) - def with_predicate(self, predicate) -> Self: + def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self: """Return a copy of this retry with the given predicate. 
Args: @@ -301,7 +312,12 @@ def with_predicate(self, predicate) -> Self: """ return self._replace(predicate=predicate) - def with_delay(self, initial=None, maximum=None, multiplier=None) -> Self: + def with_delay( + self, + initial: float | None = None, + maximum: float | None = None, + multiplier: float | None = None, + ) -> Self: """Return a copy of this retry with the given delay options. Args: diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index 74741b59..1d33df9f 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -21,14 +21,10 @@ cast, Any, Callable, - Optional, Iterable, - List, - Tuple, AsyncIterator, AsyncIterable, Awaitable, - Union, TypeVar, AsyncGenerator, TYPE_CHECKING, @@ -57,21 +53,20 @@ async def retry_target_stream( - target: Callable[_P, AsyncIterable["_Y"] | Awaitable[AsyncIterable["_Y"]]], + target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], predicate: Callable[[Exception], bool], sleep_generator: Iterable[float], - timeout: Optional[float] = None, - on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[ - Callable[ - [List[Exception], RetryFailureReason, Optional[float]], - Tuple[Exception, Optional[Exception]], - ] - ] = None, + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] + | None = None, init_args: _P.args = (), init_kwargs: _P.kwargs = {}, **kwargs, -) -> AsyncGenerator["_Y", None]: +) -> AsyncGenerator[_Y, None]: """Create a generator wrapper that retries the wrapped stream if it fails. This is the lowest-level retry helper. 
Generally, you'll use the @@ -113,16 +108,16 @@ async def retry_target_stream( """ # create frozen partial from original call args # In the future, we can add a ResumptionStrategy object that creates new kwargs between calls - target_iterator: Optional[AsyncIterator[_Y]] = None + target_iterator: AsyncIterator[_Y] | None = None timeout = kwargs.get("deadline", timeout) - deadline: Optional[float] = time.monotonic() + timeout if timeout else None + deadline = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory - error_list: List[Exception] = [] + error_list: list[Exception] = [] # make a partial with timeout applied exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 e, t, timeout ) - target_is_generator: Optional[bool] = None + target_is_generator: bool | None = None for sleep in sleep_generator: # Start a new retry loop @@ -130,9 +125,9 @@ async def retry_target_stream( # Note: in the future, we can add a ResumptionStrategy object # to generate new args between calls. For now, use the same args # for each attempt. 
- target_output: Union[ - AsyncIterable[_Y], Awaitable[AsyncIterable[_Y]] - ] = target(*init_args, **init_kwargs) + target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target( + *init_args, **init_kwargs + ) try: # gapic functions return the generator behind an awaitable # unwrap the awaitable so we can work with the generator directly diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 5106e132..87aaacc4 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -61,12 +61,13 @@ def check_if_exists(): import time import inspect import warnings -from typing import Any, Callable, TypeVar, TYPE_CHECKING +from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING from google.api_core.retry.retry_base import _BaseRetry from google.api_core.retry.retry_base import _retry_error_helper -from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import _build_retry_error +from google.api_core.retry.retry_base import exponential_sleep_generator +from google.api_core.retry.retry_base import _build_retry_error +from google.api_core.retry.retry_base import RetryFailureReason if TYPE_CHECKING: @@ -82,12 +83,16 @@ def check_if_exists(): def retry_target( - target, - predicate, - sleep_generator, - timeout=None, - on_error=None, - exception_factory=None, + target: Callable[_P, _R], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] + | None = None, **kwargs, ): """Call a function and retry if it fails. 
@@ -122,7 +127,7 @@ def retry_target( timeout = kwargs.get("deadline", timeout) deadline = time.monotonic() + timeout if timeout is not None else None - error_list = [] + error_list: list[Exception] = [] # make a partial with timeout applied exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 e, t, timeout diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 2e79b783..24c7fd0a 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -60,14 +60,16 @@ async def check_if_exists(): Awaitable, Any, Callable, + Iterable, TypeVar, TYPE_CHECKING, ) from google.api_core.retry.retry_base import _BaseRetry from google.api_core.retry.retry_base import _retry_error_helper -from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import _build_retry_error +from google.api_core.retry.retry_base import exponential_sleep_generator +from google.api_core.retry.retry_base import _build_retry_error +from google.api_core.retry.retry_base import RetryFailureReason # for backwards compatibility, expose helpers in this module from google.api_core.retry.retry_base import if_exception_type # noqa @@ -91,12 +93,16 @@ async def check_if_exists(): async def retry_target( - target, - predicate, - sleep_generator, - timeout=None, - on_error=None, - exception_factory=None, + target: Callable[_P, Awaitable[_R]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] + | None = None, **kwargs, ): """Await a coroutine and retry if it fails. 
From b5b4534cff3cdd29dd2d72bb696730bad7a6a735 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 17:22:33 -0800 Subject: [PATCH 172/204] removed costly awaitable check --- google/api_core/retry/retry_unary.py | 9 +-------- tests/unit/retry/test_retry_unary.py | 20 -------------------- 2 files changed, 1 insertion(+), 28 deletions(-) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 87aaacc4..1708395a 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -59,8 +59,6 @@ def check_if_exists(): import functools import sys import time -import inspect -import warnings from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING from google.api_core.retry.retry_base import _BaseRetry @@ -79,8 +77,6 @@ def check_if_exists(): _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value -_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." - def retry_target( target: Callable[_P, _R], @@ -135,10 +131,7 @@ def retry_target( for sleep in sleep_generator: try: - result = target() - if inspect.isawaitable(result): - warnings.warn(_ASYNC_RETRY_WARNING) - return result + return target() # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. 
diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py index e344edb8..9a3750d8 100644 --- a/tests/unit/retry/test_retry_unary.py +++ b/tests/unit/retry/test_retry_unary.py @@ -93,26 +93,6 @@ def test_retry_target_non_retryable_error(utcnow, sleep): sleep.assert_not_called() -@mock.patch("asyncio.sleep", autospec=True) -@mock.patch( - "google.api_core.datetime_helpers.utcnow", - return_value=datetime.datetime.min, - autospec=True, -) -@pytest.mark.asyncio -async def test_retry_target_warning_for_retry(utcnow, sleep): - predicate = retry.if_exception_type(ValueError) - target = mock.AsyncMock(spec=["__call__"]) - - with pytest.warns(Warning) as exc_info: - # Note: predicate is just a filler and doesn't affect the test - retry.retry_target(target, predicate, range(10), None) - - assert len(exc_info) == 2 - assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING - sleep.assert_not_called() - - @mock.patch("time.sleep", autospec=True) @mock.patch("time.monotonic", autospec=True) def test_retry_target_deadline_exceeded(monotonic, sleep): From 0f1145d9d94c7b9d5021a43328a555914380e2c0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 17:24:57 -0800 Subject: [PATCH 173/204] revised docstring --- google/api_core/retry/retry_unary.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 1708395a..2902094a 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -97,7 +97,7 @@ def retry_target( higher-level retry helper :class:`Retry`. Args: - target(Callable[[], Any]): The function to call and retry. This must be a + target(Callable): The function to call and retry. This must be a nullary function - apply arguments with `functools.partial`. predicate (Callable[Exception]): A callable used to determine if an exception raised by the target should be considered retryable. 
From 84085122df7a41b83d081411b35e11c429b58b85 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 17:26:01 -0800 Subject: [PATCH 174/204] added exception_factory docstrings --- google/api_core/retry/retry_unary.py | 8 ++++++++ google/api_core/retry/retry_unary_async.py | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 2902094a..3ba3e658 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -108,6 +108,14 @@ def retry_target( on_error (Optional[Callable[Exception]]): If given, the on_error callback will be called with each retryable exception raised by the target. Any error raised by this function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It it given a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. + If not provided, a default implementation will raise a RetryError + on timeout, or the last exception encountered otherwise. deadline (float): DEPRECATED: use ``timeout`` instead. For backward compatibility, if specified it will override ``timeout`` parameter. diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 24c7fd0a..9cf65cdc 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -122,6 +122,14 @@ async def retry_target( on_error (Optional[Callable[Exception]]): If given, the on_error callback will be called with each retryable exception raised by the target. Any error raised by this function will *not* be caught. 
+ exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It it given a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. + If not provided, a default implementation will raise a RetryError + on timeout, or the last exception encountered otherwise. deadline (float): DEPRECATED use ``timeout`` instead. For backward compatibility, if set it will override the ``timeout`` parameter. From aa69c563dac73a945e55886bb9c122a00f69dd69 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 30 Nov 2023 17:32:30 -0800 Subject: [PATCH 175/204] Revert "removed costly awaitable check" This reverts commit b5b4534cff3cdd29dd2d72bb696730bad7a6a735. --- google/api_core/retry/retry_unary.py | 9 ++++++++- tests/unit/retry/test_retry_unary.py | 20 ++++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 3ba3e658..c41841a2 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -59,6 +59,8 @@ def check_if_exists(): import functools import sys import time +import inspect +import warnings from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING from google.api_core.retry.retry_base import _BaseRetry @@ -77,6 +79,8 @@ def check_if_exists(): _P = ParamSpec("_P") # target function call parameters _R = TypeVar("_R") # target function returned value +_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." 
+ def retry_target( target: Callable[_P, _R], @@ -139,7 +143,10 @@ def retry_target( for sleep in sleep_generator: try: - return target() + result = target() + if inspect.isawaitable(result): + warnings.warn(_ASYNC_RETRY_WARNING) + return result # pylint: disable=broad-except # This function explicitly must deal with broad exceptions. diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py index 9a3750d8..e344edb8 100644 --- a/tests/unit/retry/test_retry_unary.py +++ b/tests/unit/retry/test_retry_unary.py @@ -93,6 +93,26 @@ def test_retry_target_non_retryable_error(utcnow, sleep): sleep.assert_not_called() +@mock.patch("asyncio.sleep", autospec=True) +@mock.patch( + "google.api_core.datetime_helpers.utcnow", + return_value=datetime.datetime.min, + autospec=True, +) +@pytest.mark.asyncio +async def test_retry_target_warning_for_retry(utcnow, sleep): + predicate = retry.if_exception_type(ValueError) + target = mock.AsyncMock(spec=["__call__"]) + + with pytest.warns(Warning) as exc_info: + # Note: predicate is just a filler and doesn't affect the test + retry.retry_target(target, predicate, range(10), None) + + assert len(exc_info) == 2 + assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING + sleep.assert_not_called() + + @mock.patch("time.sleep", autospec=True) @mock.patch("time.monotonic", autospec=True) def test_retry_target_deadline_exceeded(monotonic, sleep): From d1ac29d564a1401446a0310ef5356ddcfd0548bb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 11:56:32 -0800 Subject: [PATCH 176/204] renamed variable --- google/api_core/retry/retry_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index c31cc1ba..5a084a87 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -116,10 +116,10 @@ def exponential_sleep_generator( Yields: float: successive sleep 
intervals. """ - delay = min(initial, maximum) + max_delay = min(initial, maximum) while True: - yield random.uniform(0.0, delay) - delay = min(delay * multiplier, maximum) + yield random.uniform(0.0, max_delay) + max_delay = min(max_delay * multiplier, maximum) class RetryFailureReason(Enum): From 3ab88fca081e8f496877a530638e23b91fc76fd9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 19:57:18 +0000 Subject: [PATCH 177/204] update docstring Co-authored-by: Victor Chudnovsky --- google/api_core/retry/retry_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index 5a084a87..f6cc503f 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -144,7 +144,7 @@ def _build_retry_error( - exc_list: list of exceptions that occurred during the retry - reason: reason for the retry failure. Can be TIMEOUT or NON_RETRYABLE_ERROR - - timeout_val: the original timeout value for the retry, for use in the exception message + - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message Returns: - tuple: a tuple of the exception to be raised, and the cause exception if any From 382d0e291d3063f955b1229c3a1ded9709d62347 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 19:57:36 +0000 Subject: [PATCH 178/204] add punctuation Co-authored-by: Victor Chudnovsky --- google/api_core/retry/retry_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index f6cc503f..ee1093ac 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -138,7 +138,7 @@ def _build_retry_error( **kwargs: Any, ) -> tuple[Exception, Exception | None]: """ - Default exception_factory implementation. Builds an exception after the retry fails + Default exception_factory implementation. 
Builds an exception after the retry fails. Args: - exc_list: list of exceptions that occurred during the retry From 4258823b9d5afb08c382b41581435d13860fbff2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 20:09:38 +0000 Subject: [PATCH 179/204] punctuation Co-authored-by: Victor Chudnovsky --- google/api_core/retry/retry_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index ee1093ac..1ba44dd9 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -282,7 +282,7 @@ def with_deadline(self, deadline: float | None) -> Self: documentation for details. Args: - deadline (float): How long to keep retrying in seconds. + deadline (float): How long to keep retrying, in seconds. Returns: Retry: A new retry instance with the given timeout. From 1bc9731456525f9c5e50f172bb25192b47a9085f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 20:12:17 +0000 Subject: [PATCH 180/204] update docstrings Co-authored-by: Victor Chudnovsky --- google/api_core/retry/retry_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index 1ba44dd9..880eecb0 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -321,9 +321,9 @@ def with_delay( """Return a copy of this retry with the given delay options. Args: - initial (float): The minimum amount of time to delay. This must + initial (float): The minimum amount of time to delay (in seconds). This must be greater than 0. - maximum (float): The maximum amount of time to delay. + maximum (float): The maximum amount of time to delay (in seconds). multiplier (float): The multiplier applied to the delay. 
Returns: From aafe0576f1ce3d1912415bd2c88cd91fa5821f33 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 12:17:55 -0800 Subject: [PATCH 181/204] changed deadline to timeout --- google/api_core/retry/retry_unary.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index c41841a2..2c110bae 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -127,7 +127,7 @@ def retry_target( Any: the return value of the target function. Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. + google.api_core.RetryError: If the timeout is exceeded while retrying. ValueError: If the sleep generator stops yielding values. Exception: If the target raises a method that isn't retryable. """ From 809522970b30a4f04fb43d9ae29548877c945ccb Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 12:30:50 -0800 Subject: [PATCH 182/204] updated deadlien to timeout in docstrings --- google/api_core/retry/retry_unary.py | 6 +++--- google/api_core/retry/retry_unary_async.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 2c110bae..991383bd 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -16,8 +16,8 @@ The :class:`Retry` decorator can be used to retry functions that raise exceptions using exponential backoff. Because a exponential sleep algorithm is -used, the retry is limited by a `deadline`. The deadline is the maximum amount -of time a method can block. This is used instead of total number of retries +used, the retry is limited by a `timeout`. The timeout determines the window +in which retries will be attempted. 
This is used instead of total number of retries because it is difficult to ascertain the amount of time a function can block when using total number of retries and exponential backoff. @@ -49,7 +49,7 @@ def check_if_exists(): .. code-block:: python - my_retry = retry.Retry(deadline=60) + my_retry = retry.Retry(timeout=60) result = client.some_method(retry=my_retry) """ diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 9cf65cdc..e7d27f26 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -46,7 +46,7 @@ async def check_if_exists(): .. code-block:: python - my_retry = retry_async.AsyncRetry(deadline=60) + my_retry = retry_async.AsyncRetry(timeout=60) result = await client.some_method(retry=my_retry) """ From de9f518e331845ec6232dff8793e0c2bd2c52e83 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 20:33:51 +0000 Subject: [PATCH 183/204] update docstring Co-authored-by: Victor Chudnovsky --- google/api_core/retry/retry_unary_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index e7d27f26..856c5cf2 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -137,7 +137,7 @@ async def retry_target( Any: the return value of the target function. Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. + google.api_core.RetryError: If the timeout is exceeded while retrying. ValueError: If the sleep generator stops yielding values. Exception: If the target raises a method that isn't retryable. 
""" From 786466742141f5bf1cf7be94d5e3534fca1d56b3 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 20:54:35 +0000 Subject: [PATCH 184/204] update test comment Co-authored-by: Victor Chudnovsky --- tests/unit/retry/test_retry_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py index 06488839..db6dd01e 100644 --- a/tests/unit/retry/test_retry_base.py +++ b/tests/unit/retry/test_retry_base.py @@ -91,7 +91,7 @@ def test__build_retry_error_timeout_message(): def test__build_retry_error_empty_timeout(): """ - attempt to build a retry error with timout is None + attempt to build a retry error when timeout is None should return a generic timeout error message """ from google.api_core.retry import _build_retry_error From 4c243229e4ee93fb37adcd289470e747532bd8d0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 12:39:46 -0800 Subject: [PATCH 185/204] update docstrings --- google/api_core/retry/retry_streaming.py | 4 ++-- google/api_core/retry/retry_streaming_async.py | 4 ++-- google/api_core/retry/retry_unary.py | 8 +++++--- google/api_core/retry/retry_unary_async.py | 8 +++++--- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index e849781a..393efd4b 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -85,7 +85,7 @@ def retry_target_stream( function will *not* be caught. exception_factory: A function that is called when the retryable reaches a terminal failure state, used to construct an exception to be raised. - It it given a list of all exceptions encountered, a retry.RetryFailureReason + It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. 
It should return a tuple of the exception to be raised, along with the cause exception if any. @@ -101,7 +101,7 @@ def retry_target_stream( ValueError: If the sleep generator stops yielding values. Exception: a custom exception specified by the exception_factory if provided. If no exception_factory is provided: - google.api_core.RetryError: If the deadline is exceeded while retrying. + google.api_core.RetryError: If the timeout is exceeded while retrying. Exception: If the target raises an error that isn't retryable. """ diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index 1d33df9f..c8f57fb4 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -87,7 +87,7 @@ async def retry_target_stream( function will *not* be caught. exception_factory: A function that is called when the retryable reaches a terminal failure state, used to construct an exception to be raised. - It it given a list of all exceptions encountered, a retry.RetryFailureReason + It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, along with the cause exception if any. @@ -103,7 +103,7 @@ async def retry_target_stream( ValueError: If the sleep generator stops yielding values. Exception: a custom exception specified by the exception_factory if provided. If no exception_factory is provided: - google.api_core.RetryError: If the deadline is exceeded while retrying. + google.api_core.RetryError: If the timeout is exceeded while retrying. Exception: If the target raises an error that isn't retryable. 
""" # create frozen partial from original call args diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 991383bd..12887ed6 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -114,7 +114,7 @@ def retry_target( target. Any error raised by this function will *not* be caught. exception_factory: A function that is called when the retryable reaches a terminal failure state, used to construct an exception to be raised. - It it given a list of all exceptions encountered, a retry.RetryFailureReason + It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, along with the cause exception if any. @@ -127,9 +127,11 @@ def retry_target( Any: the return value of the target function. Raises: - google.api_core.RetryError: If the timeout is exceeded while retrying. ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. """ timeout = kwargs.get("deadline", timeout) diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 856c5cf2..2360b5fa 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -124,7 +124,7 @@ async def retry_target( target. Any error raised by this function will *not* be caught. exception_factory: A function that is called when the retryable reaches a terminal failure state, used to construct an exception to be raised. 
- It it given a list of all exceptions encountered, a retry.RetryFailureReason + It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, along with the cause exception if any. @@ -137,9 +137,11 @@ async def retry_target( Any: the return value of the target function. Raises: - google.api_core.RetryError: If the timeout is exceeded while retrying. ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. """ timeout = kwargs.get("deadline", timeout) From 78555135cb07dccd7797a710b4cbb0754271eec1 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 12:45:24 -0800 Subject: [PATCH 186/204] removed unneeded comments --- google/api_core/retry/retry_streaming_async.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index c8f57fb4..b4027af2 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -106,8 +106,6 @@ async def retry_target_stream( google.api_core.RetryError: If the timeout is exceeded while retrying. Exception: If the target raises an error that isn't retryable. 
""" - # create frozen partial from original call args - # In the future, we can add a ResumptionStrategy object that creates new kwargs between calls target_iterator: AsyncIterator[_Y] | None = None timeout = kwargs.get("deadline", timeout) deadline = time.monotonic() + timeout if timeout else None From f4bfb0295e4295692caecc013ef2a04f07c5c4de Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 12:54:06 -0800 Subject: [PATCH 187/204] improved docstrings --- google/api_core/retry/retry_streaming.py | 10 +++++----- google/api_core/retry/retry_unary.py | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index 393efd4b..57b40aed 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -147,7 +147,7 @@ class StreamingRetry(_BaseRetry): Although the default behavior is to retry transient API errors, a different predicate can be provided to retry other exceptions. - There two important concepts that retry/polling behavior may operate on, + There are two important concepts that retry/polling behavior may operate on, Deadline and Timeout, which need to be properly defined for the correct usage of this class and the rest of the library. @@ -163,12 +163,12 @@ class StreamingRetry(_BaseRetry): 09:24:00 with timeout of 75 seconds, it must terminate no later than 09:25:15. - Unfortunately, in the past this class (and the api-core library as a whole) has not been - properly distinguishing the concepts of "timeout" and "deadline", and the + Unfortunately, in the past this class (and the api-core library as a whole) has not + been properly distinguishing the concepts of "timeout" and "deadline", and the ``deadline`` parameter has meant ``timeout``. That is why ``deadline`` has been deprecated and ``timeout`` should be used instead. 
If the - ``deadline`` parameter is set, it will override the ``timeout`` parameter. In other words, - ``retry.deadline`` should be treated as just a deprecated alias for + ``deadline`` parameter is set, it will override the ``timeout`` parameter. + In other words, ``retry.deadline`` should be treated as just a deprecated alias for ``retry.timeout``. Said another way, it is safe to assume that this class and the rest of this diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index 12887ed6..a3b17411 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -172,7 +172,7 @@ class Retry(_BaseRetry): Although the default behavior is to retry transient API errors, a different predicate can be provided to retry other exceptions. - There two important concepts that retry/polling behavior may operate on, + There are two important concepts that retry/polling behavior may operate on, Deadline and Timeout, which need to be properly defined for the correct usage of this class and the rest of the library. @@ -188,12 +188,12 @@ class Retry(_BaseRetry): 09:24:00 with timeout of 75 seconds, it must terminate no later than 09:25:15. - Unfortunately, in the past this class (and the api-core library as a whole) has not been - properly distinguishing the concepts of "timeout" and "deadline", and the + Unfortunately, in the past this class (and the api-core library as a whole) has not + been properly distinguishing the concepts of "timeout" and "deadline", and the ``deadline`` parameter has meant ``timeout``. That is why ``deadline`` has been deprecated and ``timeout`` should be used instead. If the - ``deadline`` parameter is set, it will override the ``timeout`` parameter. In other words, - ``retry.deadline`` should be treated as just a deprecated alias for + ``deadline`` parameter is set, it will override the ``timeout`` parameter. 
+ In other words, ``retry.deadline`` should be treated as just a deprecated alias for ``retry.timeout``. Said another way, it is safe to assume that this class and the rest of this From a88cf6ff07603d0dead3ba4a1484812c3d18a5f8 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:05:29 -0800 Subject: [PATCH 188/204] use timeout in tests --- .../retry/test_retry_streaming_async.py | 11 +++++-- tests/asyncio/retry/test_retry_unary_async.py | 20 +++++++----- tests/unit/retry/test_retry_base.py | 32 +++++++++---------- tests/unit/retry/test_retry_streaming.py | 11 +++++-- tests/unit/retry/test_retry_unary.py | 18 +++++++---- 5 files changed, 55 insertions(+), 37 deletions(-) diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py index 7e08dcb0..0cd89fdc 100644 --- a/tests/asyncio/retry/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -41,7 +41,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( @@ -129,21 +129,26 @@ async def test___call___generator_retry(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.parametrize("use_deadline_arg", [True, False]) @pytest.mark.asyncio - async def test___call___generator_retry_hitting_deadline(self, sleep, uniform): + async def test___call___generator_retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): """ Tests that a retry-decorated generator will throw a RetryError after using the time budget """ import time + timeout_val = 9.9 + # support "deadline" as an alias for "timeout" + timout_kwarg = {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + on_error = mock.Mock() retry_ = retry_streaming_async.AsyncStreamingRetry( predicate=retry_async.if_exception_type(ValueError), initial=1.0, 
maximum=1024.0, multiplier=2.0, - deadline=9.9, + **timout_kwarg, ) time_now = time.monotonic() diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py index cbc9d3e0..a4995d57 100644 --- a/tests/asyncio/retry/test_retry_unary_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -100,17 +100,22 @@ async def test_retry_target_non_retryable_error(utcnow, sleep): @mock.patch("asyncio.sleep", autospec=True) @mock.patch("time.monotonic", autospec=True) +@pytest.mark.parametrize("use_deadline_arg", [True, False]) @pytest.mark.asyncio -async def test_retry_target_deadline_exceeded(monotonic, sleep): +async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): predicate = retry_async.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second - # call takes 6, which puts the retry over the deadline. + # call takes 6, which puts the retry over the timeout. 
monotonic.side_effect = [0, 5, 11] + timeout_val = 10 + # support "deadline" as an alias for "timeout" + timout_kwarg = {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + with pytest.raises(exceptions.RetryError) as exc_info: - await retry_async.retry_target(target, predicate, range(10), deadline=10) + await retry_async.retry_target(target, predicate, range(10), **timout_kwarg) assert exc_info.value.cause == exception assert exc_info.match("Timeout of 10.0s exceeded") @@ -153,7 +158,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( @@ -209,14 +214,14 @@ async def test___call___and_execute_retry(self, sleep, uniform): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): + async def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform): on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=30.9, + timeout=30.9, ) monotonic_patcher = mock.patch("time.monotonic", return_value=0) @@ -232,7 +237,6 @@ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform) # Make sure that calls to fake asyncio.sleep() also advance the mocked # time clock. def increase_time(sleep_delay): - print(sleep_delay) patched_monotonic.return_value += sleep_delay sleep.side_effect = increase_time @@ -253,7 +257,7 @@ def increase_time(sleep_delay): # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus # we do not even wait for it to be scheduled (30.9 is configured timeout). # This changes the previous logic of shortening the last attempt to fit - # in the deadline. 
The previous logic was removed to make Python retry + # in the timeout. The previous logic was removed to make Python retry # logic consistent with the other languages and to not disrupt the # randomized retry delays distribution by artificially increasing a # probability of scheduling two (instead of one) last attempts with very diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py index db6dd01e..2eda4b5d 100644 --- a/tests/unit/retry/test_retry_base.py +++ b/tests/unit/retry/test_retry_base.py @@ -113,9 +113,9 @@ def test_constructor_defaults(self): assert retry_._initial == 1 assert retry_._maximum == 60 assert retry_._multiplier == 2 - assert retry_._deadline == 120 + assert retry_._timeout == 120 assert retry_._on_error is None - assert retry_.deadline == 120 + assert retry_.timeout == 120 assert retry_.timeout == 120 def test_constructor_options(self): @@ -126,28 +126,28 @@ def test_constructor_options(self): initial=1, maximum=2, multiplier=3, - deadline=4, + timeout=4, on_error=_some_function, ) assert retry_._predicate == mock.sentinel.predicate assert retry_._initial == 1 assert retry_._maximum == 2 assert retry_._multiplier == 3 - assert retry_._deadline == 4 + assert retry_._timeout == 4 assert retry_._on_error is _some_function - def test_with_deadline(self): + def test_with_timeout(self): retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, maximum=2, multiplier=3, - deadline=4, + timeout=4, on_error=mock.sentinel.on_error, ) - new_retry = retry_.with_deadline(42) + new_retry = retry_.with_timeout(42) assert retry_ is not new_retry - assert new_retry._deadline == 42 + assert new_retry._timeout == 42 # the rest of the attributes should remain the same assert new_retry._predicate is retry_._predicate @@ -162,7 +162,7 @@ def test_with_predicate(self): initial=1, maximum=2, multiplier=3, - deadline=4, + timeout=4, on_error=mock.sentinel.on_error, ) new_retry = 
retry_.with_predicate(mock.sentinel.predicate) @@ -170,7 +170,7 @@ def test_with_predicate(self): assert new_retry._predicate == mock.sentinel.predicate # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline + assert new_retry._timeout == retry_._timeout assert new_retry._initial == retry_._initial assert new_retry._maximum == retry_._maximum assert new_retry._multiplier == retry_._multiplier @@ -182,7 +182,7 @@ def test_with_delay_noop(self): initial=1, maximum=2, multiplier=3, - deadline=4, + timeout=4, on_error=mock.sentinel.on_error, ) new_retry = retry_.with_delay() @@ -197,7 +197,7 @@ def test_with_delay(self): initial=1, maximum=2, multiplier=3, - deadline=4, + timeout=4, on_error=mock.sentinel.on_error, ) new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) @@ -207,7 +207,7 @@ def test_with_delay(self): assert new_retry._multiplier == 7 # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline + assert new_retry._timeout == retry_._timeout assert new_retry._predicate is retry_._predicate assert new_retry._on_error is retry_._on_error @@ -217,7 +217,7 @@ def test_with_delay_partial_options(self): initial=1, maximum=2, multiplier=3, - deadline=4, + timeout=4, on_error=mock.sentinel.on_error, ) new_retry = retry_.with_delay(initial=4) @@ -239,7 +239,7 @@ def test_with_delay_partial_options(self): assert new_retry._multiplier == 4 # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline + assert new_retry._timeout == retry_._timeout assert new_retry._predicate is retry_._predicate assert new_retry._on_error is retry_._on_error @@ -254,7 +254,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py index e2b3861d..41188689 100644 --- 
a/tests/unit/retry/test_retry_streaming.py +++ b/tests/unit/retry/test_retry_streaming.py @@ -46,7 +46,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( @@ -134,20 +134,25 @@ def test___call___retry(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) - def test___call___retry_hitting_deadline(self, sleep, uniform): + @pytest.mark.parametrize("use_deadline_arg", [True, False]) + def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): """ Tests that a retry-decorated generator will throw a RetryError after using the time budget """ import time + timeout_val = 30.9 + # support "deadline" as an alias for "timeout" + timout_kwarg = {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + on_error = mock.Mock(return_value=None) retry_ = retry_streaming.StreamingRetry( predicate=retry.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=30.9, + **timout_kwarg, ) timenow = time.monotonic() diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py index e344edb8..5f054f28 100644 --- a/tests/unit/retry/test_retry_unary.py +++ b/tests/unit/retry/test_retry_unary.py @@ -115,16 +115,20 @@ async def test_retry_target_warning_for_retry(utcnow, sleep): @mock.patch("time.sleep", autospec=True) @mock.patch("time.monotonic", autospec=True) -def test_retry_target_deadline_exceeded(monotonic, sleep): +@pytest.mark.parametrize("use_deadline_arg", [True, False]) +def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): predicate = retry.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second - # call takes 6, which puts the retry over the deadline. 
+ # call takes 6, which puts the retry over the timeout. monotonic.side_effect = [0, 5, 11] + # support "deadline" as an alias for "timeout" + kwargs = {"timeout": 10} if use_deadline_arg else {"deadline": 10} + with pytest.raises(exceptions.RetryError) as exc_info: - retry.retry_target(target, predicate, range(10), deadline=10) + retry.retry_target(target, predicate, range(10), **kwargs) assert exc_info.value.cause == exception assert exc_info.match("Timeout of 10.0s exceeded") @@ -156,7 +160,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( @@ -207,14 +211,14 @@ def test___call___and_execute_retry(self, sleep, uniform): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) - def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): + def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform): on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=30.9, + timeout=30.9, ) monotonic_patcher = mock.patch("time.monotonic", return_value=0) @@ -250,7 +254,7 @@ def increase_time(sleep_delay): # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus # we do not even wait for it to be scheduled (30.9 is configured timeout). # This changes the previous logic of shortening the last attempt to fit - # in the deadline. The previous logic was removed to make Python retry + # in the timeout. 
The previous logic was removed to make Python retry # logic consistent with the other languages and to not disrupt the # randomized retry delays distribution by artificially increasing a # probability of scheduling two (instead of one) last attempts with very From b5c62e109871f9ceb6e1c97b424f06c1d49f24d2 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 5 Dec 2023 21:16:06 +0000 Subject: [PATCH 189/204] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= =?UTF-8?q?=20post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/asyncio/retry/test_retry_streaming_async.py | 10 ++++++++-- tests/asyncio/retry/test_retry_unary_async.py | 4 +++- tests/unit/retry/test_retry_streaming.py | 6 +++++- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py index 0cd89fdc..7daa7f86 100644 --- a/tests/asyncio/retry/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -131,7 +131,9 @@ async def test___call___generator_retry(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.parametrize("use_deadline_arg", [True, False]) @pytest.mark.asyncio - async def test___call___generator_retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): + async def test___call___generator_retry_hitting_timeout( + self, sleep, uniform, use_deadline_arg + ): """ Tests that a retry-decorated generator will throw a RetryError after using the time budget @@ -140,7 +142,11 @@ async def test___call___generator_retry_hitting_timeout(self, sleep, uniform, us timeout_val = 9.9 # support "deadline" as an alias for "timeout" - timout_kwarg = {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + timout_kwarg = ( + {"timeout": timeout_val} + if not use_deadline_arg + else 
{"deadline": timeout_val} + ) on_error = mock.Mock() retry_ = retry_streaming_async.AsyncStreamingRetry( diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py index a4995d57..97e9522a 100644 --- a/tests/asyncio/retry/test_retry_unary_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -112,7 +112,9 @@ async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg) timeout_val = 10 # support "deadline" as an alias for "timeout" - timout_kwarg = {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + timout_kwarg = ( + {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + ) with pytest.raises(exceptions.RetryError) as exc_info: await retry_async.retry_target(target, predicate, range(10), **timout_kwarg) diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py index 41188689..ce88c95a 100644 --- a/tests/unit/retry/test_retry_streaming.py +++ b/tests/unit/retry/test_retry_streaming.py @@ -144,7 +144,11 @@ def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): timeout_val = 30.9 # support "deadline" as an alias for "timeout" - timout_kwarg = {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + timout_kwarg = ( + {"timeout": timeout_val} + if not use_deadline_arg + else {"deadline": timeout_val} + ) on_error = mock.Mock(return_value=None) retry_ = retry_streaming.StreamingRetry( From 852f4f83b22fc5e9a675308f0f36c0b1618ea85f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:16:28 -0800 Subject: [PATCH 190/204] moved test to proper place --- tests/asyncio/retry/test_retry_streaming_async.py | 8 ++++++++ tests/unit/retry/test_retry_unary.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py index 
7daa7f86..efa0dff2 100644 --- a/tests/asyncio/retry/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -26,6 +26,14 @@ from ...unit.retry.test_retry_base import Test_BaseRetry +@pytest.mark.asyncio +async def test_retry_streaming_target_bad_sleep_generator(): + from google.api_core.retry.retry_streaming_async import retry_target_stream + + with pytest.raises(ValueError, match="Sleep generator"): + await retry_target_stream(None, None, [], None).__anext__() + + class TestAsyncStreamingRetry(Test_BaseRetry): def _make_one(self, *args, **kwargs): return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs) diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py index 5f054f28..7dcd8dd6 100644 --- a/tests/unit/retry/test_retry_unary.py +++ b/tests/unit/retry/test_retry_unary.py @@ -125,7 +125,7 @@ def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): monotonic.side_effect = [0, 5, 11] # support "deadline" as an alias for "timeout" - kwargs = {"timeout": 10} if use_deadline_arg else {"deadline": 10} + kwargs = {"timeout": 10} if not use_deadline_arg else {"deadline": 10} with pytest.raises(exceptions.RetryError) as exc_info: retry.retry_target(target, predicate, range(10), **kwargs) From cd8323ef8ed563c6698e3eca5e2ee6c85310c474 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:23:03 -0800 Subject: [PATCH 191/204] added test comments; fixed asserts --- tests/asyncio/retry/test_retry_streaming_async.py | 10 ++++++---- tests/unit/retry/test_retry_streaming.py | 1 + 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py index efa0dff2..bc3e7afb 100644 --- a/tests/asyncio/retry/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -204,7 +204,7 @@ async def test___call___generator_cancellations(self): utcnow = 
datetime.datetime.utcnow() mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) generator = await retry_(self._generator_mock)(sleep_time=0.2) - await generator.__anext__() == 0 + assert await generator.__anext__() == 0 task = asyncio.create_task(generator.__anext__()) task.cancel() with pytest.raises(asyncio.CancelledError): @@ -370,11 +370,13 @@ async def wrapper(): decorated = retry_(iterable_fn) retryable = await decorated() + # initiate the generator by calling next result = await retryable.__anext__() assert result == 0 - await retryable.asend("test") == 1 - await retryable.asend("test2") == 2 - await retryable.asend("test3") == 3 + # test sending values + assert await retryable.asend("test") == 1 + assert await retryable.asend("test2") == 2 + assert await retryable.asend("test3") == 3 @pytest.mark.parametrize("awaitable_wrapped", [True, False]) @mock.patch("asyncio.sleep", autospec=True) diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py index ce88c95a..c6a7759a 100644 --- a/tests/unit/retry/test_retry_streaming.py +++ b/tests/unit/retry/test_retry_streaming.py @@ -227,6 +227,7 @@ def test___call___with_generator_send_retry(self, sleep): ) result = retry_(self._generator_mock)(error_on=3) with pytest.raises(TypeError) as exc_info: + # calling first send with non-None input should raise a TypeError result.send("can not send to fresh generator") assert exc_info.match("can't send non-None value") # initiate iteration with None From ace61eb1ab402327f828d6d0bcd914b8953c9d76 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:33:51 -0800 Subject: [PATCH 192/204] use _build_retry_error as default param --- google/api_core/retry/retry_base.py | 15 ++++++++----- google/api_core/retry/retry_streaming.py | 21 +++++++------------ .../api_core/retry/retry_streaming_async.py | 14 ++++--------- google/api_core/retry/retry_unary.py | 14 ++++--------- 
google/api_core/retry/retry_unary_async.py | 14 ++++--------- 5 files changed, 29 insertions(+), 49 deletions(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index 880eecb0..c48eb91d 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -176,9 +176,10 @@ def _retry_error_helper( predicate_fn: Callable[[Exception], bool], on_error_fn: Callable[[Exception], None] | None, exc_factory_fn: Callable[ - [list[Exception], RetryFailureReason], + [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], ], + original_timeout: float | None, ): """ Shared logic for handling an error for all retry implementations @@ -190,17 +191,20 @@ def _retry_error_helper( Args: - exc: the exception that was raised - deadline: the deadline for the retry, calculated as a diff from time.monotonic() - - next_sleep: the calculated next sleep interval + - next_sleep: the next sleep interval - error_list: the list of exceptions that have been raised so far - - predicate_fn: the predicate that was used to determine if the exception should be retried - - on_error_fn: the callback that was called when the exception was raised - - exc_factory_fn: the callback that was called to build the exception to be raised on terminal failure + - predicate_fn: predicate used to determine if the exception should be retried + - on_error_fn: callback to execute when a retryable error occurs + - exc_factory_fn: callback used to build the exception to be raised on terminal failure + - original_timeout_val: the original timeout value for the retry (in seconds), + to be passed to the exception factory for building an error message """ error_list.append(exc) if not predicate_fn(exc): final_exc, source_exc = exc_factory_fn( error_list, RetryFailureReason.NON_RETRYABLE_ERROR, + original_timeout, ) raise final_exc from source_exc if on_error_fn is not None: @@ -209,6 +213,7 @@ def _retry_error_helper( final_exc, 
source_exc = exc_factory_fn( error_list, RetryFailureReason.TIMEOUT, + original_timeout, ) raise final_exc from source_exc _LOGGER.debug( diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index 57b40aed..e5abcd01 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -55,12 +55,10 @@ def retry_target_stream( sleep_generator: Iterable[float], timeout: Optional[float] = None, on_error: Optional[Callable[[Exception], None]] = None, - exception_factory: Optional[ - Callable[ - [List[Exception], RetryFailureReason, Optional[float]], - Tuple[Exception, Optional[Exception]], - ] - ] = None, + exception_factory: Callable[ + [List[Exception], RetryFailureReason, Optional[float]], + Tuple[Exception, Optional[Exception]], + ] = _build_retry_error, init_args: _P.args = (), init_kwargs: _P.kwargs = {}, **kwargs, @@ -88,9 +86,8 @@ def retry_target_stream( It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, - along with the cause exception if any. - If not provided, a default implementation will raise a RetryError - on timeout, or the last exception encountered otherwise. + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. init_args: Positional arguments to pass to the target function. init_kwargs: Keyword arguments to pass to the target function. 
@@ -110,10 +107,6 @@ def retry_target_stream( time.monotonic() + timeout if timeout is not None else None ) error_list: list[Exception] = [] - # make a partial with timeout applied - exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 - e, t, timeout - ) for sleep in sleep_generator: # Start a new retry loop @@ -129,7 +122,7 @@ def retry_target_stream( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exc_factory + exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout ) # if exception not raised, sleep before next attempt time.sleep(sleep) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index b4027af2..0eb90c7a 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -61,8 +61,7 @@ async def retry_target_stream( exception_factory: Callable[ [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], - ] - | None = None, + ] = _build_retry_error, init_args: _P.args = (), init_kwargs: _P.kwargs = {}, **kwargs, @@ -90,9 +89,8 @@ async def retry_target_stream( It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, - along with the cause exception if any. - If not provided, a default implementation will raise a RetryError - on timeout, or the last exception encountered otherwise. + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. init_args: Positional arguments to pass to the target function. init_kwargs: Keyword arguments to pass to the target function. 
@@ -111,10 +109,6 @@ async def retry_target_stream( deadline = time.monotonic() + timeout if timeout else None # keep track of retryable exceptions we encounter to pass in to exception_factory error_list: list[Exception] = [] - # make a partial with timeout applied - exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 - e, t, timeout - ) target_is_generator: bool | None = None for sleep in sleep_generator: @@ -181,7 +175,7 @@ async def retry_target_stream( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exc_factory + exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout ) # if exception not raised, sleep before next attempt await asyncio.sleep(sleep) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index a3b17411..db69c6eb 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -91,8 +91,7 @@ def retry_target( exception_factory: Callable[ [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], - ] - | None = None, + ] = _build_retry_error, **kwargs, ): """Call a function and retry if it fails. @@ -117,9 +116,8 @@ def retry_target( It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, - along with the cause exception if any. - If not provided, a default implementation will raise a RetryError - on timeout, or the last exception encountered otherwise. + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. deadline (float): DEPRECATED: use ``timeout`` instead. For backward compatibility, if specified it will override ``timeout`` parameter. 
@@ -138,10 +136,6 @@ def retry_target( deadline = time.monotonic() + timeout if timeout is not None else None error_list: list[Exception] = [] - # make a partial with timeout applied - exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 - e, t, timeout - ) for sleep in sleep_generator: try: @@ -155,7 +149,7 @@ def retry_target( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exc_factory + exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout ) # if exception not raised, sleep before next attempt time.sleep(sleep) diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 2360b5fa..a6272b73 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -101,8 +101,7 @@ async def retry_target( exception_factory: Callable[ [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], - ] - | None = None, + ] = _build_retry_error, **kwargs, ): """Await a coroutine and retry if it fails. @@ -127,9 +126,8 @@ async def retry_target( It takes a list of all exceptions encountered, a retry.RetryFailureReason enum indicating the failure cause, and the original timeout value as arguments. It should return a tuple of the exception to be raised, - along with the cause exception if any. - If not provided, a default implementation will raise a RetryError - on timeout, or the last exception encountered otherwise. + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. deadline (float): DEPRECATED use ``timeout`` instead. For backward compatibility, if set it will override the ``timeout`` parameter. 
@@ -148,10 +146,6 @@ async def retry_target( deadline = time.monotonic() + timeout if timeout is not None else None error_list: list[Exception] = [] - # make a partial with timeout applied - exc_factory = lambda e, t: (exception_factory or _build_retry_error)( # noqa: E731 - e, t, timeout - ) for sleep in sleep_generator: try: @@ -161,7 +155,7 @@ async def retry_target( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exc_factory + exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout ) # if exception not raised, sleep before next attempt await asyncio.sleep(sleep) From 1bbd1f0f39974db204198f083a477d85b3cfc77b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:35:14 -0800 Subject: [PATCH 193/204] ran blacken --- google/api_core/retry/retry_streaming.py | 9 ++++++++- google/api_core/retry/retry_streaming_async.py | 9 ++++++++- google/api_core/retry/retry_unary.py | 9 ++++++++- google/api_core/retry/retry_unary_async.py | 9 ++++++++- 4 files changed, 32 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index e5abcd01..01d33126 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -122,7 +122,14 @@ def retry_target_stream( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout + exc, + deadline, + sleep, + error_list, + predicate, + on_error, + exception_factory, + timeout, ) # if exception not raised, sleep before next attempt time.sleep(sleep) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index 0eb90c7a..05f908a4 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ 
b/google/api_core/retry/retry_streaming_async.py @@ -175,7 +175,14 @@ async def retry_target_stream( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout + exc, + deadline, + sleep, + error_list, + predicate, + on_error, + exception_factory, + timeout, ) # if exception not raised, sleep before next attempt await asyncio.sleep(sleep) diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index db69c6eb..ee312cf1 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -149,7 +149,14 @@ def retry_target( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout + exc, + deadline, + sleep, + error_list, + predicate, + on_error, + exception_factory, + timeout, ) # if exception not raised, sleep before next attempt time.sleep(sleep) diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index a6272b73..d6d2492f 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -155,7 +155,14 @@ async def retry_target( except Exception as exc: # defer to shared logic for handling errors _retry_error_helper( - exc, deadline, sleep, error_list, predicate, on_error, exception_factory, timeout + exc, + deadline, + sleep, + error_list, + predicate, + on_error, + exception_factory, + timeout, ) # if exception not raised, sleep before next attempt await asyncio.sleep(sleep) From 35cc00aa28271c5571e240efa0d7ca4fb0127356 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:46:06 -0800 Subject: [PATCH 194/204] added comment to clarify timeouts --- google/api_core/retry/retry_streaming.py | 2 ++ google/api_core/retry/retry_streaming_async.py | 2 ++ 
google/api_core/retry/retry_unary.py | 4 ++++ google/api_core/retry/retry_unary_async.py | 4 ++++ 4 files changed, 12 insertions(+) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index 01d33126..6d0197dc 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -280,6 +280,8 @@ def on_error(e): maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (float): How long to keep retrying, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index 05f908a4..9077764c 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -265,6 +265,8 @@ def on_error(e): maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (Optional[float]): How long to keep retrying in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. on_error (Optional[Callable[Exception]]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index ee312cf1..fb025cfb 100644 --- a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -108,6 +108,8 @@ def retry_target( sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. 
timeout (Optional[float]): How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. on_error (Optional[Callable[Exception]]): If given, the on_error callback will be called with each retryable exception raised by the target. Any error raised by this function will *not* be caught. @@ -250,6 +252,8 @@ class Retry(_BaseRetry): maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (float): How long to keep retrying, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index d6d2492f..d0ebc3de 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -118,6 +118,8 @@ async def retry_target( sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. timeout (Optional[float]): How long to keep retrying the target, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. on_error (Optional[Callable[Exception]]): If given, the on_error callback will be called with each retryable exception raised by the target. Any error raised by this function will *not* be caught. @@ -187,6 +189,8 @@ class AsyncRetry(_BaseRetry): maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. timeout (Optional[float]): How long to keep retrying in seconds. 
+ Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. on_error (Optional[Callable[Exception]]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. From 89abfa45dc6d3ce8360e580647222aec86f23fc9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Dec 2023 13:47:39 -0800 Subject: [PATCH 195/204] removed timeout vs deadline explainer from retry_streaming --- google/api_core/retry/retry_streaming.py | 69 ------------------------ 1 file changed, 69 deletions(-) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index 6d0197dc..e2a4e4e3 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -147,75 +147,6 @@ class StreamingRetry(_BaseRetry): Although the default behavior is to retry transient API errors, a different predicate can be provided to retry other exceptions. - There are two important concepts that retry/polling behavior may operate on, - Deadline and Timeout, which need to be properly defined for the correct - usage of this class and the rest of the library. - - Deadline: a fixed point in time by which a certain operation must - terminate. For example, if a certain operation has a deadline - "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an - error) by that time, regardless of when it was started or whether it - was started at all. - - Timeout: the maximum duration of time after which a certain operation - must terminate (successfully or with an error). The countdown begins right - after an operation was started. For example, if an operation was started at - 09:24:00 with timeout of 75 seconds, it must terminate no later than - 09:25:15. 
- - Unfortunately, in the past this class (and the api-core library as a whole) has not - been properly distinguishing the concepts of "timeout" and "deadline", and the - ``deadline`` parameter has meant ``timeout``. That is why - ``deadline`` has been deprecated and ``timeout`` should be used instead. If the - ``deadline`` parameter is set, it will override the ``timeout`` parameter. - In other words, ``retry.deadline`` should be treated as just a deprecated alias for - ``retry.timeout``. - - Said another way, it is safe to assume that this class and the rest of this - library operate in terms of timeouts (not deadlines) unless explicitly - noted the usage of deadline semantics. - - It is also important to - understand the three most common applications of the Timeout concept in the - context of this library. - - Usually the generic Timeout term may stand for one of the following actual - timeouts: RPC Timeout, Retry Timeout, or Polling Timeout. - - RPC Timeout: a value supplied by the client to the server so - that the server side knows the maximum amount of time it is expected to - spend handling that specific RPC. For example, in the case of gRPC transport, - RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2 - request. The `timeout` property of this class normally never represents the - RPC Timeout as it is handled separately by the ``google.api_core.timeout`` - module of this library. - - Retry Timeout: this is the most common meaning of the ``timeout`` property - of this class, and defines how long a certain RPC may be retried in case - the server returns an error. - - Polling Timeout: defines how long the - client side is allowed to call the polling RPC repeatedly to check a status of a - long-running operation. Each polling RPC is - expected to succeed (its errors are supposed to be handled by the retry - logic). 
The decision as to whether a new polling attempt needs to be made is based - not on the RPC status code but on the status of the returned - status of an operation. In other words: we will poll a long-running operation until - the operation is done or the polling timeout expires. Each poll will inform us of - the status of the operation. The poll consists of an RPC to the server that may - itself be retried as per the poll-specific retry settings in case of errors. The - operation-level retry settings do NOT apply to polling-RPC retries. - - With the actual timeout types being defined above, the client libraries - often refer to just Timeout without clarifying which type specifically - that is. In that case the actual timeout type (sometimes also referred to as - Logical Timeout) can be determined from the context. If it is a unary rpc - call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if - provided directly as a standalone value) or Retry Timeout (if provided as - ``retry.timeout`` property of the unary RPC's retry config). For - ``Operation`` or ``PollingFuture`` in general Timeout stands for - Polling Timeout. - Important Note: when a stream encounters a retryable error, it will silently construct a fresh iterator instance in the background and continue yielding (likely duplicate) values as if no error occurred. 
From 74ab817f22be5c351fe830af46c9b9a44d2d794f Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 7 Dec 2023 17:00:59 -0800 Subject: [PATCH 196/204] remove duplicated test --- tests/asyncio/retry/test_retry_unary_async.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py index 97e9522a..90852a3d 100644 --- a/tests/asyncio/retry/test_retry_unary_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -137,14 +137,6 @@ async def test_retry_target_bad_sleep_generator(): ) -@pytest.mark.asyncio -async def test_retry_streaming_target_bad_sleep_generator(): - from google.api_core.retry.retry_streaming_async import retry_target_stream - - with pytest.raises(ValueError, match="Sleep generator"): - await retry_target_stream(None, None, [], None).__anext__() - - class TestAsyncRetry(Test_BaseRetry): def _make_one(self, *args, **kwargs): return retry_async.AsyncRetry(*args, **kwargs) From 85b3e02f30dc45b8fb2423f25eb0842380c38d10 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 7 Dec 2023 17:01:37 -0800 Subject: [PATCH 197/204] fixed variable name --- tests/asyncio/retry/test_retry_streaming_async.py | 4 ++-- tests/asyncio/retry/test_retry_unary_async.py | 4 ++-- tests/unit/retry/test_retry_streaming.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py index bc3e7afb..39af85f7 100644 --- a/tests/asyncio/retry/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -150,7 +150,7 @@ async def test___call___generator_retry_hitting_timeout( timeout_val = 9.9 # support "deadline" as an alias for "timeout" - timout_kwarg = ( + timeout_kwarg = ( {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} @@ -162,7 +162,7 @@ async def test___call___generator_retry_hitting_timeout( initial=1.0, 
maximum=1024.0, multiplier=2.0, - **timout_kwarg, + **timeout_kwarg, ) time_now = time.monotonic() diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py index 90852a3d..fc2f572b 100644 --- a/tests/asyncio/retry/test_retry_unary_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -112,12 +112,12 @@ async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg) timeout_val = 10 # support "deadline" as an alias for "timeout" - timout_kwarg = ( + timeout_kwarg = ( {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} ) with pytest.raises(exceptions.RetryError) as exc_info: - await retry_async.retry_target(target, predicate, range(10), **timout_kwarg) + await retry_async.retry_target(target, predicate, range(10), **timeout_kwarg) assert exc_info.value.cause == exception assert exc_info.match("Timeout of 10.0s exceeded") diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py index c6a7759a..01f35327 100644 --- a/tests/unit/retry/test_retry_streaming.py +++ b/tests/unit/retry/test_retry_streaming.py @@ -144,7 +144,7 @@ def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): timeout_val = 30.9 # support "deadline" as an alias for "timeout" - timout_kwarg = ( + timeout_kwarg = ( {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} @@ -156,7 +156,7 @@ def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): initial=1.0, maximum=1024.0, multiplier=2.0, - **timout_kwarg, + **timeout_kwarg, ) timenow = time.monotonic() From 6dbe17d004b1642936339a7a9ba969feddfd997b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 8 Dec 2023 11:40:10 -0800 Subject: [PATCH 198/204] made build_retry_error public --- google/api_core/retry/__init__.py | 5 +++-- google/api_core/retry/retry_base.py | 7 +++++-- google/api_core/retry/retry_streaming.py | 4 ++-- 
google/api_core/retry/retry_streaming_async.py | 4 ++-- google/api_core/retry/retry_unary.py | 4 ++-- google/api_core/retry/retry_unary_async.py | 4 ++-- tests/unit/retry/test_retry_base.py | 18 +++++++++--------- 7 files changed, 25 insertions(+), 21 deletions(-) diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py index 92a9c767..79428415 100644 --- a/google/api_core/retry/__init__.py +++ b/google/api_core/retry/__init__.py @@ -1,4 +1,5 @@ # Copyright 2017 Google LLC + # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,7 +18,7 @@ from .retry_base import exponential_sleep_generator from .retry_base import if_exception_type from .retry_base import if_transient_error -from .retry_base import _build_retry_error +from .retry_base import build_retry_error from .retry_base import RetryFailureReason from .retry_unary import Retry from .retry_unary import retry_target @@ -32,7 +33,7 @@ "exponential_sleep_generator", "if_exception_type", "if_transient_error", - "_build_retry_error", + "build_retry_error", "RetryFailureReason", "Retry", "AsyncRetry", diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index c48eb91d..d6447973 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -131,14 +131,17 @@ class RetryFailureReason(Enum): NON_RETRYABLE_ERROR = 1 -def _build_retry_error( +def build_retry_error( exc_list: list[Exception], reason: RetryFailureReason, timeout_val: float | None, **kwargs: Any, ) -> tuple[Exception, Exception | None]: """ - Default exception_factory implementation. Builds an exception after the retry fails. + Default exception_factory implementation. + + Returns a RetryError if the failure is due to a timeout, otherwise + returns the last exception encountered. 
Args: - exc_list: list of exceptions that occurred during the retry diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py index e2a4e4e3..e113323b 100644 --- a/google/api_core/retry/retry_streaming.py +++ b/google/api_core/retry/retry_streaming.py @@ -36,7 +36,7 @@ from google.api_core.retry.retry_base import _BaseRetry from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import _build_retry_error +from google.api_core.retry import build_retry_error from google.api_core.retry import RetryFailureReason if TYPE_CHECKING: @@ -58,7 +58,7 @@ def retry_target_stream( exception_factory: Callable[ [List[Exception], RetryFailureReason, Optional[float]], Tuple[Exception, Optional[Exception]], - ] = _build_retry_error, + ] = build_retry_error, init_args: _P.args = (), init_kwargs: _P.kwargs = {}, **kwargs, diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index 9077764c..dc7c95c8 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -38,7 +38,7 @@ from google.api_core.retry.retry_base import _BaseRetry from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import _build_retry_error +from google.api_core.retry import build_retry_error from google.api_core.retry import RetryFailureReason @@ -61,7 +61,7 @@ async def retry_target_stream( exception_factory: Callable[ [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], - ] = _build_retry_error, + ] = build_retry_error, init_args: _P.args = (), init_kwargs: _P.kwargs = {}, **kwargs, diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py index fb025cfb..ae59d514 100644 --- 
a/google/api_core/retry/retry_unary.py +++ b/google/api_core/retry/retry_unary.py @@ -66,7 +66,7 @@ def check_if_exists(): from google.api_core.retry.retry_base import _BaseRetry from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry.retry_base import exponential_sleep_generator -from google.api_core.retry.retry_base import _build_retry_error +from google.api_core.retry.retry_base import build_retry_error from google.api_core.retry.retry_base import RetryFailureReason @@ -91,7 +91,7 @@ def retry_target( exception_factory: Callable[ [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], - ] = _build_retry_error, + ] = build_retry_error, **kwargs, ): """Call a function and retry if it fails. diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index d0ebc3de..6e1641e9 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -68,7 +68,7 @@ async def check_if_exists(): from google.api_core.retry.retry_base import _BaseRetry from google.api_core.retry.retry_base import _retry_error_helper from google.api_core.retry.retry_base import exponential_sleep_generator -from google.api_core.retry.retry_base import _build_retry_error +from google.api_core.retry.retry_base import build_retry_error from google.api_core.retry.retry_base import RetryFailureReason # for backwards compatibility, expose helpers in this module @@ -101,7 +101,7 @@ async def retry_target( exception_factory: Callable[ [list[Exception], RetryFailureReason, float | None], tuple[Exception, Exception | None], - ] = _build_retry_error, + ] = build_retry_error, **kwargs, ): """Await a coroutine and retry if it fails. 
diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py index 2eda4b5d..93561e83 100644 --- a/tests/unit/retry/test_retry_base.py +++ b/tests/unit/retry/test_retry_base.py @@ -58,47 +58,47 @@ def test_exponential_sleep_generator_base_2(uniform): assert result == [1, 2, 4, 8, 16, 32, 60, 60] -def test__build_retry_error_empty_list(): +def test_build_retry_error_empty_list(): """ attempt to build a retry error with no errors encountered should return a generic RetryError """ - from google.api_core.retry import _build_retry_error + from google.api_core.retry import build_retry_error from google.api_core.retry import RetryFailureReason reason = RetryFailureReason.NON_RETRYABLE_ERROR - src, cause = _build_retry_error([], reason, 10) + src, cause = build_retry_error([], reason, 10) assert isinstance(src, exceptions.RetryError) assert cause is None assert src.message == "Unknown error" -def test__build_retry_error_timeout_message(): +def test_build_retry_error_timeout_message(): """ should provide helpful error message when timeout is reached """ - from google.api_core.retry import _build_retry_error + from google.api_core.retry import build_retry_error from google.api_core.retry import RetryFailureReason reason = RetryFailureReason.TIMEOUT cause = RuntimeError("timeout") - src, found_cause = _build_retry_error([ValueError(), cause], reason, 10) + src, found_cause = build_retry_error([ValueError(), cause], reason, 10) assert isinstance(src, exceptions.RetryError) assert src.message == "Timeout of 10.0s exceeded" # should attach appropriate cause assert found_cause is cause -def test__build_retry_error_empty_timeout(): +def test_build_retry_error_empty_timeout(): """ attempt to build a retry error when timeout is None should return a generic timeout error message """ - from google.api_core.retry import _build_retry_error + from google.api_core.retry import build_retry_error from google.api_core.retry import RetryFailureReason reason = 
RetryFailureReason.TIMEOUT - src, _ = _build_retry_error([], reason, None) + src, _ = build_retry_error([], reason, None) assert isinstance(src, exceptions.RetryError) assert src.message == "Timeout exceeded" From 71e58888b1687bcc09b0c4c795d2bdf85f6a69c2 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 8 Dec 2023 12:03:20 -0800 Subject: [PATCH 199/204] changed docstring --- google/api_core/retry/retry_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py index d6447973..efd6d8f7 100644 --- a/google/api_core/retry/retry_base.py +++ b/google/api_core/retry/retry_base.py @@ -196,7 +196,7 @@ def _retry_error_helper( - deadline: the deadline for the retry, calculated as a diff from time.monotonic() - next_sleep: the next sleep interval - error_list: the list of exceptions that have been raised so far - - predicate_fn: predicate used to determine if the exception should be retried + - predicate_fn: takes `exc` and returns true if the operation should be retried - on_error_fn: callback to execute when a retryable error occurs - exc_factory_fn: callback used to build the exception to be raised on terminal failure - original_timeout_val: the original timeout value for the retry (in seconds), From cbae3d3867dc615001548cecc246c7938a46d9df Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Mon, 11 Dec 2023 10:46:36 -0800 Subject: [PATCH 200/204] import extra helper in retry_unary_async --- google/api_core/retry/retry_unary_async.py | 1 + 1 file changed, 1 insertion(+) diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py index 6e1641e9..f97ea931 100644 --- a/google/api_core/retry/retry_unary_async.py +++ b/google/api_core/retry/retry_unary_async.py @@ -73,6 +73,7 @@ async def check_if_exists(): # for backwards compatibility, expose helpers in this module from google.api_core.retry.retry_base import if_exception_type # noqa +from 
google.api_core.retry.retry_base import if_transient_error # noqa if TYPE_CHECKING: import sys From acf9752ede77481b57b83b58d72d47eba0540ed4 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Mon, 11 Dec 2023 16:28:01 -0800 Subject: [PATCH 201/204] fix: address backwards compatibility warnings failing presubmits --- google/api_core/retry/retry_streaming_async.py | 11 ++++++++--- tests/asyncio/retry/test_retry_streaming_async.py | 2 +- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index dc7c95c8..f4620bb1 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -160,9 +160,14 @@ async def retry_target_stream( # bare except catches any exception passed to `athrow` if target_is_generator: # delegate error handling to target_iterator - await cast(AsyncGenerator["_Y", None], target_iterator).athrow( - *sys.exc_info() - ) + + # TODO: Remove this conditional once the minimum supported Python version is 3.11 + if sys.version_info[:3] >= (3, 11, 0): + await cast(AsyncGenerator["_Y", None], target_iterator).athrow( + sys.exception()) + else: + await cast(AsyncGenerator["_Y", None], target_iterator).athrow( + *sys.exc_info()) else: raise return diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py index 39af85f7..28ae6ff1 100644 --- a/tests/asyncio/retry/test_retry_streaming_async.py +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -201,7 +201,7 @@ async def test___call___generator_cancellations(self): """ # test without cancel as retryable retry_ = retry_streaming_async.AsyncStreamingRetry() - utcnow = datetime.datetime.utcnow() + utcnow = datetime.datetime.now(datetime.timezone.utc) mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) generator = await retry_(self._generator_mock)(sleep_time=0.2) assert await 
generator.__anext__() == 0 From 7cf9fbf118e1687a062de22ba042aa649a200b1b Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Mon, 11 Dec 2023 17:37:58 -0800 Subject: [PATCH 202/204] fix: address mypy errors --- google/api_core/retry/retry_streaming_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index f4620bb1..b3e72573 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -164,7 +164,7 @@ async def retry_target_stream( # TODO: Remove this conditional once the minimum supported Python version is 3.11 if sys.version_info[:3] >= (3, 11, 0): await cast(AsyncGenerator["_Y", None], target_iterator).athrow( - sys.exception()) + sys.exception()) # type: ignore else: await cast(AsyncGenerator["_Y", None], target_iterator).athrow( *sys.exc_info()) From f62439a0ad1b4eb3776f6b8de4f8b32fbc9107e6 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Mon, 11 Dec 2023 19:15:24 -0800 Subject: [PATCH 203/204] fix: address coverage and lint issues failing presubmits --- google/api_core/retry/retry_streaming_async.py | 12 ++++++++---- tests/unit/retry/test_retry_base.py | 9 +++++++-- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index b3e72573..b0e6f5d3 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -163,11 +163,15 @@ async def retry_target_stream( # TODO: Remove this conditional once the minimum supported Python version is 3.11 if sys.version_info[:3] >= (3, 11, 0): - await cast(AsyncGenerator["_Y", None], target_iterator).athrow( - sys.exception()) # type: ignore + await cast( + AsyncGenerator["_Y", None], target_iterator + ).athrow( + sys.exception() # type: ignore + ) else: - await cast(AsyncGenerator["_Y", None], 
target_iterator).athrow( - *sys.exc_info()) + await cast( + AsyncGenerator["_Y", None], target_iterator + ).athrow(*sys.exc_info()) else: raise return diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py index 93561e83..fa55d935 100644 --- a/tests/unit/retry/test_retry_base.py +++ b/tests/unit/retry/test_retry_base.py @@ -16,6 +16,7 @@ import re import mock +import pytest import requests.exceptions from google.api_core import exceptions @@ -136,7 +137,8 @@ def test_constructor_options(self): assert retry_._timeout == 4 assert retry_._on_error is _some_function - def test_with_timeout(self): + @pytest.mark.parametrize("use_deadline", [True, False]) + def test_with_timeout(self, use_deadline): retry_ = self._make_one( predicate=mock.sentinel.predicate, initial=1, @@ -145,9 +147,12 @@ def test_with_timeout(self): timeout=4, on_error=mock.sentinel.on_error, ) - new_retry = retry_.with_timeout(42) + new_retry = ( + retry_.with_timeout(42) if not use_deadline else retry_.with_deadline(42) + ) assert retry_ is not new_retry assert new_retry._timeout == 42 + assert new_retry.timeout == 42 if not use_deadline else new_retry.deadline == 42 # the rest of the attributes should remain the same assert new_retry._predicate is retry_._predicate From b7abeca4b2c2ffb652f388d0b554d0714e35ff4a Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Tue, 12 Dec 2023 14:57:47 -0800 Subject: [PATCH 204/204] chore: simplify resolution of backward-compatibility warnings --- google/api_core/retry/retry_streaming_async.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py index b0e6f5d3..ed4edab2 100644 --- a/google/api_core/retry/retry_streaming_async.py +++ b/google/api_core/retry/retry_streaming_async.py @@ -160,18 +160,9 @@ async def retry_target_stream( # bare except catches any exception passed to `athrow` if
target_is_generator: # delegate error handling to target_iterator - - # TODO: Remove this conditional once the minimum supported Python version is 3.11 - if sys.version_info[:3] >= (3, 11, 0): - await cast( - AsyncGenerator["_Y", None], target_iterator - ).athrow( - sys.exception() # type: ignore - ) - else: - await cast( - AsyncGenerator["_Y", None], target_iterator - ).athrow(*sys.exc_info()) + await cast(AsyncGenerator["_Y", None], target_iterator).athrow( + cast(BaseException, sys.exc_info()[1]) + ) else: raise return