Skip to content

Commit

Permalink
feat(flagd-rpc): add caching with tests
Browse files Browse the repository at this point in the history
Signed-off-by: Simon Schrottner <simon.schrottner@dynatrace.com>
  • Loading branch information
aepfli committed Nov 28, 2024
1 parent b62d3d1 commit bd25097
Show file tree
Hide file tree
Showing 9 changed files with 145 additions and 10 deletions.
1 change: 1 addition & 0 deletions providers/openfeature-provider-flagd/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ dependencies = [
"panzi-json-logic>=1.0.1",
"semver>=3,<4",
"pyyaml>=6.0.1",
"cachebox"
]
requires-python = ">=3.8"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,13 @@ class ResolverType(Enum):
IN_PROCESS = "in-process"


class CacheType(Enum):
    """Supported client-side caching strategies for resolved flag values."""

    LRU = "lru"
    DISABLED = "disabled"


# Defaults used when neither a constructor argument nor the matching
# FLAGD_* environment variable is set.
DEFAULT_CACHE = CacheType.LRU
DEFAULT_CACHE_SIZE = 1000
DEFAULT_DEADLINE = 500  # milliseconds (overridden by ENV_VAR_DEADLINE_MS)
DEFAULT_HOST = "localhost"
DEFAULT_KEEP_ALIVE = 0
Expand All @@ -19,6 +26,8 @@ class ResolverType(Enum):
DEFAULT_STREAM_DEADLINE = 600000  # milliseconds (overridden by ENV_VAR_STREAM_DEADLINE_MS)
DEFAULT_TLS = False

# Environment variable names that override the defaults above.
ENV_VAR_CACHE_SIZE = "FLAGD_MAX_CACHE_SIZE"
ENV_VAR_CACHE_TYPE = "FLAGD_CACHE_TYPE"
ENV_VAR_DEADLINE_MS = "FLAGD_DEADLINE_MS"
ENV_VAR_HOST = "FLAGD_HOST"
ENV_VAR_KEEP_ALIVE_TIME_MS = "FLAGD_KEEP_ALIVE_TIME_MS"
Expand Down Expand Up @@ -57,6 +66,8 @@ def __init__( # noqa: PLR0913
deadline: typing.Optional[int] = None,
stream_deadline_ms: typing.Optional[int] = None,
keep_alive_time: typing.Optional[int] = None,
cache_type: typing.Optional[CacheType] = None,
max_cache_size: typing.Optional[int] = None,
):
self.host = env_or_default(ENV_VAR_HOST, DEFAULT_HOST) if host is None else host

Expand Down Expand Up @@ -125,3 +136,15 @@ def __init__( # noqa: PLR0913
if keep_alive_time is None
else keep_alive_time
)

self.cache_type = (
CacheType(env_or_default(ENV_VAR_CACHE_TYPE, DEFAULT_CACHE))
if cache_type is None
else cache_type
)

self.max_cache_size: int = (
int(env_or_default(ENV_VAR_CACHE_SIZE, DEFAULT_CACHE_SIZE, cast=int))
if max_cache_size is None
else max_cache_size
)
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
from openfeature.provider.metadata import Metadata
from openfeature.provider.provider import AbstractProvider

from .config import Config, ResolverType
from .config import CacheType, Config, ResolverType
from .resolvers import AbstractResolver, GrpcResolver, InProcessResolver

T = typing.TypeVar("T")
Expand All @@ -50,6 +50,8 @@ def __init__( # noqa: PLR0913
offline_flag_source_path: typing.Optional[str] = None,
stream_deadline_ms: typing.Optional[int] = None,
keep_alive_time: typing.Optional[int] = None,
cache_type: typing.Optional[CacheType] = None,
max_cache_size: typing.Optional[int] = None,
):
"""
Create an instance of the FlagdProvider
Expand Down Expand Up @@ -83,6 +85,8 @@ def __init__( # noqa: PLR0913
offline_flag_source_path=offline_flag_source_path,
stream_deadline_ms=stream_deadline_ms,
keep_alive_time=keep_alive_time,
cache_type=cache_type,
max_cache_size=max_cache_size,
)

self.resolver = self.setup_resolver()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import typing

import grpc
from cachebox import BaseCacheImpl, LRUCache
from google.protobuf.json_format import MessageToDict
from google.protobuf.struct_pb2 import Struct

Expand All @@ -18,13 +19,13 @@
ProviderNotReadyError,
TypeMismatchError,
)
from openfeature.flag_evaluation import FlagResolutionDetails
from openfeature.flag_evaluation import FlagResolutionDetails, Reason
from openfeature.schemas.protobuf.flagd.evaluation.v1 import (
evaluation_pb2,
evaluation_pb2_grpc,
)

from ..config import Config
from ..config import CacheType, Config
from ..flag_type import FlagType

if typing.TYPE_CHECKING:
Expand Down Expand Up @@ -57,6 +58,12 @@ def __init__(
self.deadline = config.deadline * 0.001
self.connected = False

self._cache: typing.Optional[BaseCacheImpl] = (
LRUCache(maxsize=self.config.max_cache_size)
if self.config.cache_type == CacheType.LRU
else None
)

def _create_stub(
self,
) -> typing.Tuple[evaluation_pb2_grpc.ServiceStub, grpc.Channel]:
Expand All @@ -71,10 +78,20 @@ def _create_stub(

def initialize(self, evaluation_context: EvaluationContext) -> None:
    """Connect to flagd and (re)build the flag-resolution cache.

    :param evaluation_context: required by the provider API; not used here.
    """
    self.connect()
    # NOTE(review): hardcoding 0.1 here overrides any configured retry
    # backoff on every (re)initialize — confirm this is intentional and
    # not leftover test tuning.
    self.retry_backoff_seconds = 0.1
    # NOTE(review): resetting connected to False immediately after
    # connect() looks suspicious — presumably the event-stream listener
    # flips it back to True once the stream is up; verify.
    self.connected = False

    # Build a fresh LRU cache when caching is enabled; entries from a
    # previous connection are discarded.
    self._cache = (
        LRUCache(maxsize=self.config.max_cache_size)
        if self.config.cache_type == CacheType.LRU
        else None
    )

def shutdown(self) -> None:
    """Shut the resolver down.

    Marks the resolver inactive, closes the underlying gRPC channel and,
    when caching is enabled, drops every cached flag resolution.
    """
    self.active = False
    self.channel.close()
    cache = self._cache
    if cache:
        cache.clear()

def connect(self) -> None:
self.active = True
Expand All @@ -96,7 +113,6 @@ def connect(self) -> None:

def listen(self) -> None:
retry_delay = self.retry_backoff_seconds

call_args = (
{"timeout": self.streamline_deadline_seconds}
if self.streamline_deadline_seconds > 0
Expand Down Expand Up @@ -148,6 +164,10 @@ def listen(self) -> None:
def handle_changed_flags(self, data: typing.Any) -> None:
    """Handle a flag-change event from the flagd event stream.

    Evicts every changed flag from the local resolution cache (so the next
    evaluation fetches a fresh value) and re-emits the change as a
    PROVIDER_CONFIGURATION_CHANGED event.

    :param data: decoded event payload; assumed to contain a "flags"
        mapping whose keys are the changed flag keys — TODO confirm against
        the stream schema.
    """
    changed_flags = list(data["flags"].keys())

    if self._cache:
        for flag in changed_flags:
            # A changed flag may never have been cached (or was already
            # evicted); guard membership so an unconditional pop cannot
            # raise and kill the event-stream listener.
            if flag in self._cache:
                self._cache.pop(flag)

    self.emit_provider_configuration_changed(ProviderEventDetails(changed_flags))

def resolve_boolean_details(
Expand Down Expand Up @@ -190,13 +210,18 @@ def resolve_object_details(
) -> FlagResolutionDetails[typing.Union[dict, list]]:
return self._resolve(key, FlagType.OBJECT, default_value, evaluation_context)

def _resolve( # noqa: PLR0915
def _resolve( # noqa: PLR0915 C901
self,
flag_key: str,
flag_type: FlagType,
default_value: T,
evaluation_context: typing.Optional[EvaluationContext],
) -> FlagResolutionDetails[T]:
if self._cache is not None and flag_key in self._cache:
cached_flag: FlagResolutionDetails[T] = self._cache[flag_key]
cached_flag.reason = Reason.CACHED
return cached_flag

context = self._convert_context(evaluation_context)
call_args = {"timeout": self.deadline}
try:
Expand Down Expand Up @@ -249,12 +274,17 @@ def _resolve( # noqa: PLR0915
raise GeneralError(message) from e

# Got a valid flag and valid type. Return it.
return FlagResolutionDetails(
result = FlagResolutionDetails(
value=value,
reason=response.reason,
variant=response.variant,
)

if response.reason == Reason.STATIC and self._cache is not None:
self._cache.insert(flag_key, result)

return result

def _convert_context(
self, evaluation_context: typing.Optional[EvaluationContext]
) -> Struct:
Expand Down
44 changes: 44 additions & 0 deletions providers/openfeature-provider-flagd/tests/e2e/rpc_cache.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
Feature: Flag evaluation with Caching

  # Scenarios exercising flag evaluation through the flagd RPC resolver with
  # LRU caching enabled: the first detailed evaluation of a static flag is
  # served by flagd (reason "STATIC"); repeat evaluations are served from the
  # provider-side cache (reason "CACHED").

  Background:
    Given a provider is registered

  Scenario: Resolves boolean details with caching
    When a boolean flag with key "boolean-flag" is evaluated with details and default value "false"
    Then the resolved boolean details value should be "true", the variant should be "on", and the reason should be "STATIC"
    Then the resolved boolean details value should be "true", the variant should be "on", and the reason should be "CACHED"

  Scenario: Resolves string details with caching
    When a string flag with key "string-flag" is evaluated with details and default value "bye"
    Then the resolved string details value should be "hi", the variant should be "greeting", and the reason should be "STATIC"
    Then the resolved string details value should be "hi", the variant should be "greeting", and the reason should be "CACHED"

  Scenario: Resolves integer details with caching
    When an integer flag with key "integer-flag" is evaluated with details and default value 1
    Then the resolved integer details value should be 10, the variant should be "ten", and the reason should be "STATIC"
    Then the resolved integer details value should be 10, the variant should be "ten", and the reason should be "CACHED"

  Scenario: Resolves float details with caching
    When a float flag with key "float-flag" is evaluated with details and default value 0.1
    Then the resolved float details value should be 0.5, the variant should be "half", and the reason should be "STATIC"
    Then the resolved float details value should be 0.5, the variant should be "half", and the reason should be "CACHED"

  Scenario: Resolves object details with caching
    When an object flag with key "object-flag" is evaluated with details and a null default value
    Then the resolved object details value should be contain fields "showImages", "title", and "imagesPerPage", with values "true", "Check out these pics!" and 100, respectively
    And the variant should be "template", and the reason should be "STATIC"
    Then the resolved object details value should be contain fields "showImages", "title", and "imagesPerPage", with values "true", "Check out these pics!" and 100, respectively
    And the variant should be "template", and the reason should be "CACHED"

  # A configuration change must evict the changed flag from the cache, so the
  # first evaluation after the change event is "STATIC" again before the value
  # is re-cached.
  Scenario: Flag change event with caching
    When a string flag with key "changing-flag" is evaluated with details
    When a PROVIDER_CONFIGURATION_CHANGED handler is added
    And a flag with key "changing-flag" is modified
    Then the returned reason should be "STATIC"
    Then the returned reason should be "CACHED"
    Then the PROVIDER_CONFIGURATION_CHANGED handler must run
    And the event details must indicate "changing-flag" was altered
    Then the returned reason should be "STATIC"
    Then the returned reason should be "CACHED"
10 changes: 10 additions & 0 deletions providers/openfeature-provider-flagd/tests/e2e/steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,16 @@ def setup_key_and_default(
return (key, default)


@when(
    parsers.cfparse(
        'a string flag with key "{key}" is evaluated with details',
    ),
    target_fixture="key_and_default",
)
def setup_key_without_default(key: str) -> typing.Tuple[str, JsonPrimitive]:
    """Step: evaluate a string flag without an explicit default value.

    Delegates to setup_key_and_default with an empty-string default so
    gherkin steps that omit a default still produce the (key, default)
    fixture tuple consumed by the shared evaluation steps.
    """
    return setup_key_and_default(key, "")


@when(
parsers.cfparse(
'an object flag with key "{key}" is evaluated with a null default value',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,7 @@ def setup(request, client_name, file_name, resolver_type):
"""nothing to boot"""
api.set_provider(
FlagdProvider(
resolver_type=resolver_type,
offline_flag_source_path=file_name.name,
resolver_type=resolver_type, offline_flag_source_path=file_name.name
),
client_name,
)
Expand Down
1 change: 1 addition & 0 deletions providers/openfeature-provider-flagd/tests/e2e/test_rpc.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,5 @@ def image():
f"{TEST_HARNESS_PATH}/gherkin/flagd.feature",
f"{TEST_HARNESS_PATH}/gherkin/flagd-json-evaluator.feature",
f"{SPEC_PATH}/specification/assets/gherkin/evaluation.feature",
"./rpc_cache.feature",
)
27 changes: 25 additions & 2 deletions providers/openfeature-provider-flagd/tests/test_config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import pytest

from openfeature.contrib.provider.flagd.config import (
DEFAULT_CACHE,
DEFAULT_CACHE_SIZE,
DEFAULT_DEADLINE,
DEFAULT_HOST,
DEFAULT_KEEP_ALIVE,
Expand All @@ -11,6 +13,8 @@
DEFAULT_RETRY_BACKOFF,
DEFAULT_STREAM_DEADLINE,
DEFAULT_TLS,
ENV_VAR_CACHE_SIZE,
ENV_VAR_CACHE_TYPE,
ENV_VAR_DEADLINE_MS,
ENV_VAR_HOST,
ENV_VAR_KEEP_ALIVE_TIME_MS,
Expand All @@ -20,13 +24,16 @@
ENV_VAR_RETRY_BACKOFF_MS,
ENV_VAR_STREAM_DEADLINE_MS,
ENV_VAR_TLS,
CacheType,
Config,
ResolverType,
)


def test_return_default_values_rpc():
config = Config()
assert config.cache_type == DEFAULT_CACHE
assert config.max_cache_size == DEFAULT_CACHE_SIZE
assert config.deadline == DEFAULT_DEADLINE
assert config.host == DEFAULT_HOST
assert config.keep_alive_time == DEFAULT_KEEP_ALIVE
Expand All @@ -40,6 +47,8 @@ def test_return_default_values_rpc():

def test_return_default_values_in_process():
config = Config(resolver_type=ResolverType.IN_PROCESS)
assert config.cache_type == DEFAULT_CACHE
assert config.max_cache_size == DEFAULT_CACHE_SIZE
assert config.deadline == DEFAULT_DEADLINE
assert config.host == DEFAULT_HOST
assert config.keep_alive_time == DEFAULT_KEEP_ALIVE
Expand All @@ -56,7 +65,9 @@ def resolver_type(request):
return request.param


def test_overrides_defaults_with_environment(monkeypatch, resolver_type):
def test_overrides_defaults_with_environment(monkeypatch, resolver_type): # noqa: PLR0915
cache = CacheType.DISABLED
cache_size = 456
deadline = 1
host = "flagd"
keep_alive = 2
Expand All @@ -66,6 +77,8 @@ def test_overrides_defaults_with_environment(monkeypatch, resolver_type):
stream_deadline = 4
tls = True

monkeypatch.setenv(ENV_VAR_CACHE_TYPE, str(cache.value))
monkeypatch.setenv(ENV_VAR_CACHE_SIZE, str(cache_size))
monkeypatch.setenv(ENV_VAR_DEADLINE_MS, str(deadline))
monkeypatch.setenv(ENV_VAR_HOST, host)
monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive))
Expand All @@ -77,6 +90,8 @@ def test_overrides_defaults_with_environment(monkeypatch, resolver_type):
monkeypatch.setenv(ENV_VAR_TLS, str(tls))

config = Config()
assert config.cache_type == cache
assert config.max_cache_size == cache_size
assert config.deadline == deadline
assert config.host == host
assert config.keep_alive_time == keep_alive
Expand All @@ -88,7 +103,9 @@ def test_overrides_defaults_with_environment(monkeypatch, resolver_type):
assert config.tls is tls


def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_type):
def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_type): # noqa: PLR0915
cache = CacheType.LRU
cache_size = 456
deadline = 1
host = "flagd"
keep_alive = 2
Expand All @@ -98,6 +115,8 @@ def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_typ
stream_deadline = 4
tls = True

monkeypatch.setenv(ENV_VAR_CACHE_TYPE, str(cache.value) + "value")
monkeypatch.setenv(ENV_VAR_CACHE_SIZE, str(cache_size) + "value")
monkeypatch.setenv(ENV_VAR_DEADLINE_MS, str(deadline) + "value")
monkeypatch.setenv(ENV_VAR_HOST, host + "value")
monkeypatch.setenv(ENV_VAR_KEEP_ALIVE_TIME_MS, str(keep_alive) + "value")
Expand All @@ -109,6 +128,8 @@ def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_typ
monkeypatch.setenv(ENV_VAR_TLS, str(tls) + "value")

config = Config(
cache_type=cache,
max_cache_size=cache_size,
deadline=deadline,
host=host,
port=port,
Expand All @@ -119,6 +140,8 @@ def test_uses_arguments_over_environments_and_defaults(monkeypatch, resolver_typ
keep_alive_time=keep_alive,
offline_flag_source_path=offline_path,
)
assert config.cache_type == cache
assert config.max_cache_size == cache_size
assert config.deadline == deadline
assert config.host == host
assert config.keep_alive_time == keep_alive
Expand Down

0 comments on commit bd25097

Please sign in to comment.