diff --git a/ldclient/impl/datasourcev2/__init__.py b/ldclient/impl/datasourcev2/__init__.py index 96a0318e..1bde435b 100644 --- a/ldclient/impl/datasourcev2/__init__.py +++ b/ldclient/impl/datasourcev2/__init__.py @@ -10,80 +10,6 @@ You have been warned. """ -from abc import abstractmethod -from dataclasses import dataclass -from typing import Generator, Mapping, Optional, Protocol, Tuple +from .polling import PollingResult, Requester -from ldclient.impl.datasystem.protocolv2 import Basis, ChangeSet -from ldclient.impl.util import _Result -from ldclient.interfaces import DataSourceErrorInfo, DataSourceState - -PollingResult = _Result[Tuple[ChangeSet, Mapping], str] - - -BasisResult = _Result[Basis, str] - - -class Initializer(Protocol): # pylint: disable=too-few-public-methods - """ - Initializer represents a component capable of retrieving a single data - result, such as from the LD polling API. - - The intent of initializers is to quickly fetch an initial set of data, - which may be stale but is fast to retrieve. This initial data serves as a - foundation for a Synchronizer to build upon, enabling it to provide updates - as new changes occur. - """ - - @abstractmethod - def fetch(self) -> BasisResult: - """ - sync should begin the synchronization process for the data source, yielding - Update objects until the connection is closed or an unrecoverable error - occurs. - """ - raise NotImplementedError - - -@dataclass(frozen=True) -class Update: - """ - Update represents the results of a synchronizer's ongoing sync - method. - """ - - state: DataSourceState - change_set: Optional[ChangeSet] = None - error: Optional[DataSourceErrorInfo] = None - revert_to_fdv1: bool = False - environment_id: Optional[str] = None - - -class Synchronizer(Protocol): # pylint: disable=too-few-public-methods - """ - Synchronizer represents a component capable of synchronizing data from an external - data source, such as a streaming or polling API. - - It is responsible for yielding Update objects that represent the current state - of the data source, including any changes that have occurred since the last - synchronization. - """ - - @abstractmethod - def sync(self) -> Generator[Update, None, None]: - """ - sync should begin the synchronization process for the data source, yielding - Update objects until the connection is closed or an unrecoverable error - occurs. 
- """ - raise NotImplementedError - - -__all__: list[str] = [ - # Initializer-related types - "BasisResult", - "Initializer", - # Synchronizer-related types - "Update", - "Synchronizer", -] +__all__: list[str] = ["PollingResult", "Requester"] diff --git a/ldclient/impl/datasourcev2/polling.py b/ldclient/impl/datasourcev2/polling.py index 0037a179..224f49c5 100644 --- a/ldclient/impl/datasourcev2/polling.py +++ b/ldclient/impl/datasourcev2/polling.py @@ -8,12 +8,13 @@ from collections import namedtuple from threading import Event from time import time -from typing import Generator, Optional, Protocol +from typing import Generator, Mapping, Optional, Protocol, Tuple from urllib import parse import urllib3 -from ldclient.impl.datasourcev2 import BasisResult, PollingResult, Update +from ldclient.config import Config +from ldclient.impl.datasystem import BasisResult, Update from ldclient.impl.datasystem.protocolv2 import ( Basis, ChangeSet, @@ -46,7 +47,7 @@ POLLING_ENDPOINT = "/sdk/poll" -CacheEntry = namedtuple("CacheEntry", ["data", "etag"]) +PollingResult = _Result[Tuple[ChangeSet, Mapping], str] class Requester(Protocol): # pylint: disable=too-few-public-methods @@ -68,6 +69,9 @@ def fetch(self, selector: Optional[Selector]) -> PollingResult: raise NotImplementedError +CacheEntry = namedtuple("CacheEntry", ["data", "etag"]) + + class PollingDataSource: """ PollingDataSource is a data source that can retrieve information from @@ -206,7 +210,7 @@ class Urllib3PollingRequester: requests. """ - def __init__(self, config): + def __init__(self, config: Config): self._etag = None self._http = _http_factory(config).create_pool_manager(1, config.base_uri) self._config = config @@ -335,3 +339,30 @@ def polling_payload_to_changeset(data: dict) -> _Result[ChangeSet, str]: ) return _Fail(error="didn't receive any known protocol events in polling payload") + + +class PollingDataSourceBuilder: + """ + Builder for a PollingDataSource. + """ + + def __init__(self, config: Config): + self._config = config + self._requester: Optional[Requester] = None + + def requester(self, requester: Requester) -> "PollingDataSourceBuilder": + """Sets a custom Requester for the PollingDataSource.""" + self._requester = requester + return self + + def build(self) -> PollingDataSource: + """Builds the PollingDataSource with the configured parameters.""" + requester = ( + self._requester + if self._requester is not None + else Urllib3PollingRequester(self._config) + ) + + return PollingDataSource( + poll_interval=self._config.poll_interval, requester=requester + ) diff --git a/ldclient/impl/datasourcev2/streaming.py b/ldclient/impl/datasourcev2/streaming.py index f4f5638b..03ea68ff 100644 --- a/ldclient/impl/datasourcev2/streaming.py +++ b/ldclient/impl/datasourcev2/streaming.py @@ -19,7 +19,7 @@ from ld_eventsource.errors import HTTPStatusError from ldclient.config import Config -from ldclient.impl.datasourcev2 import Synchronizer, Update +from ldclient.impl.datasystem import Synchronizer, Update from ldclient.impl.datasystem.protocolv2 import ( ChangeSetBuilder, DeleteObject, @@ -110,7 +110,7 @@ def create_sse_client(config: Config) -> SSEClientImpl: ) -class StreamingSynchronizer(Synchronizer): +class StreamingDataSource(Synchronizer): """ StreamingSynchronizer is a specific type of Synchronizer that handles streaming data sources. 
@@ -386,3 +386,17 @@ def __enter__(self):
     def __exit__(self, type, value, traceback):
         # self.stop()
         pass
+
+
+class StreamingDataSourceBuilder:  # pylint: disable=too-few-public-methods
+    """
+    Builder for a StreamingDataSource.
+    """
+
+    def __init__(self, config: Config):
+        self._config = config
+
+    def build(self) -> StreamingDataSource:
+        """Builds a StreamingDataSource instance with the configured parameters."""
+        # TODO(fdv2): Add in the other controls here.
+        return StreamingDataSource(self._config)
diff --git a/ldclient/impl/datasystem/__init__.py b/ldclient/impl/datasystem/__init__.py
index ad05eb50..210fc7f7 100644
--- a/ldclient/impl/datasystem/__init__.py
+++ b/ldclient/impl/datasystem/__init__.py
@@ -4,46 +4,196 @@
 """

 from abc import abstractmethod
-from typing import Protocol
+from dataclasses import dataclass
+from enum import Enum
+from threading import Event
+from typing import Generator, Optional, Protocol

-from ldclient.impl.util import Result
+from ldclient.impl.datasystem.protocolv2 import Basis, ChangeSet
+from ldclient.impl.util import _Result
+from ldclient.interfaces import (
+    DataSourceErrorInfo,
+    DataSourceState,
+    DataSourceStatusProvider,
+    DataStoreStatusProvider,
+    FlagTracker
+)


-class Synchronizer(Protocol):
+class DataAvailability(str, Enum):
     """
-    Represents a component capable of obtaining a Basis and subsequent delta
-    updates asynchronously.
+    Represents the availability of data in the SDK.
     """

-    @abstractmethod
-    def name(self) -> str:
-        """Returns the name of the initializer."""
-        raise NotImplementedError
+    DEFAULTS = "defaults"
+    """
+    The SDK has no data and will evaluate flags using the application-provided default values.
+    """

-    # TODO(fdv2): Need sync method
+    CACHED = "cached"
+    """
+    The SDK has data, not necessarily the latest, which will be used to evaluate flags.
+    """
+
+    REFRESHED = "refreshed"
+    """
+    The SDK has obtained, at least once, the latest known data from LaunchDarkly.
+    """

-    def close(self):
+    def at_least(self, other: "DataAvailability") -> bool:
         """
-        Close the synchronizer, releasing any resources it holds.
+        Returns whether this availability level is **at least** as good as the other.
         """
+        if self == other:
+            return True

+        if self == DataAvailability.REFRESHED:
+            return True

-class Initializer(Protocol):
+        if self == DataAvailability.CACHED and other == DataAvailability.DEFAULTS:
+            return True
+
+        return False
+
+
+class DataSystem(Protocol):
     """
-    Represents a component capable of obtaining a Basis via a synchronous call.
+    Represents the requirements the client has for storing/retrieving/detecting changes related
+    to the SDK's data model.
     """

     @abstractmethod
-    def name(self) -> str:
-        """Returns the name of the initializer."""
+    def start(self, set_on_ready: Event):
+        """
+        Starts the data system.
+
+        This method will return immediately. The provided `Event` will be set when the system
+        has reached an initial state (either permanently failed, e.g. due to bad auth, or
+        succeeded).
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def stop(self):
+        """
+        Halts the data system. Should be called when the client is closed to stop any
+        long-running operations.
+        """
+        raise NotImplementedError
+
+    @property
+    @abstractmethod
+    def data_source_status_provider(self) -> DataSourceStatusProvider:
+        """
+        Returns an interface for tracking the status of the data source.
+ + The data source is the mechanism that the SDK uses to get feature flag configurations, such + as a streaming connection (the default) or poll requests. The + :class:`ldclient.interfaces.DataSourceStatusProvider` has methods for checking whether the + data source is (as far as the SDK knows) currently operational and tracking changes in this + status. + + :return: The data source status provider + """ + raise NotImplementedError + + @property + @abstractmethod + def data_store_status_provider(self) -> DataStoreStatusProvider: + """ + Returns an interface for tracking the status of a persistent data store. + + The provider has methods for checking whether the data store is (as far + as the SDK knows) currently operational, tracking changes in this + status, and getting cache statistics. These are only relevant for a + persistent data store; if you are using an in-memory data store, then + this method will return a stub object that provides no information. + + :return: The data store status provider + """ + raise NotImplementedError + + @property + @abstractmethod + def flag_tracker(self) -> FlagTracker: + """ + Returns an interface for tracking changes in feature flag configurations. + + The :class:`ldclient.interfaces.FlagTracker` contains methods for + requesting notifications about feature flag changes using an event + listener model. + """ + raise NotImplementedError + + @property + @abstractmethod + def data_availability(self) -> DataAvailability: + """ + Indicates what form of data is currently available. + """ raise NotImplementedError + @property @abstractmethod - def fetch(self) -> Result: + def target_availability(self) -> DataAvailability: """ - Fetch returns a Basis, or an error if the Basis could not be retrieved. + Indicates the ideal form of data attainable given the current configuration. """ raise NotImplementedError -__all__: list[str] = ["Synchronizer", "Initializer"] +BasisResult = _Result[Basis, str] + + +class Initializer(Protocol): # pylint: disable=too-few-public-methods + """ + Initializer represents a component capable of retrieving a single data + result, such as from the LD polling API. + + The intent of initializers is to quickly fetch an initial set of data, + which may be stale but is fast to retrieve. This initial data serves as a + foundation for a Synchronizer to build upon, enabling it to provide updates + as new changes occur. + """ + + @abstractmethod + def fetch(self) -> BasisResult: + """ + fetch should retrieve the initial data set for the data source, returning + a Basis object on success, or an error message on failure. + """ + raise NotImplementedError + + +@dataclass(frozen=True) +class Update: + """ + Update represents the results of a synchronizer's ongoing sync + method. + """ + + state: DataSourceState + change_set: Optional[ChangeSet] = None + error: Optional[DataSourceErrorInfo] = None + revert_to_fdv1: bool = False + environment_id: Optional[str] = None + + +class Synchronizer(Protocol): # pylint: disable=too-few-public-methods + """ + Synchronizer represents a component capable of synchronizing data from an external + data source, such as a streaming or polling API. + + It is responsible for yielding Update objects that represent the current state + of the data source, including any changes that have occurred since the last + synchronization. 
+    """
+
+    @abstractmethod
+    def sync(self) -> Generator[Update, None, None]:
+        """
+        sync should begin the synchronization process for the data source, yielding
+        Update objects until the connection is closed or an unrecoverable error
+        occurs.
+        """
+        raise NotImplementedError
diff --git a/ldclient/impl/datasystem/config.py b/ldclient/impl/datasystem/config.py
new file mode 100644
index 00000000..c0e66d6b
--- /dev/null
+++ b/ldclient/impl/datasystem/config.py
@@ -0,0 +1,188 @@
+"""
+Configuration for LaunchDarkly's data acquisition strategy.
+"""
+
+from dataclasses import dataclass
+from typing import Callable, List, Optional, TypeVar
+
+from ldclient.config import Config as LDConfig
+from ldclient.impl.datasourcev2.polling import (
+    PollingDataSource,
+    PollingDataSourceBuilder,
+    Urllib3PollingRequester
+)
+from ldclient.impl.datasourcev2.streaming import (
+    StreamingDataSource,
+    StreamingDataSourceBuilder
+)
+from ldclient.impl.datasystem import Initializer, Synchronizer
+
+T = TypeVar("T")
+
+Builder = Callable[[], T]
+
+
+@dataclass(frozen=True)
+class Config:
+    """
+    Configuration for LaunchDarkly's data acquisition strategy.
+    """
+
+    initializers: Optional[List[Builder[Initializer]]]
+    """The initializers for the data system."""
+
+    primary_synchronizer: Builder[Synchronizer]
+    """The primary synchronizer for the data system."""
+
+    secondary_synchronizer: Optional[Builder[Synchronizer]]
+    """The secondary synchronizer for the data system."""
+
+
+class ConfigBuilder:  # pylint: disable=too-few-public-methods
+    """
+    Builder for the data system configuration.
+    """
+
+    _initializers: Optional[List[Builder[Initializer]]] = None
+    _primary_synchronizer: Optional[Builder[Synchronizer]] = None
+    _secondary_synchronizer: Optional[Builder[Synchronizer]] = None
+
+    def initializers(self, initializers: List[Builder[Initializer]]) -> "ConfigBuilder":
+        """
+        Sets the initializers for the data system.
+        """
+        self._initializers = initializers
+        return self
+
+    def synchronizers(
+        self,
+        primary: Builder[Synchronizer],
+        secondary: Optional[Builder[Synchronizer]] = None,
+    ) -> "ConfigBuilder":
+        """
+        Sets the synchronizers for the data system.
+        """
+        self._primary_synchronizer = primary
+        self._secondary_synchronizer = secondary
+        return self
+
+    def build(self) -> Config:
+        """
+        Builds the data system configuration.
+        """
+        if self._primary_synchronizer is None:
+            raise ValueError("Primary synchronizer must be set")
+
+        return Config(
+            initializers=self._initializers,
+            primary_synchronizer=self._primary_synchronizer,
+            secondary_synchronizer=self._secondary_synchronizer,
+        )
+
+
+def __polling_ds_builder(config: LDConfig) -> Builder[PollingDataSource]:
+    def builder() -> PollingDataSource:
+        requester = Urllib3PollingRequester(config)
+        polling_ds = PollingDataSourceBuilder(config)
+        polling_ds.requester(requester)
+
+        return polling_ds.build()
+
+    return builder
+
+
+def __streaming_ds_builder(config: LDConfig) -> Builder[StreamingDataSource]:
+    def builder() -> StreamingDataSource:
+        return StreamingDataSourceBuilder(config).build()
+
+    return builder
+
+
+def default(config: LDConfig) -> ConfigBuilder:
+    """
+    Default is LaunchDarkly's recommended flag data acquisition strategy.
+
+    Currently, it operates a two-phase method for obtaining data: first, it
+    requests data from LaunchDarkly's global CDN. Then, it initiates a
+    streaming connection to LaunchDarkly's Flag Delivery services to
+    receive real-time updates.
+
+    If the streaming connection is interrupted for an extended period of
+    time, the SDK will automatically fall back to polling the global CDN
+    for updates.
+    """
+
+    polling_builder = __polling_ds_builder(config)
+    streaming_builder = __streaming_ds_builder(config)
+
+    builder = ConfigBuilder()
+    builder.initializers([polling_builder])
+    builder.synchronizers(streaming_builder, polling_builder)
+
+    return builder
+
+
+def streaming(config: LDConfig) -> ConfigBuilder:
+    """
+    Streaming configures the SDK to efficiently stream flag/segment data
+    in the background, allowing evaluations to operate on the latest data
+    with no additional latency.
+    """
+
+    streaming_builder = __streaming_ds_builder(config)
+
+    builder = ConfigBuilder()
+    builder.synchronizers(streaming_builder)
+
+    return builder
+
+
+def polling(config: LDConfig) -> ConfigBuilder:
+    """
+    Polling configures the SDK to regularly poll an endpoint for
+    flag/segment data in the background. This is less efficient than
+    streaming, but may be necessary in some network environments.
+    """
+
+    polling_builder = __polling_ds_builder(config)
+
+    builder = ConfigBuilder()
+    builder.synchronizers(polling_builder)
+
+    return builder
+
+
+def custom() -> ConfigBuilder:
+    """
+    Custom returns a builder suitable for creating a custom data
+    acquisition strategy. You may configure how the SDK uses a Persistent
+    Store, how the SDK obtains an initial set of data, and how the SDK
+    keeps data up-to-date.
+    """
+
+    return ConfigBuilder()
+
+
+# TODO(fdv2): Implement these methods
+#
+# Daemon configures the SDK to read from a persistent store integration
+# that is populated by Relay Proxy or other SDKs. The SDK will not connect
+# to LaunchDarkly. In this mode, the SDK never writes to the data store.
+
+# PersistentStore is similar to Default, with the addition of a persistent
+# store integration. Before data has arrived from LaunchDarkly, the SDK is
+# able to evaluate flags using data from the persistent store. Once fresh
+# data is available, the SDK will no longer read from the persistent store,
+# although it will keep it up-to-date.
+
+# WithEndpoints configures the data system with custom endpoints for
+# LaunchDarkly's streaming and polling synchronizers. This method is not
+# necessary for most use-cases, but can be useful for testing or custom
+# network configurations.
+#
+# Any endpoint that is not specified (empty string) will be treated as the
+# default LaunchDarkly SaaS endpoint for that service.
+
+# WithRelayProxyEndpoints configures the data system with a single endpoint
+# for LaunchDarkly's streaming and polling synchronizers. The endpoint
+# should be Relay Proxy's base URI, for example http://localhost:8123.
diff --git a/ldclient/impl/datasystem/protocolv2.py b/ldclient/impl/datasystem/protocolv2.py
index e93cb23d..50cc0862 100644
--- a/ldclient/impl/datasystem/protocolv2.py
+++ b/ldclient/impl/datasystem/protocolv2.py
@@ -542,8 +542,11 @@ class ChangeSetBuilder:
     """

     def __init__(self):
-        self.intent: Optional[IntentCode] = None
-        self.changes: List[Change] = []
+        """
+        Initializes a new ChangeSetBuilder.
+ """ + self.intent = None + self.changes = [] @staticmethod def no_changes() -> "ChangeSet": diff --git a/ldclient/testing/impl/datasourcev2/test_polling_synchronizer.py b/ldclient/testing/impl/datasourcev2/test_polling_synchronizer.py index 4e1150cd..ff8bf2eb 100644 --- a/ldclient/testing/impl/datasourcev2/test_polling_synchronizer.py +++ b/ldclient/testing/impl/datasourcev2/test_polling_synchronizer.py @@ -322,13 +322,13 @@ def test_recoverable_error_continues(): headers = {} polling_result: PollingResult = _Success(value=(change_set, headers)) - _failure = _Fail(error="error for test", exception=UnsuccessfulResponseException(status=408)) + _failure = _Fail( + error="error for test", exception=UnsuccessfulResponseException(status=408) + ) synchronizer = PollingDataSource( poll_interval=0.01, - requester=ListBasedRequester( - results=iter([_failure, polling_result]) - ), + requester=ListBasedRequester(results=iter([_failure, polling_result])), ) sync = synchronizer.sync() interrupted = next(sync) @@ -360,13 +360,13 @@ def test_unrecoverable_error_shuts_down(): headers = {} polling_result: PollingResult = _Success(value=(change_set, headers)) - _failure = _Fail(error="error for test", exception=UnsuccessfulResponseException(status=401)) + _failure = _Fail( + error="error for test", exception=UnsuccessfulResponseException(status=401) + ) synchronizer = PollingDataSource( poll_interval=0.01, - requester=ListBasedRequester( - results=iter([_failure, polling_result]) - ), + requester=ListBasedRequester(results=iter([_failure, polling_result])), ) sync = synchronizer.sync() off = next(sync) diff --git a/ldclient/testing/impl/datasourcev2/test_streaming_synchronizer.py b/ldclient/testing/impl/datasourcev2/test_streaming_synchronizer.py index e161c81d..8aa66bbb 100644 --- a/ldclient/testing/impl/datasourcev2/test_streaming_synchronizer.py +++ b/ldclient/testing/impl/datasourcev2/test_streaming_synchronizer.py @@ -14,7 +14,7 @@ from ldclient.impl.datasourcev2.streaming import ( SSEClient, SseClientBuilder, - StreamingSynchronizer + StreamingDataSource ) from ldclient.impl.datasystem.protocolv2 import ( ChangeType, @@ -75,7 +75,7 @@ class UnknownTypeOfEvent(Action): unknown_named_event = Event(event="Unknown") builder = list_sse_client([UnknownTypeOfEvent(), unknown_named_event]) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) assert len(list(synchronizer.sync())) == 0 @@ -83,7 +83,7 @@ class UnknownTypeOfEvent(Action): def test_ignores_faults_without_errors(): errorless_fault = Fault(error=None) builder = list_sse_client([errorless_fault]) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) assert len(list(synchronizer.sync())) == 0 @@ -162,7 +162,7 @@ def test_handles_no_changes(): ) builder = list_sse_client([intent_event]) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -181,7 +181,7 @@ def test_handles_empty_changeset(events): # pylint: disable=redefined-outer-nam ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -207,7 +207,7 @@ def test_handles_put_objects(events): # pylint: disable=redefined-outer-name ] 
) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -238,7 +238,7 @@ def test_handles_delete_objects(events): # pylint: disable=redefined-outer-name ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -268,7 +268,7 @@ def test_swallows_goodbye(events): # pylint: disable=redefined-outer-name ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -294,7 +294,7 @@ def test_swallows_heartbeat(events): # pylint: disable=redefined-outer-name ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -322,7 +322,7 @@ def test_error_resets(events): # pylint: disable=redefined-outer-name ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -345,7 +345,7 @@ def test_handles_out_of_order(events): # pylint: disable=redefined-outer-name ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -375,7 +375,7 @@ def test_invalid_json_decoding(events): # pylint: disable=redefined-outer-name ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 2 @@ -407,7 +407,7 @@ def test_stops_on_unrecoverable_status_code( ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 1 @@ -436,7 +436,7 @@ def test_continues_on_recoverable_status_code( events[EventName.PAYLOAD_TRANSFERRED], ] ) - synchronizer = StreamingSynchronizer(Config(sdk_key="key"), builder) + synchronizer = StreamingDataSource(Config(sdk_key="key"), builder) updates = list(synchronizer.sync()) assert len(updates) == 3 diff --git a/ldclient/testing/impl/datasystem/test_config.py b/ldclient/testing/impl/datasystem/test_config.py new file mode 100644 index 00000000..c7c0925b --- /dev/null +++ b/ldclient/testing/impl/datasystem/test_config.py @@ -0,0 +1,221 @@ +import dataclasses +from unittest.mock import MagicMock, Mock + +import pytest + +from ldclient.config import Config as LDConfig +from ldclient.impl.datasystem import Initializer, Synchronizer +from ldclient.impl.datasystem.config import ( + Config, + ConfigBuilder, + custom, + default, + polling, + streaming +) + + +def test_config_builder_initializers(): + """Test that initializers can be set and retrieved correctly.""" + builder = ConfigBuilder() + mock_initializer = Mock() + + result = builder.initializers([mock_initializer]) + + assert result is builder # Method chaining + assert builder._initializers == [mock_initializer] + + +def test_config_builder_synchronizers_primary_only(): + """Test that primary 
synchronizer can be set without secondary.""" + builder = ConfigBuilder() + mock_synchronizer = Mock() + + result = builder.synchronizers(mock_synchronizer) + + assert result is builder # Method chaining + assert builder._primary_synchronizer == mock_synchronizer + assert builder._secondary_synchronizer is None + + +def test_config_builder_synchronizers_with_secondary(): + """Test that both primary and secondary synchronizers can be set.""" + builder = ConfigBuilder() + mock_primary = Mock() + mock_secondary = Mock() + + result = builder.synchronizers(mock_primary, mock_secondary) + + assert result is builder # Method chaining + assert builder._primary_synchronizer == mock_primary + assert builder._secondary_synchronizer == mock_secondary + + +def test_config_builder_build_success(): + """Test successful build with all required fields set.""" + builder = ConfigBuilder() + mock_initializer = Mock() + mock_primary = Mock() + mock_secondary = Mock() + + builder.initializers([mock_initializer]) + builder.synchronizers(mock_primary, mock_secondary) + + config = builder.build() + + assert isinstance(config, Config) + assert config.initializers == [mock_initializer] + assert config.primary_synchronizer == mock_primary + assert config.secondary_synchronizer == mock_secondary + + +def test_config_builder_build_missing_primary_synchronizer(): + """Test that build fails when primary synchronizer is not set.""" + builder = ConfigBuilder() + + with pytest.raises(ValueError, match="Primary synchronizer must be set"): + builder.build() + + +def test_config_builder_build_with_initializers_only(): + """Test that build fails when only initializers are set.""" + builder = ConfigBuilder() + mock_initializer = Mock() + + builder.initializers([mock_initializer]) + + with pytest.raises(ValueError, match="Primary synchronizer must be set"): + builder.build() + + +def test_config_builder_method_chaining(): + """Test that all builder methods support method chaining.""" + builder = ConfigBuilder() + mock_initializer = Mock() + mock_primary = Mock() + mock_secondary = Mock() + + # Test that each method returns the builder instance + result = builder.initializers([mock_initializer]).synchronizers( + mock_primary, mock_secondary + ) + + assert result is builder + + +def test_config_builder_default_state(): + """Test that ConfigBuilder starts with all fields as None.""" + builder = ConfigBuilder() + + assert builder._initializers is None + assert builder._primary_synchronizer is None + assert builder._secondary_synchronizer is None + + +def test_config_builder_multiple_calls(): + """Test that multiple calls to builder methods overwrite previous values.""" + builder = ConfigBuilder() + mock_initializer1 = Mock() + mock_initializer2 = Mock() + mock_primary1 = Mock() + mock_primary2 = Mock() + + # Set initial values + builder.initializers([mock_initializer1]) + builder.synchronizers(mock_primary1) + + # Overwrite with new values + builder.initializers([mock_initializer2]) + builder.synchronizers(mock_primary2) + + config = builder.build() + + assert config.initializers == [mock_initializer2] + assert config.primary_synchronizer == mock_primary2 + + +def test_custom_builder(): + """Test that custom() returns a fresh ConfigBuilder instance.""" + builder1 = custom() + builder2 = custom() + + assert isinstance(builder1, ConfigBuilder) + assert isinstance(builder2, ConfigBuilder) + assert builder1 is not builder2 # Different instances + + +def test_default_config_builder(): + """Test that default() returns a properly configured 
ConfigBuilder.""" + mock_ld_config = Mock(spec=LDConfig) + + builder = default(mock_ld_config) + + assert isinstance(builder, ConfigBuilder) + # The actual implementation details would be tested in integration tests + # Here we just verify it returns a builder + + +def test_streaming_config_builder(): + """Test that streaming() returns a properly configured ConfigBuilder.""" + mock_ld_config = Mock(spec=LDConfig) + + builder = streaming(mock_ld_config) + + assert isinstance(builder, ConfigBuilder) + # The actual implementation details would be tested in integration tests + # Here we just verify it returns a builder + + +def test_polling_config_builder(): + """Test that polling() returns a properly configured ConfigBuilder.""" + mock_ld_config = Mock(spec=LDConfig) + + builder = polling(mock_ld_config) + + assert isinstance(builder, ConfigBuilder) + # The actual implementation details would be tested in integration tests + # Here we just verify it returns a builder + + +def test_config_dataclass_immutability(): + """Test that Config instances are immutable (frozen dataclass).""" + mock_primary = Mock() + mock_secondary = Mock() + + config = Config( + initializers=None, + primary_synchronizer=mock_primary, + secondary_synchronizer=mock_secondary, + ) + + # Attempting to modify attributes should raise an error + with pytest.raises(dataclasses.FrozenInstanceError): + config.primary_synchronizer = Mock() + + +def test_config_builder_with_none_initializers(): + """Test that initializers can be explicitly set to None.""" + builder = ConfigBuilder() + mock_primary = Mock() + + builder.initializers(None) + builder.synchronizers(mock_primary) + + config = builder.build() + + assert config.initializers is None + assert config.primary_synchronizer == mock_primary + + +def test_config_builder_with_empty_initializers_list(): + """Test that empty list of initializers is handled correctly.""" + builder = ConfigBuilder() + mock_primary = Mock() + + builder.initializers([]) + builder.synchronizers(mock_primary) + + config = builder.build() + + assert config.initializers == [] + assert config.primary_synchronizer == mock_primary diff --git a/ldclient/testing/impl/datasystem/test_data_availability.py b/ldclient/testing/impl/datasystem/test_data_availability.py new file mode 100644 index 00000000..3eee9461 --- /dev/null +++ b/ldclient/testing/impl/datasystem/test_data_availability.py @@ -0,0 +1,89 @@ +import pytest + +from ldclient.impl.datasystem import DataAvailability + + +def test_data_availability_enum_values(): + """Test that DataAvailability enum has the expected values.""" + assert DataAvailability.DEFAULTS == "defaults" + assert DataAvailability.CACHED == "cached" + assert DataAvailability.REFRESHED == "refreshed" + + +def test_data_availability_enum_type(): + """Test that DataAvailability is a string enum.""" + assert isinstance(DataAvailability.DEFAULTS, str) + assert isinstance(DataAvailability.CACHED, str) + assert isinstance(DataAvailability.REFRESHED, str) + + # Should also be instances of the enum class + assert isinstance(DataAvailability.DEFAULTS, DataAvailability) + assert isinstance(DataAvailability.CACHED, DataAvailability) + assert isinstance(DataAvailability.REFRESHED, DataAvailability) + + +def test_at_least_same_value(): + """Test that at_least returns True when comparing the same value.""" + assert DataAvailability.DEFAULTS.at_least(DataAvailability.DEFAULTS) is True + assert DataAvailability.CACHED.at_least(DataAvailability.CACHED) is True + assert 
DataAvailability.REFRESHED.at_least(DataAvailability.REFRESHED) is True + + +def test_at_least_hierarchy(): + """Test the complete hierarchy of at_least relationships.""" + # DEFAULTS < CACHED < REFRESHED + + # DEFAULTS comparisons + assert DataAvailability.DEFAULTS.at_least(DataAvailability.DEFAULTS) is True + assert DataAvailability.DEFAULTS.at_least(DataAvailability.CACHED) is False + assert DataAvailability.DEFAULTS.at_least(DataAvailability.REFRESHED) is False + + # CACHED comparisons + assert DataAvailability.CACHED.at_least(DataAvailability.DEFAULTS) is True + assert DataAvailability.CACHED.at_least(DataAvailability.CACHED) is True + assert DataAvailability.CACHED.at_least(DataAvailability.REFRESHED) is False + + # REFRESHED comparisons + assert DataAvailability.REFRESHED.at_least(DataAvailability.DEFAULTS) is True + assert DataAvailability.REFRESHED.at_least(DataAvailability.CACHED) is True + assert DataAvailability.REFRESHED.at_least(DataAvailability.REFRESHED) is True + + +def test_data_availability_string_operations(): + """Test that DataAvailability values work as strings.""" + defaults = DataAvailability.DEFAULTS + cached = DataAvailability.CACHED + refreshed = DataAvailability.REFRESHED + + # String concatenation + assert defaults + "_test" == "defaults_test" + assert cached + "_test" == "cached_test" + assert refreshed + "_test" == "refreshed_test" + + # String formatting - need to use .value attribute for the actual string + assert f"Status: {defaults.value}" == "Status: defaults" + assert f"Status: {cached.value}" == "Status: cached" + assert f"Status: {refreshed.value}" == "Status: refreshed" + + # String methods + assert defaults.upper() == "DEFAULTS" + assert cached.upper() == "CACHED" + assert refreshed.upper() == "REFRESHED" + + +def test_data_availability_comparison_operators(): + """Test that DataAvailability values can be compared using standard operators.""" + # Equality + assert DataAvailability.DEFAULTS == "defaults" + assert DataAvailability.CACHED == "cached" + assert DataAvailability.REFRESHED == "refreshed" + + # Inequality + assert DataAvailability.DEFAULTS != "cached" + assert DataAvailability.CACHED != "refreshed" + assert DataAvailability.REFRESHED != "defaults" + + # String comparison (lexicographic) - 'cached' < 'defaults' < 'refreshed' + assert DataAvailability.CACHED.value < DataAvailability.DEFAULTS.value + assert DataAvailability.DEFAULTS.value < DataAvailability.REFRESHED.value + assert DataAvailability.CACHED.value < DataAvailability.REFRESHED.value
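
For reference, a minimal usage sketch (not part of the change itself) of how the new data system configuration entry points are expected to fit together. It relies only on the ConfigBuilder, default(), custom(), and StreamingDataSourceBuilder APIs added in this diff; the SDK key value and variable names are illustrative.

from ldclient.config import Config as LDConfig
from ldclient.impl.datasourcev2.streaming import StreamingDataSourceBuilder
from ldclient.impl.datasystem import config as datasystem_config

ld_config = LDConfig(sdk_key="example-sdk-key")  # illustrative key

# Recommended strategy: a polling initializer for a fast first payload, then a
# streaming primary synchronizer with polling as the secondary fallback.
data_system_config = datasystem_config.default(ld_config).build()

# Fully custom strategy: supply your own Builder[Synchronizer] callables.
custom_config = (
    datasystem_config.custom()
    .synchronizers(lambda: StreamingDataSourceBuilder(ld_config).build())
    .build()
)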