From 77b1e822621bb730c12971b31931efa0f94802c9 Mon Sep 17 00:00:00 2001 From: Matthew Keeler Date: Fri, 24 Oct 2025 10:47:53 -0400 Subject: [PATCH] chore: Remove LDConfig requirement from top level DS function helpers --- ldclient/client.py | 2 +- ldclient/config.py | 2 +- ldclient/impl/datasystem/config.py | 33 ++-- ldclient/impl/datasystem/fdv2.py | 61 +++---- ldclient/integrations/test_datav2.py | 5 +- .../testing/impl/datasystem/test_config.py | 12 +- .../impl/datasystem/test_fdv2_datasystem.py | 28 +-- .../impl/datasystem/test_fdv2_persistence.py | 170 ++++++++++-------- .../integrations/test_test_data_sourcev2.py | 21 +-- 9 files changed, 172 insertions(+), 162 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 6c3269ad..71158291 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -260,7 +260,7 @@ def __start_up(self, start_wait: float): self._data_system: DataSystem = FDv1(self._config) else: - self._data_system = FDv2(datasystem_config, disabled=self._config.offline) + self._data_system = FDv2(self._config, datasystem_config) # Provide flag evaluation function for value-change tracking self._data_system.set_flag_value_eval_fn( # type: ignore diff --git a/ldclient/config.py b/ldclient/config.py index 01e12fec..7d4a7901 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -157,7 +157,7 @@ def disable_ssl_verification(self) -> bool: T = TypeVar("T") -Builder = Callable[[], T] +Builder = Callable[['Config'], T] @dataclass(frozen=True) diff --git a/ldclient/impl/datasystem/config.py b/ldclient/impl/datasystem/config.py index d2755865..c02ba952 100644 --- a/ldclient/impl/datasystem/config.py +++ b/ldclient/impl/datasystem/config.py @@ -20,7 +20,7 @@ T = TypeVar("T") -Builder = Callable[[], T] +Builder = Callable[[LDConfig], T] class ConfigBuilder: # pylint: disable=too-few-public-methods @@ -77,8 +77,8 @@ def build(self) -> DataSystemConfig: ) -def __polling_ds_builder(config: LDConfig) -> Builder[PollingDataSource]: - def builder() -> PollingDataSource: +def __polling_ds_builder() -> Builder[PollingDataSource]: + def builder(config: LDConfig) -> PollingDataSource: requester = Urllib3PollingRequester(config) polling_ds = PollingDataSourceBuilder(config) polling_ds.requester(requester) @@ -88,14 +88,14 @@ def builder() -> PollingDataSource: return builder -def __streaming_ds_builder(config: LDConfig) -> Builder[StreamingDataSource]: - def builder() -> StreamingDataSource: +def __streaming_ds_builder() -> Builder[StreamingDataSource]: + def builder(config: LDConfig) -> StreamingDataSource: return StreamingDataSourceBuilder(config).build() return builder -def default(config: LDConfig) -> ConfigBuilder: +def default() -> ConfigBuilder: """ Default is LaunchDarkly's recommended flag data acquisition strategy. @@ -109,8 +109,8 @@ def default(config: LDConfig) -> ConfigBuilder: for updates. """ - polling_builder = __polling_ds_builder(config) - streaming_builder = __streaming_ds_builder(config) + polling_builder = __polling_ds_builder() + streaming_builder = __streaming_ds_builder() builder = ConfigBuilder() builder.initializers([polling_builder]) @@ -119,14 +119,14 @@ def default(config: LDConfig) -> ConfigBuilder: return builder -def streaming(config: LDConfig) -> ConfigBuilder: +def streaming() -> ConfigBuilder: """ Streaming configures the SDK to efficiently streams flag/segment data in the background, allowing evaluations to operate on the latest data with no additional latency. 
""" - streaming_builder = __streaming_ds_builder(config) + streaming_builder = __streaming_ds_builder() builder = ConfigBuilder() builder.synchronizers(streaming_builder) @@ -134,14 +134,14 @@ def streaming(config: LDConfig) -> ConfigBuilder: return builder -def polling(config: LDConfig) -> ConfigBuilder: +def polling() -> ConfigBuilder: """ Polling configures the SDK to regularly poll an endpoint for flag/segment data in the background. This is less efficient than streaming, but may be necessary in some network environments. """ - polling_builder: Builder[Synchronizer] = __polling_ds_builder(config) + polling_builder: Builder[Synchronizer] = __polling_ds_builder() builder = ConfigBuilder() builder.synchronizers(polling_builder) @@ -160,17 +160,16 @@ def custom() -> ConfigBuilder: return ConfigBuilder() -# TODO(fdv2): Need to update these so they don't rely on the LDConfig -def daemon(config: LDConfig, store: FeatureStore) -> ConfigBuilder: +def daemon(store: FeatureStore) -> ConfigBuilder: """ Daemon configures the SDK to read from a persistent store integration that is populated by Relay Proxy or other SDKs. The SDK will not connect to LaunchDarkly. In this mode, the SDK never writes to the data store. """ - return default(config).data_store(store, DataStoreMode.READ_ONLY) + return default().data_store(store, DataStoreMode.READ_ONLY) -def persistent_store(config: LDConfig, store: FeatureStore) -> ConfigBuilder: +def persistent_store(store: FeatureStore) -> ConfigBuilder: """ PersistentStore is similar to Default, with the addition of a persistent store integration. Before data has arrived from LaunchDarkly, the SDK is @@ -178,7 +177,7 @@ def persistent_store(config: LDConfig, store: FeatureStore) -> ConfigBuilder: data is available, the SDK will no longer read from the persistent store, although it will keep it up-to-date. """ - return default(config).data_store(store, DataStoreMode.READ_WRITE) + return default().data_store(store, DataStoreMode.READ_WRITE) # TODO(fdv2): Implement these methods diff --git a/ldclient/impl/datasystem/fdv2.py b/ldclient/impl/datasystem/fdv2.py index 3106074f..e41386e3 100644 --- a/ldclient/impl/datasystem/fdv2.py +++ b/ldclient/impl/datasystem/fdv2.py @@ -3,7 +3,7 @@ from threading import Event, Thread from typing import Any, Callable, Dict, List, Mapping, Optional -from ldclient.config import Builder, DataSystemConfig +from ldclient.config import Builder, Config, DataSystemConfig from ldclient.feature_store import _FeatureStoreDataSetSorter from ldclient.impl.datasourcev2.status import ( DataSourceStatusProviderImpl, @@ -153,8 +153,8 @@ class FDv2: def __init__( self, - config: DataSystemConfig, - disabled: bool = False, + config: Config, + data_system_config: DataSystemConfig, ): """ Initialize a new FDv2 data system. 
@@ -165,10 +165,11 @@ def __init__( :param disabled: Whether the data system is disabled (offline mode) """ self._config = config - self._primary_synchronizer_builder: Optional[Builder[Synchronizer]] = config.primary_synchronizer - self._secondary_synchronizer_builder = config.secondary_synchronizer - self._fdv1_fallback_synchronizer_builder = config.fdv1_fallback_synchronizer - self._disabled = disabled + self._data_system_config = data_system_config + self._primary_synchronizer_builder: Optional[Builder[Synchronizer]] = data_system_config.primary_synchronizer + self._secondary_synchronizer_builder = data_system_config.secondary_synchronizer + self._fdv1_fallback_synchronizer_builder = data_system_config.fdv1_fallback_synchronizer + self._disabled = self._config.offline # Diagnostic accumulator provided by client for streaming metrics # TODO(fdv2): Either we need to use this, or we need to provide it to @@ -188,10 +189,10 @@ def __init__( self._data_store_status_provider = DataStoreStatusProviderImpl(None, Listeners()) # Configure persistent store if provided - if self._config.data_store is not None: - self._data_store_status_provider = DataStoreStatusProviderImpl(self._config.data_store, Listeners()) - writable = self._config.data_store_mode == DataStoreMode.READ_WRITE - wrapper = FeatureStoreClientWrapper(self._config.data_store, self._data_store_status_provider) + if self._data_system_config.data_store is not None: + self._data_store_status_provider = DataStoreStatusProviderImpl(self._data_system_config.data_store, Listeners()) + writable = self._data_system_config.data_store_mode == DataStoreMode.READ_WRITE + wrapper = FeatureStoreClientWrapper(self._data_system_config.data_store, self._data_store_status_provider) self._store.with_persistence( wrapper, writable, self._data_store_status_provider ) @@ -208,8 +209,8 @@ def __init__( # Track configuration self._configured_with_data_sources = ( - (config.initializers is not None and len(config.initializers) > 0) - or config.primary_synchronizer is not None + (data_system_config.initializers is not None and len(data_system_config.initializers) > 0) + or data_system_config.primary_synchronizer is not None ) def start(self, set_on_ready: Event): @@ -268,32 +269,32 @@ def _run_main_loop(self, set_on_ready: Event): self._run_synchronizers(set_on_ready) except Exception as e: - log.error(f"Error in FDv2 main loop: {e}") + log.error("Error in FDv2 main loop: %s", e) # Ensure ready event is set even on error if not set_on_ready.is_set(): set_on_ready.set() def _run_initializers(self, set_on_ready: Event): """Run initializers to get initial data.""" - if self._config.initializers is None: + if self._data_system_config.initializers is None: return - for initializer_builder in self._config.initializers: + for initializer_builder in self._data_system_config.initializers: if self._stop_event.is_set(): return try: - initializer = initializer_builder() - log.info(f"Attempting to initialize via {initializer.name}") + initializer = initializer_builder(self._config) + log.info("Attempting to initialize via %s", initializer.name) basis_result = initializer.fetch() if isinstance(basis_result, _Fail): - log.warning(f"Initializer {initializer.name} failed: {basis_result.error}") + log.warning("Initializer %s failed: %s", initializer.name, basis_result.error) continue basis = basis_result.value - log.info(f"Initialized via {initializer.name}") + log.info("Initialized via %s", initializer.name) # Apply the basis to the store self._store.apply(basis.change_set, 
basis.persist) @@ -302,12 +303,12 @@ def _run_initializers(self, set_on_ready: Event): if not set_on_ready.is_set(): set_on_ready.set() except Exception as e: - log.error(f"Initializer failed with exception: {e}") + log.error("Initializer failed with exception: %s", e) def _run_synchronizers(self, set_on_ready: Event): """Run synchronizers to keep data up-to-date.""" # If no primary synchronizer configured, just set ready and return - if self._config.primary_synchronizer is None: + if self._data_system_config.primary_synchronizer is None: if not set_on_ready.is_set(): set_on_ready.set() return @@ -318,8 +319,8 @@ def synchronizer_loop(self: 'FDv2'): while not self._stop_event.is_set() and self._primary_synchronizer_builder is not None: # Try primary synchronizer try: - primary_sync = self._primary_synchronizer_builder() - log.info(f"Primary synchronizer {primary_sync.name} is starting") + primary_sync = self._primary_synchronizer_builder(self._config) + log.info("Primary synchronizer %s is starting", primary_sync.name) remove_sync, fallback_v1 = self._consume_synchronizer_results( primary_sync, set_on_ready, self._fallback_condition @@ -345,8 +346,8 @@ def synchronizer_loop(self: 'FDv2'): if self._secondary_synchronizer_builder is None: continue - secondary_sync = self._secondary_synchronizer_builder() - log.info(f"Secondary synchronizer {secondary_sync.name} is starting") + secondary_sync = self._secondary_synchronizer_builder(self._config) + log.info("Secondary synchronizer %s is starting", secondary_sync.name) remove_sync, fallback_v1 = self._consume_synchronizer_results( secondary_sync, set_on_ready, self._recovery_condition @@ -368,11 +369,11 @@ def synchronizer_loop(self: 'FDv2'): log.info("Recovery condition met, returning to primary synchronizer") except Exception as e: - log.error(f"Failed to build primary synchronizer: {e}") + log.error("Failed to build primary synchronizer: %s", e) break except Exception as e: - log.error(f"Error in synchronizer loop: {e}") + log.error("Error in synchronizer loop: %s", e) finally: # Ensure we always set the ready event when exiting if not set_on_ready.is_set(): @@ -400,7 +401,7 @@ def _consume_synchronizer_results( """ try: for update in synchronizer.sync(): - log.info(f"Synchronizer {synchronizer.name} update: {update.state}") + log.info("Synchronizer %s update: %s", synchronizer.name, update.state) if self._stop_event.is_set(): return False, False @@ -425,7 +426,7 @@ def _consume_synchronizer_results( return False, False except Exception as e: - log.error(f"Error consuming synchronizer results: {e}") + log.error("Error consuming synchronizer results: %s", e) return True, False return True, False diff --git a/ldclient/integrations/test_datav2.py b/ldclient/integrations/test_datav2.py index 84ccf30d..744264f2 100644 --- a/ldclient/integrations/test_datav2.py +++ b/ldclient/integrations/test_datav2.py @@ -3,6 +3,7 @@ import copy from typing import Any, Dict, List, Optional, Set, Union +from ldclient.config import Config from ldclient.context import Context from ldclient.impl.integrations.test_datav2.test_data_sourcev2 import ( _TestDataSourceV2 @@ -693,7 +694,7 @@ def _add_instance(self, instance): finally: self._lock.unlock() - def build_initializer(self) -> _TestDataSourceV2: + def build_initializer(self, _: Config) -> _TestDataSourceV2: """ Creates an initializer that can be used with the FDv2 data system. 
@@ -701,7 +702,7 @@ def build_initializer(self) -> _TestDataSourceV2: """ return _TestDataSourceV2(self) - def build_synchronizer(self) -> _TestDataSourceV2: + def build_synchronizer(self, _: Config) -> _TestDataSourceV2: """ Creates a synchronizer that can be used with the FDv2 data system. diff --git a/ldclient/testing/impl/datasystem/test_config.py b/ldclient/testing/impl/datasystem/test_config.py index 5142fb82..a36c748d 100644 --- a/ldclient/testing/impl/datasystem/test_config.py +++ b/ldclient/testing/impl/datasystem/test_config.py @@ -126,9 +126,7 @@ def test_custom_builder(): def test_default_config_builder(): """Test that default() returns a properly configured ConfigBuilder.""" - mock_ld_config = Mock(spec=LDConfig) - - builder = default(mock_ld_config) + builder = default() assert isinstance(builder, ConfigBuilder) # The actual implementation details would be tested in integration tests @@ -137,9 +135,7 @@ def test_default_config_builder(): def test_streaming_config_builder(): """Test that streaming() returns a properly configured ConfigBuilder.""" - mock_ld_config = Mock(spec=LDConfig) - - builder = streaming(mock_ld_config) + builder = streaming() assert isinstance(builder, ConfigBuilder) # The actual implementation details would be tested in integration tests @@ -148,9 +144,7 @@ def test_streaming_config_builder(): def test_polling_config_builder(): """Test that polling() returns a properly configured ConfigBuilder.""" - mock_ld_config = Mock(spec=LDConfig) - - builder = polling(mock_ld_config) + builder = polling() assert isinstance(builder, ConfigBuilder) # The actual implementation details would be tested in integration tests diff --git a/ldclient/testing/impl/datasystem/test_fdv2_datasystem.py b/ldclient/testing/impl/datasystem/test_fdv2_datasystem.py index b0db1426..353dfa0a 100644 --- a/ldclient/testing/impl/datasystem/test_fdv2_datasystem.py +++ b/ldclient/testing/impl/datasystem/test_fdv2_datasystem.py @@ -5,7 +5,7 @@ from mock import Mock -from ldclient.config import DataSystemConfig +from ldclient.config import Config, DataSystemConfig from ldclient.impl.datasystem import DataAvailability, Synchronizer from ldclient.impl.datasystem.fdv2 import FDv2 from ldclient.integrations.test_datav2 import TestDataV2 @@ -18,13 +18,13 @@ def test_two_phase_init(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( initializers=[td_initializer.build_initializer], primary_synchronizer=td_synchronizer.build_synchronizer, ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) changed = Event() changes: List[FlagChange] = [] @@ -52,13 +52,13 @@ def listener(flag_change: FlagChange): def test_can_stop_fdv2(): td = TestDataV2.data_source() - config = DataSystemConfig( + data_system_config = DataSystemConfig( initializers=None, primary_synchronizer=td.build_synchronizer, ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) changed = Event() changes: List[FlagChange] = [] @@ -81,13 +81,13 @@ def listener(flag_change: FlagChange): def test_fdv2_data_availability_is_refreshed_with_data(): td = TestDataV2.data_source() - config = DataSystemConfig( + data_system_config = DataSystemConfig( initializers=None, primary_synchronizer=td.build_synchronizer, ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) 
fdv2.start(set_on_ready) assert set_on_ready.wait(1), "Data system did not become ready in time" @@ -101,9 +101,9 @@ def test_fdv2_fallsback_to_secondary_synchronizer(): mock.sync.return_value = iter([]) # Empty iterator to simulate no data td = TestDataV2.data_source() td.update(td.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( initializers=[td.build_initializer], - primary_synchronizer=lambda: mock, # Primary synchronizer is None to force fallback + primary_synchronizer=lambda _: mock, # Primary synchronizer is None to force fallback secondary_synchronizer=td.build_synchronizer, ) @@ -120,7 +120,7 @@ def listener(flag_change: FlagChange): changed.set() set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) fdv2.flag_tracker.add_listener(listener) fdv2.start(set_on_ready) assert set_on_ready.wait(1), "Data system did not become ready in time" @@ -137,10 +137,10 @@ def test_fdv2_shutdown_down_if_both_synchronizers_fail(): mock.sync.return_value = iter([]) # Empty iterator to simulate no data td = TestDataV2.data_source() td.update(td.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( initializers=[td.build_initializer], - primary_synchronizer=lambda: mock, # Primary synchronizer is None to force fallback - secondary_synchronizer=lambda: mock, # Secondary synchronizer also fails + primary_synchronizer=lambda _: mock, # Primary synchronizer is None to force fallback + secondary_synchronizer=lambda _: mock, # Secondary synchronizer also fails ) changed = Event() @@ -150,7 +150,7 @@ def listener(status: DataSourceStatus): changed.set() set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) fdv2.data_source_status_provider.add_listener(listener) fdv2.start(set_on_ready) assert set_on_ready.wait(1), "Data system did not become ready in time" diff --git a/ldclient/testing/impl/datasystem/test_fdv2_persistence.py b/ldclient/testing/impl/datasystem/test_fdv2_persistence.py index c64757ab..34cbd4c9 100644 --- a/ldclient/testing/impl/datasystem/test_fdv2_persistence.py +++ b/ldclient/testing/impl/datasystem/test_fdv2_persistence.py @@ -3,16 +3,11 @@ from threading import Event from typing import Any, Callable, Dict, List, Mapping, Optional -from ldclient.config import DataSystemConfig +from ldclient.config import Config, DataSystemConfig from ldclient.impl.datasystem import DataAvailability from ldclient.impl.datasystem.fdv2 import FDv2 from ldclient.integrations.test_datav2 import TestDataV2 -from ldclient.interfaces import ( - DataStoreMode, - DataStoreStatus, - FeatureStore, - FlagChange -) +from ldclient.interfaces import DataStoreMode, FeatureStore, FlagChange from ldclient.versioned_data_kind import FEATURES, SEGMENTS, VersionedDataKind @@ -21,10 +16,16 @@ class StubFeatureStore(FeatureStore): A simple stub implementation of FeatureStore for testing. Records all operations and allows inspection of state. 
""" - def __init__(self, initial_data: Optional[Mapping[VersionedDataKind, Mapping[str, Dict[Any, Any]]]] = None): + + def __init__( + self, + initial_data: Optional[ + Mapping[VersionedDataKind, Mapping[str, Dict[Any, Any]]] + ] = None, + ): self._data: Dict[VersionedDataKind, Dict[str, dict]] = { FEATURES: {}, - SEGMENTS: {} + SEGMENTS: {}, } self._initialized = False self._available = True @@ -44,16 +45,23 @@ def init(self, all_data: Mapping[VersionedDataKind, Mapping[str, Dict[Any, Any]] self.init_called_count += 1 self._data = { FEATURES: dict(all_data.get(FEATURES, {})), - SEGMENTS: dict(all_data.get(SEGMENTS, {})) + SEGMENTS: dict(all_data.get(SEGMENTS, {})), } self._initialized = True - def get(self, kind: VersionedDataKind, key: str, callback: Callable[[Any], Any] = lambda x: x): + def get( + self, + kind: VersionedDataKind, + key: str, + callback: Callable[[Any], Any] = lambda x: x, + ): self.get_calls.append((kind, key)) item = self._data.get(kind, {}).get(key) return callback(item) if item else None - def all(self, kind: VersionedDataKind, callback: Callable[[Any], Any] = lambda x: x): + def all( + self, kind: VersionedDataKind, callback: Callable[[Any], Any] = lambda x: x + ): self.all_calls.append(kind) items = self._data.get(kind, {}) return {key: callback(value) for key, value in items.items()} @@ -61,14 +69,14 @@ def all(self, kind: VersionedDataKind, callback: Callable[[Any], Any] = lambda x def delete(self, kind: VersionedDataKind, key: str, version: int): self.delete_calls.append((kind, key, version)) existing = self._data.get(kind, {}).get(key) - if existing and existing.get('version', 0) < version: - self._data[kind][key] = {'key': key, 'version': version, 'deleted': True} + if existing and existing.get("version", 0) < version: + self._data[kind][key] = {"key": key, "version": version, "deleted": True} def upsert(self, kind: VersionedDataKind, item: dict): - self.upsert_calls.append((kind, item.get('key'), item.get('version'))) - key = item['key'] + self.upsert_calls.append((kind, item.get("key"), item.get("version"))) + key = item["key"] existing = self._data.get(kind, {}).get(key) - if not existing or existing.get('version', 0) < item.get('version', 0): + if not existing or existing.get("version", 0) < item.get("version", 0): self._data[kind][key] = item @property @@ -95,7 +103,7 @@ def get_data_snapshot(self) -> Mapping[VersionedDataKind, Mapping[str, dict]]: """Test helper to get a snapshot of current data""" return { FEATURES: dict(self._data[FEATURES]), - SEGMENTS: dict(self._data[SEGMENTS]) + SEGMENTS: dict(self._data[SEGMENTS]), } def reset_operation_tracking(self): @@ -112,15 +120,15 @@ def test_persistent_store_read_only_mode(): # Pre-populate persistent store with a flag initial_data = { FEATURES: { - 'existing-flag': { - 'key': 'existing-flag', - 'version': 1, - 'on': True, - 'variations': [True, False], - 'fallthrough': {'variation': 0} + "existing-flag": { + "key": "existing-flag", + "version": 1, + "on": True, + "variations": [True, False], + "fallthrough": {"variation": 0}, } }, - SEGMENTS: {} + SEGMENTS: {}, } persistent_store = StubFeatureStore(initial_data) @@ -129,7 +137,7 @@ def test_persistent_store_read_only_mode(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("new-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_ONLY, data_store=persistent_store, initializers=None, @@ -137,7 +145,7 @@ def test_persistent_store_read_only_mode(): ) 
set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) fdv2.start(set_on_ready) assert set_on_ready.wait(1), "Data system did not become ready in time" @@ -158,15 +166,15 @@ def test_persistent_store_read_write_mode(): # Pre-populate persistent store with a flag initial_data = { FEATURES: { - 'existing-flag': { - 'key': 'existing-flag', - 'version': 1, - 'on': True, - 'variations': [True, False], - 'fallthrough': {'variation': 0} + "existing-flag": { + "key": "existing-flag", + "version": 1, + "on": True, + "variations": [True, False], + "fallthrough": {"variation": 0}, } }, - SEGMENTS: {} + SEGMENTS: {}, } persistent_store = StubFeatureStore(initial_data) @@ -176,7 +184,7 @@ def test_persistent_store_read_write_mode(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("new-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=persistent_store, initializers=None, @@ -184,17 +192,19 @@ def test_persistent_store_read_write_mode(): ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) fdv2.start(set_on_ready) assert set_on_ready.wait(1), "Data system did not become ready in time" # In READ_WRITE mode, the store should be initialized with new data - assert persistent_store.init_called_count >= 1 # At least one init call for the new data + assert ( + persistent_store.init_called_count >= 1 + ) # At least one init call for the new data # Verify the new flag was written to persistent store snapshot = persistent_store.get_data_snapshot() - assert 'new-flag' in snapshot[FEATURES] + assert "new-flag" in snapshot[FEATURES] fdv2.stop() @@ -207,7 +217,7 @@ def test_persistent_store_delta_updates_read_write(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=persistent_store, initializers=None, @@ -215,7 +225,7 @@ def test_persistent_store_delta_updates_read_write(): ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Set up flag change listener to detect the update flag_changed = Event() @@ -223,7 +233,9 @@ def test_persistent_store_delta_updates_read_write(): def listener(flag_change: FlagChange): change_count[0] += 1 - if change_count[0] == 2: # First change is from initial sync, second is our update + if ( + change_count[0] == 2 + ): # First change is from initial sync, second is our update flag_changed.set() fdv2.flag_tracker.add_listener(listener) @@ -241,12 +253,12 @@ def listener(flag_change: FlagChange): # Verify the update was written to persistent store assert len(persistent_store.upsert_calls) > 0 - assert any(call[1] == 'feature-flag' for call in persistent_store.upsert_calls) + assert any(call[1] == "feature-flag" for call in persistent_store.upsert_calls) # Verify the updated flag is in the store snapshot = persistent_store.get_data_snapshot() - assert 'feature-flag' in snapshot[FEATURES] - assert snapshot[FEATURES]['feature-flag']['on'] is False + assert "feature-flag" in snapshot[FEATURES] + assert snapshot[FEATURES]["feature-flag"]["on"] is False fdv2.stop() @@ -259,7 +271,7 @@ def test_persistent_store_delta_updates_read_only(): td_synchronizer = TestDataV2.data_source() 
td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_ONLY, data_store=persistent_store, initializers=None, @@ -267,7 +279,7 @@ def test_persistent_store_delta_updates_read_only(): ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Set up flag change listener to detect the update flag_changed = Event() @@ -275,7 +287,9 @@ def test_persistent_store_delta_updates_read_only(): def listener(flag_change: FlagChange): change_count[0] += 1 - if change_count[0] == 2: # First change is from initial sync, second is our update + if ( + change_count[0] == 2 + ): # First change is from initial sync, second is our update flag_changed.set() fdv2.flag_tracker.add_listener(listener) @@ -309,7 +323,7 @@ def test_persistent_store_with_initializer_and_synchronizer(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("sync-flag").on(False)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=persistent_store, initializers=[td_initializer.build_initializer], @@ -317,7 +331,7 @@ def test_persistent_store_with_initializer_and_synchronizer(): ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Set up flag change listener to detect when synchronizer data arrives sync_flag_arrived = Event() @@ -338,8 +352,8 @@ def listener(flag_change: FlagChange): # The synchronizer flag should be in the persistent store # (it replaces the init-flag since synchronizer does a full data set) snapshot = persistent_store.get_data_snapshot() - assert 'init-flag' not in snapshot[FEATURES] - assert 'sync-flag' in snapshot[FEATURES] + assert "init-flag" not in snapshot[FEATURES] + assert "sync-flag" in snapshot[FEATURES] fdv2.stop() @@ -361,15 +375,15 @@ def test_persistent_store_delete_operations(): # Pre-populate with a flag initial_data = { FEATURES: { - 'deletable-flag': { - 'key': 'deletable-flag', - 'version': 1, - 'on': True, - 'variations': [True, False], - 'fallthrough': {'variation': 0} + "deletable-flag": { + "key": "deletable-flag", + "version": 1, + "on": True, + "variations": [True, False], + "fallthrough": {"variation": 0}, } }, - SEGMENTS: {} + SEGMENTS: {}, } persistent_store = StubFeatureStore(initial_data) @@ -384,18 +398,18 @@ def test_persistent_store_delete_operations(): Change( action=ChangeType.PUT, kind=ObjectKind.FLAG, - key='deletable-flag', + key="deletable-flag", version=1, object={ - 'key': 'deletable-flag', - 'version': 1, - 'on': True, - 'variations': [True, False], - 'fallthrough': {'variation': 0} - } + "key": "deletable-flag", + "version": 1, + "on": True, + "variations": [True, False], + "fallthrough": {"variation": 0}, + }, ) ], - selector=None + selector=None, ) store.apply(init_changeset, True) @@ -408,19 +422,19 @@ def test_persistent_store_delete_operations(): Change( action=ChangeType.DELETE, kind=ObjectKind.FLAG, - key='deletable-flag', + key="deletable-flag", version=2, - object=None + object=None, ) ], - selector=None + selector=None, ) store.apply(delete_changeset, True) # Verify delete was called on persistent store assert len(persistent_store.delete_calls) > 0 - assert any(call[1] == 'deletable-flag' for call in persistent_store.delete_calls) + assert any(call[1] == "deletable-flag" for call in persistent_store.delete_calls) def 
test_data_store_status_provider(): @@ -430,7 +444,7 @@ def test_data_store_status_provider(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=persistent_store, initializers=None, @@ -438,7 +452,7 @@ def test_data_store_status_provider(): ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Verify data store status provider exists status_provider = fdv2.data_store_status_provider @@ -462,14 +476,14 @@ def test_data_store_status_monitoring_not_enabled_by_default(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=persistent_store, initializers=None, primary_synchronizer=td_synchronizer.build_synchronizer, ) - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Monitoring should not be enabled because the store doesn't support it status_provider = fdv2.data_store_status_provider @@ -484,14 +498,14 @@ def test_data_store_status_monitoring_enabled_when_supported(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=persistent_store, initializers=None, primary_synchronizer=td_synchronizer.build_synchronizer, ) - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Monitoring should be enabled status_provider = fdv2.data_store_status_provider @@ -503,7 +517,7 @@ def test_no_persistent_store_status_provider_without_store(): td_synchronizer = TestDataV2.data_source() td_synchronizer.update(td_synchronizer.flag("feature-flag").on(True)) - config = DataSystemConfig( + data_system_config = DataSystemConfig( data_store_mode=DataStoreMode.READ_WRITE, data_store=None, initializers=None, @@ -511,7 +525,7 @@ def test_no_persistent_store_status_provider_without_store(): ) set_on_ready = Event() - fdv2 = FDv2(config) + fdv2 = FDv2(Config(sdk_key="dummy"), data_system_config) # Status provider should exist but not be monitoring status_provider = fdv2.data_store_status_provider diff --git a/ldclient/testing/integrations/test_test_data_sourcev2.py b/ldclient/testing/integrations/test_test_data_sourcev2.py index ac52278a..0660ffae 100644 --- a/ldclient/testing/integrations/test_test_data_sourcev2.py +++ b/ldclient/testing/integrations/test_test_data_sourcev2.py @@ -4,6 +4,7 @@ import pytest +from ldclient.config import Config from ldclient.impl.datasystem.protocolv2 import ( ChangeType, IntentCode, @@ -19,7 +20,7 @@ def test_creates_valid_initializer(): """Test that TestDataV2 creates a working initializer""" td = TestDataV2.data_source() - initializer = td.build_initializer() + initializer = td.build_initializer(Config(sdk_key="dummy")) result = initializer.fetch() assert isinstance(result, _Success) @@ -34,7 +35,7 @@ def test_creates_valid_initializer(): def test_creates_valid_synchronizer(): """Test that TestDataV2 creates a working synchronizer""" td = TestDataV2.data_source() - synchronizer = td.build_synchronizer() + synchronizer = td.build_synchronizer(Config(sdk_key="dummy")) updates = [] update_count = 0 @@ -238,7 +239,7 @@ def 
test_initializer_fetches_flag_data(): td = TestDataV2.data_source() td.update(td.flag('some-flag').variation_for_all(True)) - initializer = td.build_initializer() + initializer = td.build_initializer(Config(sdk_key="dummy")) result = initializer.fetch() assert isinstance(result, _Success) @@ -258,7 +259,7 @@ def test_synchronizer_yields_initial_data(): td = TestDataV2.data_source() td.update(td.flag('initial-flag').variation_for_all(False)) - synchronizer = td.build_synchronizer() + synchronizer = td.build_synchronizer(Config(sdk_key="dummy")) update_iter = iter(synchronizer.sync()) initial_update = next(update_iter) @@ -277,7 +278,7 @@ def test_synchronizer_yields_initial_data(): def test_synchronizer_receives_updates(): """Test that synchronizer receives flag updates""" td = TestDataV2.data_source() - synchronizer = td.build_synchronizer() + synchronizer = td.build_synchronizer(Config(sdk_key="dummy")) updates = [] update_count = 0 @@ -321,8 +322,8 @@ def collect_updates(): def test_multiple_synchronizers_receive_updates(): """Test that multiple synchronizers receive the same updates""" td = TestDataV2.data_source() - sync1 = td.build_synchronizer() - sync2 = td.build_synchronizer() + sync1 = td.build_synchronizer(Config(sdk_key="dummy")) + sync2 = td.build_synchronizer(Config(sdk_key="dummy")) updates1 = [] updates2 = [] @@ -367,7 +368,7 @@ def collect_updates_2(): def test_closed_synchronizer_stops_yielding(): """Test that closed synchronizer stops yielding updates""" td = TestDataV2.data_source() - synchronizer = td.build_synchronizer() + synchronizer = td.build_synchronizer(Config(sdk_key="dummy")) updates = [] @@ -399,7 +400,7 @@ def test_initializer_can_sync(): td = TestDataV2.data_source() td.update(td.flag('test-flag').variation_for_all(True)) - initializer = td.build_initializer() + initializer = td.build_initializer(Config(sdk_key="dummy")) sync_gen = initializer.sync() # Should get initial update with data @@ -438,7 +439,7 @@ def test_version_increment(): def test_error_handling_in_fetch(): """Test error handling in the fetch method""" td = TestDataV2.data_source() - initializer = td.build_initializer() + initializer = td.build_initializer(Config(sdk_key="dummy")) # Close the initializer to trigger error condition initializer.close()
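
A minimal usage sketch of the API shape this patch moves to, assuming only the
surface shown in the diff: the data-system helpers (default/streaming/polling,
daemon, persistent_store) no longer take an LDConfig, and FDv2 now receives the
SDK Config directly, passing it to each Builder when it constructs initializers
and synchronizers. The SDK key and the wiring below are illustrative
placeholders, not the client's internal start-up path.

    from threading import Event

    from ldclient.config import Config
    from ldclient.impl.datasystem import config as ds_config
    from ldclient.impl.datasystem.fdv2 import FDv2

    sdk_config = Config(sdk_key="example-sdk-key")  # placeholder key

    # Previously: ds_config.streaming(sdk_config); the helper is now config-free.
    data_system_config = ds_config.streaming().build()

    # The LDConfig is supplied once, to FDv2, which invokes builder(config) when
    # it instantiates each initializer/synchronizer.
    fdv2 = FDv2(sdk_config, data_system_config)

    ready = Event()
    fdv2.start(ready)
    ready.wait(5)
    fdv2.stop()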