Commit 8e0eae3

chore: Remove LDConfig requirement from top level DS function helpers
1 parent 4e44909 commit 8e0eae3

10 files changed: +174 -163 lines changed


ldclient/client.py

Lines changed: 1 addition & 1 deletion
@@ -260,7 +260,7 @@ def __start_up(self, start_wait: float):
 
             self._data_system: DataSystem = FDv1(self._config)
         else:
-            self._data_system = FDv2(datasystem_config, disabled=self._config.offline)
+            self._data_system = FDv2(self._config, datasystem_config)
 
         # Provide flag evaluation function for value-change tracking
         self._data_system.set_flag_value_eval_fn(  # type: ignore
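
In practical terms, the client now hands the data system the full SDK Config alongside the DataSystemConfig, and offline mode is read from the Config inside FDv2 rather than being passed as a separate keyword. A minimal sketch of the new call shape (import paths assumed from the file locations in this commit):

    from ldclient.config import Config
    from ldclient.impl.datasystem.config import default
    from ldclient.impl.datasystem.fdv2 import FDv2

    config = Config(sdk_key="sdk-key", offline=False)  # offline now travels inside Config
    datasystem_config = default().build()
    data_system = FDv2(config, datasystem_config)      # previously FDv2(datasystem_config, disabled=config.offline)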

ldclient/config.py

Lines changed: 1 addition & 1 deletion
@@ -157,7 +157,7 @@ def disable_ssl_verification(self) -> bool:
 
 T = TypeVar("T")
 
-Builder = Callable[[], T]
+Builder = Callable[['Config'], T]
 
 
 @dataclass(frozen=True)
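
The practical effect of the new alias is that a Builder no longer closes over a config at construction time: it is any callable that accepts the SDK Config and returns the component. A hedged sketch of a conforming builder (MyComponent is a hypothetical component, not part of this commit):

    from ldclient.config import Config

    class MyComponent:
        def __init__(self, sdk_key: str):
            self.sdk_key = sdk_key

    def my_component_builder(config: Config) -> MyComponent:
        # The data system supplies its SDK Config when it invokes the builder.
        return MyComponent(config.sdk_key)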

ldclient/impl/datasystem/config.py

Lines changed: 16 additions & 17 deletions
@@ -20,7 +20,7 @@
 
 T = TypeVar("T")
 
-Builder = Callable[[], T]
+Builder = Callable[[LDConfig], T]
 
 
 class ConfigBuilder:  # pylint: disable=too-few-public-methods
@@ -77,8 +77,8 @@ def build(self) -> DataSystemConfig:
         )
 
 
-def __polling_ds_builder(config: LDConfig) -> Builder[PollingDataSource]:
-    def builder() -> PollingDataSource:
+def __polling_ds_builder() -> Builder[PollingDataSource]:
+    def builder(config: LDConfig) -> PollingDataSource:
         requester = Urllib3PollingRequester(config)
         polling_ds = PollingDataSourceBuilder(config)
         polling_ds.requester(requester)
@@ -88,14 +88,14 @@ def builder() -> PollingDataSource:
     return builder
 
 
-def __streaming_ds_builder(config: LDConfig) -> Builder[StreamingDataSource]:
-    def builder() -> StreamingDataSource:
+def __streaming_ds_builder() -> Builder[StreamingDataSource]:
+    def builder(config: LDConfig) -> StreamingDataSource:
         return StreamingDataSourceBuilder(config).build()
 
     return builder
 
 
-def default(config: LDConfig) -> ConfigBuilder:
+def default() -> ConfigBuilder:
     """
     Default is LaunchDarkly's recommended flag data acquisition strategy.
 
@@ -109,8 +109,8 @@ def default(config: LDConfig) -> ConfigBuilder:
     for updates.
     """
 
-    polling_builder = __polling_ds_builder(config)
-    streaming_builder = __streaming_ds_builder(config)
+    polling_builder = __polling_ds_builder()
+    streaming_builder = __streaming_ds_builder()
 
     builder = ConfigBuilder()
     builder.initializers([polling_builder])
@@ -119,29 +119,29 @@ def default(config: LDConfig) -> ConfigBuilder:
     return builder
 
 
-def streaming(config: LDConfig) -> ConfigBuilder:
+def streaming() -> ConfigBuilder:
     """
     Streaming configures the SDK to efficiently streams flag/segment data
     in the background, allowing evaluations to operate on the latest data
     with no additional latency.
     """
 
-    streaming_builder = __streaming_ds_builder(config)
+    streaming_builder = __streaming_ds_builder()
 
     builder = ConfigBuilder()
     builder.synchronizers(streaming_builder)
 
     return builder
 
 
-def polling(config: LDConfig) -> ConfigBuilder:
+def polling() -> ConfigBuilder:
     """
     Polling configures the SDK to regularly poll an endpoint for
     flag/segment data in the background. This is less efficient than
     streaming, but may be necessary in some network environments.
     """
 
-    polling_builder: Builder[Synchronizer] = __polling_ds_builder(config)
+    polling_builder: Builder[Synchronizer] = __polling_ds_builder()
 
     builder = ConfigBuilder()
     builder.synchronizers(polling_builder)
@@ -160,25 +160,24 @@ def custom() -> ConfigBuilder:
     return ConfigBuilder()
 
 
-# TODO(fdv2): Need to update these so they don't rely on the LDConfig
-def daemon(config: LDConfig, store: FeatureStore) -> ConfigBuilder:
+def daemon(store: FeatureStore) -> ConfigBuilder:
     """
     Daemon configures the SDK to read from a persistent store integration
     that is populated by Relay Proxy or other SDKs. The SDK will not connect
     to LaunchDarkly. In this mode, the SDK never writes to the data store.
     """
-    return default(config).data_store(store, DataStoreMode.READ_ONLY)
+    return default().data_store(store, DataStoreMode.READ_ONLY)
 
 
-def persistent_store(config: LDConfig, store: FeatureStore) -> ConfigBuilder:
+def persistent_store(store: FeatureStore) -> ConfigBuilder:
     """
     PersistentStore is similar to Default, with the addition of a persistent
     store integration. Before data has arrived from LaunchDarkly, the SDK is
    able to evaluate flags using data from the persistent store. Once fresh
    data is available, the SDK will no longer read from the persistent store,
    although it will keep it up-to-date.
     """
-    return default(config).data_store(store, DataStoreMode.READ_WRITE)
+    return default().data_store(store, DataStoreMode.READ_WRITE)
 
 
 # TODO(fdv2): Implement these methods
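
With this change none of the top-level helpers needs an LDConfig up front; each returns a ConfigBuilder whose deferred builders receive the Config later, when the data system invokes them. A minimal usage sketch, where each call is an alternative way to build a DataSystemConfig (import paths assumed from the file locations in this commit; InMemoryFeatureStore stands in for a real persistent store integration):

    from ldclient.feature_store import InMemoryFeatureStore
    from ldclient.impl.datasystem.config import daemon, default, polling, streaming

    data_system_config = default().build()    # recommended strategy
    data_system_config = streaming().build()  # streaming only
    data_system_config = polling().build()    # polling only

    store = InMemoryFeatureStore()
    data_system_config = daemon(store).build()  # read-only persistent store, no LaunchDarkly connection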

ldclient/impl/datasystem/fdv2.py

Lines changed: 31 additions & 30 deletions
@@ -3,7 +3,7 @@
 from threading import Event, Thread
 from typing import Any, Callable, Dict, List, Mapping, Optional
 
-from ldclient.config import Builder, DataSystemConfig
+from ldclient.config import Builder, Config, DataSystemConfig
 from ldclient.feature_store import _FeatureStoreDataSetSorter
 from ldclient.impl.datasourcev2.status import (
     DataSourceStatusProviderImpl,
@@ -153,8 +153,8 @@ class FDv2:
 
     def __init__(
         self,
-        config: DataSystemConfig,
-        disabled: bool = False,
+        config: Config,
+        data_system_config: DataSystemConfig,
     ):
         """
         Initialize a new FDv2 data system.
@@ -165,10 +165,11 @@ def __init__(
         :param disabled: Whether the data system is disabled (offline mode)
         """
         self._config = config
-        self._primary_synchronizer_builder: Optional[Builder[Synchronizer]] = config.primary_synchronizer
-        self._secondary_synchronizer_builder = config.secondary_synchronizer
-        self._fdv1_fallback_synchronizer_builder = config.fdv1_fallback_synchronizer
-        self._disabled = disabled
+        self._data_system_config = data_system_config
+        self._primary_synchronizer_builder: Optional[Builder[Synchronizer]] = data_system_config.primary_synchronizer
+        self._secondary_synchronizer_builder = data_system_config.secondary_synchronizer
+        self._fdv1_fallback_synchronizer_builder = data_system_config.fdv1_fallback_synchronizer
+        self._disabled = self._config.offline
 
         # Diagnostic accumulator provided by client for streaming metrics
         # TODO(fdv2): Either we need to use this, or we need to provide it to
@@ -188,10 +189,10 @@ def __init__(
         self._data_store_status_provider = DataStoreStatusProviderImpl(None, Listeners())
 
         # Configure persistent store if provided
-        if self._config.data_store is not None:
-            self._data_store_status_provider = DataStoreStatusProviderImpl(self._config.data_store, Listeners())
-            writable = self._config.data_store_mode == DataStoreMode.READ_WRITE
-            wrapper = FeatureStoreClientWrapper(self._config.data_store, self._data_store_status_provider)
+        if self._data_system_config.data_store is not None:
+            self._data_store_status_provider = DataStoreStatusProviderImpl(self._data_system_config.data_store, Listeners())
+            writable = self._data_system_config.data_store_mode == DataStoreMode.READ_WRITE
+            wrapper = FeatureStoreClientWrapper(self._data_system_config.data_store, self._data_store_status_provider)
             self._store.with_persistence(
                 wrapper, writable, self._data_store_status_provider
             )
@@ -208,8 +209,8 @@ def __init__(
 
         # Track configuration
         self._configured_with_data_sources = (
-            (config.initializers is not None and len(config.initializers) > 0)
-            or config.primary_synchronizer is not None
+            (data_system_config.initializers is not None and len(data_system_config.initializers) > 0)
+            or data_system_config.primary_synchronizer is not None
         )
 
     def start(self, set_on_ready: Event):
@@ -268,32 +269,32 @@ def _run_main_loop(self, set_on_ready: Event):
             self._run_synchronizers(set_on_ready)
 
         except Exception as e:
-            log.error(f"Error in FDv2 main loop: {e}")
+            log.error("Error in FDv2 main loop: %s", e)
             # Ensure ready event is set even on error
            if not set_on_ready.is_set():
                set_on_ready.set()
 
    def _run_initializers(self, set_on_ready: Event):
        """Run initializers to get initial data."""
-        if self._config.initializers is None:
+        if self._data_system_config.initializers is None:
            return
 
-        for initializer_builder in self._config.initializers:
+        for initializer_builder in self._data_system_config.initializers:
            if self._stop_event.is_set():
                return
 
            try:
-                initializer = initializer_builder()
-                log.info(f"Attempting to initialize via {initializer.name}")
+                initializer = initializer_builder(self._config)
+                log.info("Attempting to initialize via %s", initializer.name)
 
                basis_result = initializer.fetch()
 
                if isinstance(basis_result, _Fail):
-                    log.warning(f"Initializer {initializer.name} failed: {basis_result.error}")
+                    log.warning("Initializer %s failed: %s", initializer.name, basis_result.error)
                    continue
 
                basis = basis_result.value
-                log.info(f"Initialized via {initializer.name}")
+                log.info("Initialized via %s", initializer.name)
 
                # Apply the basis to the store
                self._store.apply(basis.change_set, basis.persist)
@@ -302,12 +303,12 @@ def _run_initializers(self, set_on_ready: Event):
                if not set_on_ready.is_set():
                    set_on_ready.set()
            except Exception as e:
-                log.error(f"Initializer failed with exception: {e}")
+                log.error("Initializer failed with exception: %s", e)
 
    def _run_synchronizers(self, set_on_ready: Event):
        """Run synchronizers to keep data up-to-date."""
        # If no primary synchronizer configured, just set ready and return
-        if self._config.primary_synchronizer is None:
+        if self._data_system_config.primary_synchronizer is None:
            if not set_on_ready.is_set():
                set_on_ready.set()
            return
@@ -318,8 +319,8 @@ def synchronizer_loop(self: 'FDv2'):
            while not self._stop_event.is_set() and self._primary_synchronizer_builder is not None:
                # Try primary synchronizer
                try:
-                    primary_sync = self._primary_synchronizer_builder()
-                    log.info(f"Primary synchronizer {primary_sync.name} is starting")
+                    primary_sync = self._primary_synchronizer_builder(self._config)
+                    log.info("Primary synchronizer %s is starting", primary_sync.name)
 
                    remove_sync, fallback_v1 = self._consume_synchronizer_results(
                        primary_sync, set_on_ready, self._fallback_condition
@@ -345,8 +346,8 @@ def synchronizer_loop(self: 'FDv2'):
                    if self._secondary_synchronizer_builder is None:
                        continue
 
-                    secondary_sync = self._secondary_synchronizer_builder()
-                    log.info(f"Secondary synchronizer {secondary_sync.name} is starting")
+                    secondary_sync = self._secondary_synchronizer_builder(self._config)
+                    log.info("Secondary synchronizer %s is starting", secondary_sync.name)
 
                    remove_sync, fallback_v1 = self._consume_synchronizer_results(
                        secondary_sync, set_on_ready, self._recovery_condition
@@ -368,11 +369,11 @@ def synchronizer_loop(self: 'FDv2'):
 
                    log.info("Recovery condition met, returning to primary synchronizer")
                except Exception as e:
-                    log.error(f"Failed to build primary synchronizer: {e}")
+                    log.error("Failed to build primary synchronizer: %s", e)
                    break
 
        except Exception as e:
-            log.error(f"Error in synchronizer loop: {e}")
+            log.error("Error in synchronizer loop: %s", e)
        finally:
            # Ensure we always set the ready event when exiting
            if not set_on_ready.is_set():
@@ -400,7 +401,7 @@ def _consume_synchronizer_results(
        """
        try:
            for update in synchronizer.sync():
-                log.info(f"Synchronizer {synchronizer.name} update: {update.state}")
+                log.info("Synchronizer %s update: %s", synchronizer.name, update.state)
                if self._stop_event.is_set():
                    return False, False
 
@@ -425,7 +426,7 @@ def _consume_synchronizer_results(
                    return False, False
 
        except Exception as e:
-            log.error(f"Error consuming synchronizer results: {e}")
+            log.error("Error consuming synchronizer results: %s", e)
            return True, False
 
        return True, False
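
The net effect inside FDv2 is that data source construction is deferred: every stored builder is a Callable[[Config], T] that FDv2 invokes with the SDK Config at the point of use (initializer_builder(self._config), self._primary_synchronizer_builder(self._config)), and offline mode comes from config.offline instead of a disabled keyword. A hedged sketch of wiring a custom synchronizer through the new shape (MySynchronizer is a hypothetical stand-in, not part of this commit; import paths assumed from the file locations above):

    from ldclient.config import Config
    from ldclient.impl.datasystem.config import custom
    from ldclient.impl.datasystem.fdv2 import FDv2

    class MySynchronizer:
        """Illustrative stand-in; a real synchronizer yields updates from sync()."""
        name = "my-synchronizer"

        def __init__(self, sdk_key: str):
            self._sdk_key = sdk_key

        def sync(self):
            return iter(())  # a real implementation would yield update objects

    def my_synchronizer_builder(config: Config) -> MySynchronizer:
        # Called by FDv2 with the SDK Config when the synchronizer is needed.
        return MySynchronizer(sdk_key=config.sdk_key)

    builder = custom()
    builder.synchronizers(my_synchronizer_builder)
    fdv2 = FDv2(Config(sdk_key="sdk-key"), builder.build())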

ldclient/integrations/test_datav2.py

Lines changed: 3 additions & 2 deletions
@@ -3,6 +3,7 @@
 import copy
 from typing import Any, Dict, List, Optional, Set, Union
 
+from ldclient.config import Config
 from ldclient.context import Context
 from ldclient.impl.integrations.test_datav2.test_data_sourcev2 import (
     _TestDataSourceV2
@@ -693,15 +694,15 @@ def _add_instance(self, instance):
         finally:
             self._lock.unlock()
 
-    def build_initializer(self) -> _TestDataSourceV2:
+    def build_initializer(self, _: Config) -> _TestDataSourceV2:
         """
         Creates an initializer that can be used with the FDv2 data system.
 
         :return: a test data initializer
         """
         return _TestDataSourceV2(self)
 
-    def build_synchronizer(self) -> _TestDataSourceV2:
+    def build_synchronizer(self, _: Config) -> _TestDataSourceV2:
         """
         Creates a synchronizer that can be used with the FDv2 data system.
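
With the extra (ignored) Config parameter, these builder methods now match the Builder = Callable[[Config], T] shape used throughout this commit, so they can be handed directly to a ConfigBuilder. A hedged sketch, where td stands for an instance of the test data fixture defined in this module (its class name is not shown in the diff):

    from ldclient.impl.datasystem.config import custom  # path assumed from this commit

    builder = custom()
    builder.initializers([td.build_initializer])  # bound methods now accept (and ignore) a Config
    builder.synchronizers(td.build_synchronizer)
    data_system_config = builder.build()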

ldclient/testing/impl/datasourcev2/test_polling_synchronizer.py

Lines changed: 1 addition & 0 deletions
@@ -201,6 +201,7 @@ def test_handles_delete_objects():
     assert valid.change_set.selector.state == "p:SOMETHING:300"
     assert valid.change_set.intent_code == IntentCode.TRANSFER_FULL
 
+
 def test_generic_error_interrupts_and_recovers():
     builder = ChangeSetBuilder()
     builder.start(intent=IntentCode.TRANSFER_FULL)

ldclient/testing/impl/datasystem/test_config.py

Lines changed: 3 additions & 9 deletions
@@ -126,9 +126,7 @@ def test_custom_builder():
 
 def test_default_config_builder():
     """Test that default() returns a properly configured ConfigBuilder."""
-    mock_ld_config = Mock(spec=LDConfig)
-
-    builder = default(mock_ld_config)
+    builder = default()
 
     assert isinstance(builder, ConfigBuilder)
     # The actual implementation details would be tested in integration tests
@@ -137,9 +135,7 @@ def test_default_config_builder():
 
 def test_streaming_config_builder():
     """Test that streaming() returns a properly configured ConfigBuilder."""
-    mock_ld_config = Mock(spec=LDConfig)
-
-    builder = streaming(mock_ld_config)
+    builder = streaming()
 
     assert isinstance(builder, ConfigBuilder)
     # The actual implementation details would be tested in integration tests
@@ -148,9 +144,7 @@ def test_streaming_config_builder():
 
 def test_polling_config_builder():
     """Test that polling() returns a properly configured ConfigBuilder."""
-    mock_ld_config = Mock(spec=LDConfig)
-
-    builder = polling(mock_ld_config)
+    builder = polling()
 
     assert isinstance(builder, ConfigBuilder)
     # The actual implementation details would be tested in integration tests
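
The store-backed helpers follow the same pattern; a hedged sketch of what an analogous test could look like (the test itself is illustrative and not part of this commit; import paths assumed from the file locations in this commit):

    from ldclient.feature_store import InMemoryFeatureStore
    from ldclient.impl.datasystem.config import ConfigBuilder, daemon


    def test_daemon_config_builder():
        builder = daemon(InMemoryFeatureStore())  # daemon() now takes only a FeatureStore
        assert isinstance(builder, ConfigBuilder)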
