From 9abdd646c91972f36be03386960a23f2793278f0 Mon Sep 17 00:00:00 2001 From: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com> Date: Mon, 30 Jun 2025 17:06:23 +0000 Subject: [PATCH 1/8] feat: Add experimental plugin support - Add Plugin interface and metadata classes following plugin specs - Extend Config class to accept plugins parameter - Modify LDClient to register plugins and collect their hooks - Add comprehensive tests for plugin functionality - Follow existing Ruby SDK patterns and conventions Co-Authored-By: jbailey@launchdarkly.com --- lib/ldclient-rb/config.rb | 10 ++ lib/ldclient-rb/interfaces.rb | 133 +++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 52 ++++++++++- spec/ldclient_plugins_spec.rb | 167 ++++++++++++++++++++++++++++++++++ spec/mock_components.rb | 22 +++++ 5 files changed, 383 insertions(+), 1 deletion(-) create mode 100644 spec/ldclient_plugins_spec.rb diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index bca3db2c..5ec80393 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -45,6 +45,7 @@ class Config # @option opts [String] :payload_filter_key See {#payload_filter_key} # @option opts [Boolean] :omit_anonymous_contexts See {#omit_anonymous_contexts} # @option hooks [Array] A list of hooks to be registered with the SDK + # + def get_hooks(environment_metadata) + [] + end + end + end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 379ab71e..844ef422 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -89,7 +89,10 @@ def postfork(wait_for_sec = 5) end private def start_up(wait_for_sec) - @hooks = Concurrent::Array.new(@config.hooks) + environment_metadata = get_environment_metadata + plugin_hooks = get_plugin_hooks(environment_metadata) + + @hooks = Concurrent::Array.new(@config.hooks + plugin_hooks) @shared_executor = Concurrent::SingleThreadExecutor.new @@ -156,6 +159,8 @@ def postfork(wait_for_sec = 5) @data_source = data_source_or_factory end + register_plugins(environment_metadata) + ready = @data_source.start return unless wait_for_sec > 0 @@ -172,6 +177,51 @@ def postfork(wait_for_sec = 5) end end + private def get_environment_metadata + sdk_metadata = Interfaces::Plugins::SdkMetadata.new( + name: "ruby-server-sdk", + version: LaunchDarkly::VERSION, + wrapper_name: @config.wrapper_name, + wrapper_version: @config.wrapper_version + ) + + application_metadata = nil + if @config.application && (!@config.application.empty?) + application_metadata = Interfaces::Plugins::ApplicationMetadata.new( + id: @config.application[:id], + version: @config.application[:version] + ) + end + + Interfaces::Plugins::EnvironmentMetadata.new( + sdk: sdk_metadata, + application: application_metadata, + sdk_key: @sdk_key + ) + end + + private def get_plugin_hooks(environment_metadata) + hooks = [] + @config.plugins.each do |plugin| + begin + hooks.concat(plugin.get_hooks(environment_metadata)) + rescue => e + @config.logger.error { "[LDClient] Error getting hooks from plugin #{plugin.metadata.name}: #{e}" } + end + end + hooks + end + + private def register_plugins(environment_metadata) + @config.plugins.each do |plugin| + begin + plugin.register(self, environment_metadata) + rescue => e + @config.logger.error { "[LDClient] Error registering plugin #{plugin.metadata.name}: #{e}" } + end + end + end + # # Add a hook to the client. In order to register a hook before the client starts, please use the `hooks` property of # {#LDConfig}. 
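For reference, a minimal usage sketch of the plugin support added above (this example is not part of the patch): the Plugin and Hook mixins, PluginMetadata, and the Config `plugins:` option are the ones introduced here, while the TracingHook/TracingPlugin names, the logging they perform, and the "my-sdk-key" placeholder are purely illustrative.

require "ldclient-rb"

class TracingHook
  include LaunchDarkly::Interfaces::Hooks::Hook

  def metadata
    LaunchDarkly::Interfaces::Hooks::Metadata.new("tracing-hook")
  end

  def after_evaluation(evaluation_series_context, data, detail)
    # Illustrative side effect: log every evaluation this hook observes.
    puts "evaluated #{evaluation_series_context.key} => #{detail.value}"
    data
  end
end

class TracingPlugin
  include LaunchDarkly::Interfaces::Plugins::Plugin

  def metadata
    LaunchDarkly::Interfaces::Plugins::PluginMetadata.new("tracing-plugin")
  end

  def get_hooks(environment_metadata)
    # Hooks returned here are appended after any hooks configured directly on the Config.
    [TracingHook.new]
  end

  def register(client, environment_metadata)
    # Called once during client start-up, after plugin hooks have been collected.
    puts "registered with #{environment_metadata.sdk.name} #{environment_metadata.sdk.version}"
  end
end

config = LaunchDarkly::Config.new(plugins: [TracingPlugin.new])
client = LaunchDarkly::LDClient.new("my-sdk-key", config)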
diff --git a/spec/ldclient_plugins_spec.rb b/spec/ldclient_plugins_spec.rb new file mode 100644 index 00000000..d942bc7c --- /dev/null +++ b/spec/ldclient_plugins_spec.rb @@ -0,0 +1,167 @@ +require "spec_helper" + +module LaunchDarkly + describe "LDClient plugins tests" do + class MockPlugin + include Interfaces::Plugins::Plugin + + def initialize(name, hooks = [], register_callback = nil) + @name = name + @hooks = hooks + @register_callback = register_callback + end + + def metadata + Interfaces::Plugins::PluginMetadata.new(@name) + end + + def get_hooks(environment_metadata) + @hooks + end + + def register(client, environment_metadata) + @register_callback.call(client, environment_metadata) if @register_callback + end + end + + context "plugin configuration" do + it "can register a plugin on the config" do + plugin = MockPlugin.new("test-plugin") + config = test_config(plugins: [plugin]) + expect(config.plugins.length).to eq 1 + expect(config.plugins[0]).to eq plugin + end + + it "will drop invalid plugins on config" do + config = test_config(plugins: [true, nil, "example thing"]) + expect(config.plugins.count).to eq 0 + end + + it "can register multiple plugins" do + plugin1 = MockPlugin.new("plugin1") + plugin2 = MockPlugin.new("plugin2") + config = test_config(plugins: [plugin1, plugin2]) + expect(config.plugins.length).to eq 2 + end + end + + context "plugin hook collection" do + it "collects hooks from plugins" do + hook = MockHook.new(->(_, _) { }, ->(_, _, _) { }) + plugin = MockPlugin.new("test-plugin", [hook]) + + with_client(test_config(plugins: [plugin])) do |client| + expect(client.instance_variable_get("@hooks")).to include(hook) + end + end + + it "handles plugin hook errors gracefully" do + plugin = MockPlugin.new("error-plugin") + allow(plugin).to receive(:get_hooks).and_raise("Hook error") + + with_client(test_config(plugins: [plugin])) do |client| + expect(client).to be_initialized + end + end + end + + context "plugin registration" do + it "calls register on plugins during initialization" do + registered = false + register_callback = ->(client, metadata) { registered = true } + plugin = MockPlugin.new("test-plugin", [], register_callback) + + with_client(test_config(plugins: [plugin])) do |client| + expect(registered).to be true + end + end + + it "provides correct environment metadata to plugins" do + received_metadata = nil + register_callback = ->(client, metadata) { received_metadata = metadata } + plugin = MockPlugin.new("test-plugin", [], register_callback) + + with_client(test_config(plugins: [plugin])) do |client| + expect(received_metadata).to be_a(Interfaces::Plugins::EnvironmentMetadata) + expect(received_metadata.sdk.name).to eq("ruby-server-sdk") + expect(received_metadata.sdk.version).to eq(LaunchDarkly::VERSION) + end + end + + it "handles plugin registration errors gracefully" do + register_callback = ->(client, metadata) { raise "Registration error" } + plugin = MockPlugin.new("error-plugin", [], register_callback) + + with_client(test_config(plugins: [plugin])) do |client| + expect(client).to be_initialized + end + end + end + + context "plugin execution order" do + it "registers plugins in the order they were added" do + order = [] + plugin1 = MockPlugin.new("plugin1", [], ->(_, _) { order << "plugin1" }) + plugin2 = MockPlugin.new("plugin2", [], ->(_, _) { order << "plugin2" }) + + with_client(test_config(plugins: [plugin1, plugin2])) do |client| + expect(order).to eq ["plugin1", "plugin2"] + end + end + + it "plugin hooks are added after config 
hooks" do + config_hook = MockHook.new(->(_, _) { }, ->(_, _, _) { }) + plugin_hook = MockHook.new(->(_, _) { }, ->(_, _, _) { }) + plugin = MockPlugin.new("test-plugin", [plugin_hook]) + + with_client(test_config(hooks: [config_hook], plugins: [plugin])) do |client| + hooks = client.instance_variable_get("@hooks") + config_hook_index = hooks.index(config_hook) + plugin_hook_index = hooks.index(plugin_hook) + expect(config_hook_index).to be < plugin_hook_index + end + end + end + + context "metadata classes" do + it "creates SdkMetadata correctly" do + metadata = Interfaces::Plugins::SdkMetadata.new( + name: "test-sdk", + version: "1.0.0", + wrapper_name: "test-wrapper", + wrapper_version: "2.0.0" + ) + + expect(metadata.name).to eq("test-sdk") + expect(metadata.version).to eq("1.0.0") + expect(metadata.wrapper_name).to eq("test-wrapper") + expect(metadata.wrapper_version).to eq("2.0.0") + end + + it "creates ApplicationMetadata correctly" do + metadata = Interfaces::Plugins::ApplicationMetadata.new( + id: "test-app", + version: "3.0.0" + ) + + expect(metadata.id).to eq("test-app") + expect(metadata.version).to eq("3.0.0") + end + + it "creates EnvironmentMetadata correctly" do + sdk_metadata = Interfaces::Plugins::SdkMetadata.new(name: "test", version: "1.0") + app_metadata = Interfaces::Plugins::ApplicationMetadata.new(id: "app") + + metadata = Interfaces::Plugins::EnvironmentMetadata.new( + sdk: sdk_metadata, + application: app_metadata, + sdk_key: "test-key" + ) + + expect(metadata.sdk).to eq(sdk_metadata) + expect(metadata.application).to eq(app_metadata) + expect(metadata.sdk_key).to eq("test-key") + end + end + end +end diff --git a/spec/mock_components.rb b/spec/mock_components.rb index 4eb0e7a8..1a4470fa 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -118,4 +118,26 @@ def after_evaluation(evaluation_series_context, data, detail) @after_evaluation.call(evaluation_series_context, data, detail) end end + + class MockPlugin + include Interfaces::Plugins::Plugin + + def initialize(name, hooks = [], register_callback = nil) + @name = name + @hooks = hooks + @register_callback = register_callback + end + + def metadata + Interfaces::Plugins::PluginMetadata.new(@name) + end + + def get_hooks(environment_metadata) + @hooks + end + + def register(client, environment_metadata) + @register_callback.call(client, environment_metadata) if @register_callback + end + end end From b0a19b513cf0646dd84e493d470d4786b48a3266 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Fri, 11 Jul 2025 16:30:36 +0000 Subject: [PATCH 2/8] split out the interface file that is getting too large --- lib/ldclient-rb/interfaces.rb | 1114 +---------------- .../interfaces/big_segment_store.rb | 153 +++ lib/ldclient-rb/interfaces/data_source.rb | 265 ++++ lib/ldclient-rb/interfaces/data_store.rb | 113 ++ lib/ldclient-rb/interfaces/feature_store.rb | 162 +++ lib/ldclient-rb/interfaces/flag_tracker.rb | 106 ++ lib/ldclient-rb/interfaces/hooks.rb | 88 ++ lib/ldclient-rb/interfaces/migrations.rb | 100 ++ lib/ldclient-rb/interfaces/plugins.rb | 136 ++ 9 files changed, 1131 insertions(+), 1106 deletions(-) create mode 100644 lib/ldclient-rb/interfaces/big_segment_store.rb create mode 100644 lib/ldclient-rb/interfaces/data_source.rb create mode 100644 lib/ldclient-rb/interfaces/data_store.rb create mode 100644 lib/ldclient-rb/interfaces/feature_store.rb create mode 100644 lib/ldclient-rb/interfaces/flag_tracker.rb create mode 100644 lib/ldclient-rb/interfaces/hooks.rb create mode 100644 
lib/ldclient-rb/interfaces/migrations.rb create mode 100644 lib/ldclient-rb/interfaces/plugins.rb diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 92d0ef21..60201067 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -1,1107 +1,9 @@ require "observer" - -module LaunchDarkly - # - # Mixins that define the required methods of various pluggable components used by the client. - # - module Interfaces - # - # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly - # client uses the feature store to persist feature flags and related objects received from - # the LaunchDarkly service. Implementations must support concurrent access and updates. - # For more about how feature stores can be used, see: - # [Using a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data#ruby). - # - # An entity that can be stored in a feature store is a hash that can be converted to and from - # JSON, and that has at a minimum the following properties: `:key`, a string that is unique - # among entities of the same kind; `:version`, an integer that is higher for newer data; - # `:deleted`, a boolean (optional, defaults to false) that if true means this is a - # placeholder for a deleted entity. - # - # To represent the different kinds of objects that can be stored, such as feature flags and - # segments, the SDK will provide a "kind" object; this is a hash with a single property, - # `:namespace`, which is a short string unique to that kind. This string can be used as a - # collection name or a key prefix. - # - # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations - # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new - # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task - # simpler. - # - module FeatureStore - # - # Initializes (or re-initializes) the store with the specified set of entities. Any - # existing entries will be removed. Implementations can assume that this data set is up to - # date-- there is no need to perform individual version comparisons between the existing - # objects and the supplied features. - # - # If possible, the store should update the entire data set atomically. If that is not possible, - # it should iterate through the outer hash and then the inner hash using the existing iteration - # order of those hashes (the SDK will ensure that the items were inserted into the hashes in - # the correct order), storing each item, and then delete any leftover items at the very end. - # - # @param all_data [Hash] a hash where each key is one of the data kind objects, and each - # value is in turn a hash of string keys to entities - # @return [void] - # - def init(all_data) - end - - # - # Returns the entity to which the specified key is mapped, if any. - # - # @param kind [Object] the kind of entity to get - # @param key [String] the unique key of the entity to get - # @return [Hash] the entity; nil if the key was not found, or if the stored entity's - # `:deleted` property was true - # - def get(kind, key) - end - - # - # Returns all stored entities of the specified kind, not including deleted entities. 
- # - # @param kind [Object] the kind of entity to get - # @return [Hash] a hash where each key is the entity's `:key` property and each value - # is the entity - # - def all(kind) - end - - # - # Attempt to add an entity, or update an existing entity with the same key. An update - # should only succeed if the new item's `:version` is greater than the old one; - # otherwise, the method should do nothing. - # - # @param kind [Object] the kind of entity to add or update - # @param item [Hash] the entity to add or update - # @return [void] - # - def upsert(kind, item) - end - - # - # Attempt to delete an entity if it exists. Deletion should only succeed if the - # `version` parameter is greater than the existing entity's `:version`; otherwise, the - # method should do nothing. - # - # @param kind [Object] the kind of entity to delete - # @param key [String] the unique key of the entity - # @param version [Integer] the entity must have a lower version than this to be deleted - # @return [void] - # - def delete(kind, key, version) - end - - # - # Checks whether this store has been initialized. That means that `init` has been called - # either by this process, or (if the store can be shared) by another process. This - # method will be called frequently, so it should be efficient. You can assume that if it - # has returned true once, it can continue to return true, i.e. a store cannot become - # uninitialized again. - # - # @return [Boolean] true if the store is in an initialized state - # - def initialized? - end - - # - # Performs any necessary cleanup to shut down the store when the client is being shut down. - # - # @return [void] - # - def stop - end - - # - # WARN: This isn't a required method on a FeatureStore yet. The SDK will - # currently check if the provided store responds to this method, and if - # it does, will take appropriate action based on the documented behavior - # below. This will become required in a future major version release of - # the SDK. - # - # Returns true if this data store implementation supports status - # monitoring. - # - # This is normally only true for persistent data stores but it could also - # be true for any custom {FeatureStore} implementation. - # - # Returning true means that the store guarantees that if it ever enters - # an invalid state (that is, an operation has failed or it knows that - # operations cannot succeed at the moment), it will publish a status - # update, and will then publish another status update once it has - # returned to a valid state. - # - # Custom implementations must implement `def available?` which - # synchronously checks if the store is available. Without this method, - # the SDK cannot ensure status updates will occur once the store has gone - # offline. - # - # The same value will be returned from - # {StatusProvider::monitoring_enabled?}. - # - # def monitoring_enabled? end - - # - # WARN: This isn't a required method on a FeatureStore. The SDK will - # check if the provided store responds to this method, and if it does, - # will take appropriate action based on the documented behavior below. - # Usage of this method will be dropped in a future version of the SDK. - # - # Tests whether the data store seems to be functioning normally. - # - # This should not be a detailed test of different kinds of operations, - # but just the smallest possible operation to determine whether (for - # instance) we can reach the database. 
- # - # Whenever one of the store's other methods throws an exception, the SDK - # will assume that it may have become unavailable (e.g. the database - # connection was lost). The SDK will then call {#available?} at intervals - # until it returns true. - # - # @return [Boolean] true if the underlying data store is reachable - # - # def available? end - end - - # - # An interface for tracking changes in feature flag configurations. - # - # An implementation of this interface is returned by {LaunchDarkly::LDClient#flag_tracker}. - # Application code never needs to implement this interface. - # - module FlagTracker - # - # Registers a listener to be notified of feature flag changes in general. - # - # The listener will be notified whenever the SDK receives any change to any feature flag's configuration, - # or to a user segment that is referenced by a feature flag. If the updated flag is used as a prerequisite - # for other flags, the SDK assumes that those flags may now behave differently and sends flag change events - # for them as well. - # - # Note that this does not necessarily mean the flag's value has changed for any particular evaluation - # context, only that some part of the flag configuration was changed so that it may return a - # different value than it previously returned for some context. If you want to track flag value changes, - # use {#add_flag_value_change_listener} instead. - # - # It is possible, given current design restrictions, that a listener might be notified when no change has - # occurred. This edge case will be addressed in a later version of the SDK. It is important to note this issue - # does not affect {#add_flag_value_change_listener} listeners. - # - # If using the file data source, any change in a data file will be treated as a change to every flag. Again, - # use {#add_flag_value_change_listener} (or just re-evaluate the flag # yourself) if you want to know whether - # this is a change that really affects a flag's value. - # - # Change events only work if the SDK is actually connecting to LaunchDarkly (or using the file data source). - # If the SDK is only reading flags from a database then it cannot know when there is a change, because - # flags are read on an as-needed basis. - # - # The listener will be called from a worker thread. - # - # Calling this method for an already-registered listener has no effect. - # - # @param listener [#update] - # - def add_listener(listener) end - - # - # Unregisters a listener so that it will no longer be notified of feature flag changes. - # - # Calling this method for a listener that was not previously registered has no effect. - # - # @param listener [Object] - # - def remove_listener(listener) end - - # - # Registers a listener to be notified of a change in a specific feature flag's value for a specific - # evaluation context. - # - # When you call this method, it first immediately evaluates the feature flag. It then uses - # {#add_listener} to start listening for feature flag configuration - # changes, and whenever the specified feature flag changes, it re-evaluates the flag for the same context. - # It then calls your listener if and only if the resulting value has changed. - # - # All feature flag evaluations require an instance of {LaunchDarkly::LDContext}. If the feature flag you are - # tracking does not have any context targeting rules, you must still pass a dummy context such as - # `LDContext.with_key("for-global-flags")`. 
If you do not want the user to appear on your dashboard, - # use the anonymous property: `LDContext.create({key: "for-global-flags", kind: "user", anonymous: true})`. - # - # The returned listener represents the subscription that was created by this method - # call; to unsubscribe, pass that object (not your listener) to {#remove_listener}. - # - # @param key [Symbol] - # @param context [LaunchDarkly::LDContext] - # @param listener [#update] - # - def add_flag_value_change_listener(key, context, listener) end - end - - # - # Change event fired when some aspect of the flag referenced by the key has changed. - # - class FlagChange - attr_accessor :key - - # @param [Symbol] key - def initialize(key) - @key = key - end - end - - # - # Change event fired when the evaluated value for the specified flag key has changed. - # - class FlagValueChange - attr_accessor :key - attr_accessor :old_value - attr_accessor :new_value - - # @param [Symbol] key - # @param [Object] old_value - # @param [Object] new_value - def initialize(key, old_value, new_value) - @key = key - @old_value = old_value - @new_value = new_value - end - end - - module DataStore - # - # An interface for querying the status of a persistent data store. - # - # An implementation of this interface is returned by {LaunchDarkly::LDClient#data_store_status_provider}. - # Application code should not implement this interface. - # - module StatusProvider - # - # Returns the current status of the store. - # - # This is only meaningful for persistent stores, or any custom data store implementation that makes use of - # the status reporting mechanism provided by the SDK. For the default in-memory store, the status will always - # be reported as "available". - # - # @return [Status] the latest status - # - def status - end - - # - # Indicates whether the current data store implementation supports status monitoring. - # - # This is normally true for all persistent data stores, and false for the default in-memory store. A true value - # means that any listeners added with {#add_listener} can expect to be notified if there is any error in - # storing data, and then notified again when the error condition is resolved. A false value means that the - # status is not meaningful and listeners should not expect to be notified. - # - # @return [Boolean] true if status monitoring is enabled - # - def monitoring_enabled? - end - - # - # Subscribes for notifications of status changes. - # - # Applications may wish to know if there is an outage in a persistent data store, since that could mean that - # flag evaluations are unable to get the flag data from the store (unless it is currently cached) and therefore - # might return default values. - # - # If the SDK receives an exception while trying to query or update the data store, then it notifies listeners - # that the store appears to be offline ({Status#available} is false) and begins polling the store - # at intervals until a query succeeds. Once it succeeds, it notifies listeners again with {Status#available} - # set to true. - # - # This method has no effect if the data store implementation does not support status tracking, such as if you - # are using the default in-memory store rather than a persistent store. - # - # @param listener [#update] the listener to add - # - def add_listener(listener) - end - - # - # Unsubscribes from notifications of status changes. 
- # - # This method has no effect if the data store implementation does not support status tracking, such as if you - # are using the default in-memory store rather than a persistent store. - # - # @param listener [Object] the listener to remove; if no such listener was added, this does nothing - # - def remove_listener(listener) - end - end - - # - # Interface that a data store implementation can use to report information back to the SDK. - # - module UpdateSink - # - # Reports a change in the data store's operational status. - # - # This is what makes the status monitoring mechanisms in {StatusProvider} work. - # - # @param status [Status] the updated status properties - # - def update_status(status) - end - end - - class Status - def initialize(available, stale) - @available = available - @stale = stale - end - - # - # Returns true if the SDK believes the data store is now available. - # - # This property is normally true. If the SDK receives an exception while trying to query or update the data - # store, then it sets this property to false (notifying listeners, if any) and polls the store at intervals - # until a query succeeds. Once it succeeds, it sets the property back to true (again notifying listeners). - # - # @return [Boolean] true if store is available - # - attr_reader :available - - # - # Returns true if the store may be out of date due to a previous - # outage, so the SDK should attempt to refresh all feature flag data - # and rewrite it to the store. - # - # This property is not meaningful to application code. - # - # @return [Boolean] true if data should be rewritten - # - attr_reader :stale - end - end - - # - # Mixin that defines the required methods of a data source implementation. This is the - # component that delivers feature flag data from LaunchDarkly to the LDClient by putting - # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. - # - # The client has its own standard implementation, which uses either a streaming connection or - # polling depending on your configuration. Normally you will not need to use another one - # except for testing purposes. Two such test fixtures are {LaunchDarkly::Integrations::FileData} - # and {LaunchDarkly::Integrations::TestData}. - # - module DataSource - # - # Checks whether the data source has finished initializing. Initialization is considered done - # once it has received one complete data set from LaunchDarkly. - # - # @return [Boolean] true if initialization is complete - # - def initialized? - end - - # - # Puts the data source into an active state. Normally this means it will make its first - # connection attempt to LaunchDarkly. If `start` has already been called, calling it again - # should simply return the same value as the first call. - # - # @return [Concurrent::Event] an Event which will be set once initialization is complete - # - def start - end - - # - # Puts the data source into an inactive state and releases all of its resources. - # This state should be considered permanent (`start` does not have to work after `stop`). - # - def stop - end - end - - module BigSegmentStore - # - # Returns information about the overall state of the store. This method will be called only - # when the SDK needs the latest state, so it should not be cached. - # - # @return [BigSegmentStoreMetadata] - # - def get_metadata - end - - # - # Queries the store for a snapshot of the current segment state for a specific context. 
- # - # The context_hash is a base64-encoded string produced by hashing the context key as defined by - # the Big Segments specification; the store implementation does not need to know the details - # of how this is done, because it deals only with already-hashed keys, but the string can be - # assumed to only contain characters that are valid in base64. - # - # The return value should be either a Hash, or nil if the context is not referenced in any big - # segments. Each key in the Hash is a "segment reference", which is how segments are - # identified in Big Segment data. This string is not identical to the segment key-- the SDK - # will add other information. The store implementation should not be concerned with the - # format of the string. Each value in the Hash is true if the context is explicitly included in - # the segment, false if the context is explicitly excluded from the segment-- and is not also - # explicitly included (that is, if both an include and an exclude existed in the data, the - # include would take precedence). If the context's status in a particular segment is undefined, - # there should be no key or value for that segment. - # - # This Hash may be cached by the SDK, so it should not be modified after it is created. It - # is a snapshot of the segment membership state at one point in time. - # - # @param context_hash [String] - # @return [Hash] true/false values for Big Segments that reference this context - # - def get_membership(context_hash) - end - - # - # Performs any necessary cleanup to shut down the store when the client is being shut down. - # - # @return [void] - # - def stop - end - end - - # - # Values returned by {BigSegmentStore#get_metadata}. - # - class BigSegmentStoreMetadata - def initialize(last_up_to_date) - @last_up_to_date = last_up_to_date - end - - # The Unix epoch millisecond timestamp of the last update to the {BigSegmentStore}. It is - # nil if the store has never been updated. - # - # @return [Integer|nil] - attr_reader :last_up_to_date - end - - # - # Information about the status of a Big Segment store, provided by {BigSegmentStoreStatusProvider}. - # - # Big Segments are a specific type of segments. For more information, read the LaunchDarkly - # documentation: https://docs.launchdarkly.com/home/users/big-segments - # - class BigSegmentStoreStatus - def initialize(available, stale) - @available = available - @stale = stale - end - - # True if the Big Segment store is able to respond to queries, so that the SDK can evaluate - # whether a context is in a segment or not. - # - # If this property is false, the store is not able to make queries (for instance, it may not have - # a valid database connection). In this case, the SDK will treat any reference to a Big Segment - # as if no contexts are included in that segment. Also, the {EvaluationReason} associated with - # with any flag evaluation that references a Big Segment when the store is not available will - # have a `big_segments_status` of `STORE_ERROR`. - # - # @return [Boolean] - attr_reader :available - - # True if the Big Segment store is available, but has not been updated within the amount of time - # specified by {BigSegmentsConfig#stale_after}. - # - # This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped - # running or has become unable to receive fresh data from LaunchDarkly. Any feature flag - # evaluations that reference a Big Segment will be using the last known data, which may be out - # of date. 
Also, the {EvaluationReason} associated with those evaluations will have a - # `big_segments_status` of `STALE`. - # - # @return [Boolean] - attr_reader :stale - - def ==(other) - self.available == other.available && self.stale == other.stale - end - end - - # - # An interface for querying the status of a Big Segment store. - # - # The Big Segment store is the component that receives information about Big Segments, normally - # from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type - # of segments. For more information, read the LaunchDarkly documentation: - # https://docs.launchdarkly.com/home/users/big-segments - # - # An implementation of this interface is returned by {LDClient#big_segment_store_status_provider}. - # Application code never needs to implement this interface. - # - # There are two ways to interact with the status. One is to simply get the current status; if its - # `available` property is true, then the SDK is able to evaluate context membership in Big Segments, - # and the `stale`` property indicates whether the data might be out of date. - # - # The other way is to subscribe to status change notifications. Applications may wish to know if - # there is an outage in the Big Segment store, or if it has become stale (the Relay Proxy has - # stopped updating it with new data), since then flag evaluations that reference a Big Segment - # might return incorrect values. To allow finding out about status changes as soon as possible, - # `BigSegmentStoreStatusProvider` mixes in Ruby's - # [Observable](https://docs.ruby-lang.org/en/2.5.0/Observable.html) module to provide standard - # methods such as `add_observer`. Observers will be called with a new {BigSegmentStoreStatus} - # value whenever the status changes. - # - # @example Getting the current status - # status = client.big_segment_store_status_provider.status - # - # @example Subscribing to status notifications - # client.big_segment_store_status_provider.add_observer(self, :big_segments_status_changed) - # - # def big_segments_status_changed(new_status) - # puts "Big segment store status is now: #{new_status}" - # end - # - module BigSegmentStoreStatusProvider - include Observable - # - # Gets the current status of the store, if known. - # - # @return [BigSegmentStoreStatus] the status, or nil if the SDK has not yet queried the Big - # Segment store status - # - def status - end - end - - module DataSource - # - # An interface for querying the status of the SDK's data source. The data - # source is the component that receives updates to feature flag data; - # normally this is a streaming connection, but it could be polling or - # file data depending on your configuration. - # - # An implementation of this interface is returned by - # {LaunchDarkly::LDClient#data_source_status_provider}. Application code - # never needs to implement this interface. - # - module StatusProvider - # - # Returns the current status of the data source. - # - # All of the built-in data source implementations are guaranteed to update this status whenever they - # successfully initialize, encounter an error, or recover after an error. - # - # For a custom data source implementation, it is the responsibility of the data source to push - # status updates to the SDK; if it does not do so, the status will always be reported as - # {Status::INITIALIZING}. - # - # @return [Status] - # - def status - end - - # - # Subscribes for notifications of status changes. 
- # - # The listener will be notified whenever any property of the status has changed. See {Status} for an - # explanation of the meaning of each property and what could cause it to change. - # - # Notifications will be dispatched on a worker thread. It is the listener's responsibility to return as soon as - # possible so as not to block subsequent notifications. - # - # @param [#update] the listener to add - # - def add_listener(listener) end - - # - # Unsubscribes from notifications of status changes. - # - def remove_listener(listener) end - end - - # - # Interface that a data source implementation will use to push data into - # the SDK. - # - # The data source interacts with this object, rather than manipulating - # the data store directly, so that the SDK can perform any other - # necessary operations that must happen when data is updated. - # - module UpdateSink - # - # Initializes (or re-initializes) the store with the specified set of entities. Any - # existing entries will be removed. Implementations can assume that this data set is up to - # date-- there is no need to perform individual version comparisons between the existing - # objects and the supplied features. - # - # If possible, the store should update the entire data set atomically. If that is not possible, - # it should iterate through the outer hash and then the inner hash using the existing iteration - # order of those hashes (the SDK will ensure that the items were inserted into the hashes in - # the correct order), storing each item, and then delete any leftover items at the very end. - # - # @param all_data [Hash] a hash where each key is one of the data kind objects, and each - # value is in turn a hash of string keys to entities - # @return [void] - # - def init(all_data) end - - # - # Attempt to add an entity, or update an existing entity with the same key. An update - # should only succeed if the new item's `:version` is greater than the old one; - # otherwise, the method should do nothing. - # - # @param kind [Object] the kind of entity to add or update - # @param item [Hash] the entity to add or update - # @return [void] - # - def upsert(kind, item) end - - # - # Attempt to delete an entity if it exists. Deletion should only succeed if the - # `version` parameter is greater than the existing entity's `:version`; otherwise, the - # method should do nothing. - # - # @param kind [Object] the kind of entity to delete - # @param key [String] the unique key of the entity - # @param version [Integer] the entity must have a lower version than this to be deleted - # @return [void] - # - def delete(kind, key, version) end - - # - # Informs the SDK of a change in the data source's status. - # - # Data source implementations should use this method if they have any - # concept of being in a valid state, a temporarily disconnected state, - # or a permanently stopped state. - # - # If `new_state` is different from the previous state, and/or - # `new_error` is non-null, the SDK will start returning the new status - # (adding a timestamp for the change) from {StatusProvider#status}, and - # will trigger status change events to any registered listeners. - # - # A special case is that if {new_state} is {Status::INTERRUPTED}, but the - # previous state was {Status::INITIALIZING}, the state will remain at - # {Status::INITIALIZING} because {Status::INTERRUPTED} is only meaningful - # after a successful startup. 
- # - # @param new_state [Symbol] - # @param new_error [ErrorInfo, nil] - # - def update_status(new_state, new_error) end - end - - # - # Information about the data source's status and about the last status change. - # - class Status - # - # The initial state of the data source when the SDK is being initialized. - # - # If it encounters an error that requires it to retry initialization, the state will remain at - # {INITIALIZING} until it either succeeds and becomes {VALID}, or permanently fails and - # becomes {OFF}. - # - - INITIALIZING = :initializing - - # - # Indicates that the data source is currently operational and has not had any problems since the - # last time it received data. - # - # In streaming mode, this means that there is currently an open stream connection and that at least - # one initial message has been received on the stream. In polling mode, it means that the last poll - # request succeeded. - # - VALID = :valid - - # - # Indicates that the data source encountered an error that it will attempt to recover from. - # - # In streaming mode, this means that the stream connection failed, or had to be dropped due to some - # other error, and will be retried after a backoff delay. In polling mode, it means that the last poll - # request failed, and a new poll request will be made after the configured polling interval. - # - INTERRUPTED = :interrupted - - # - # Indicates that the data source has been permanently shut down. - # - # This could be because it encountered an unrecoverable error (for instance, the LaunchDarkly service - # rejected the SDK key; an invalid SDK key will never become valid), or because the SDK client was - # explicitly shut down. - # - OFF = :off - - # @return [Symbol] The basic state - attr_reader :state - # @return [Time] timestamp of the last state transition - attr_reader :state_since - # @return [ErrorInfo, nil] a description of the last error or nil if no errors have occurred since startup - attr_reader :last_error - - def initialize(state, state_since, last_error) - @state = state - @state_since = state_since - @last_error = last_error - end - end - - # - # A description of an error condition that the data source encountered. - # - class ErrorInfo - # - # An unexpected error, such as an uncaught exception, further described by {#message}. - # - UNKNOWN = :unknown - - # - # An I/O error such as a dropped connection. - # - NETWORK_ERROR = :network_error - - # - # The LaunchDarkly service returned an HTTP response with an error status, available with - # {#status_code}. - # - ERROR_RESPONSE = :error_response - - # - # The SDK received malformed data from the LaunchDarkly service. - # - INVALID_DATA = :invalid_data - - # - # The data source itself is working, but when it tried to put an update into the data store, the data - # store failed (so the SDK may not have the latest data). - # - # Data source implementations do not need to report this kind of error; it will be automatically - # reported by the SDK when exceptions are detected. 
- # - STORE_ERROR = :store_error - - # @return [Symbol] the general category of the error - attr_reader :kind - # @return [Integer] an HTTP status or zero - attr_reader :status_code - # @return [String, nil] message an error message if applicable, or nil - attr_reader :message - # @return [Time] time the error timestamp - attr_reader :time - - def initialize(kind, status_code, message, time) - @kind = kind - @status_code = status_code - @message = message - @time = time - end - end - end - - # - # Namespace for feature-flag based technology migration support. - # - module Migrations - # - # A migrator is the interface through which migration support is executed. A migrator is configured through the - # {LaunchDarkly::Migrations::MigratorBuilder} class. - # - module Migrator - # - # Uses the provided flag key and context to execute a migration-backed read operation. - # - # @param key [String] - # @param context [LaunchDarkly::LDContext] - # @param default_stage [Symbol] - # @param payload [Object, nil] - # - # @return [LaunchDarkly::Migrations::OperationResult] - # - def read(key, context, default_stage, payload = nil) end - - # - # Uses the provided flag key and context to execute a migration-backed write operation. - # - # @param key [String] - # @param context [LaunchDarkly::LDContext] - # @param default_stage [Symbol] - # @param payload [Object, nil] - # - # @return [LaunchDarkly::Migrations::WriteResult] - # - def write(key, context, default_stage, payload = nil) end - end - - # - # An OpTracker is responsible for managing the collection of measurements that which a user might wish to record - # throughout a migration-assisted operation. - # - # Example measurements include latency, errors, and consistency. - # - # This data can be provided to the {LaunchDarkly::LDClient.track_migration_op} method to relay this metric - # information upstream to LaunchDarkly services. - # - module OpTracker - # - # Sets the migration related operation associated with these tracking measurements. - # - # @param [Symbol] op The read or write operation symbol. - # - def operation(op) end - - # - # Allows recording which origins were called during a migration. - # - # @param [Symbol] origin Designation for the old or new origin. - # - def invoked(origin) end - - # - # Allows recording the results of a consistency check. - # - # This method accepts a callable which should take no parameters and return a single boolean to represent the - # consistency check results for a read operation. - # - # A callable is provided in case sampling rules do not require consistency checking to run. In this case, we can - # avoid the overhead of a function by not using the callable. - # - # @param [#call] is_consistent closure to return result of comparison check - # - def consistent(is_consistent) end - - # - # Allows recording whether an error occurred during the operation. - # - # @param [Symbol] origin Designation for the old or new origin. - # - def error(origin) end - - # - # Allows tracking the recorded latency for an individual operation. - # - # @param [Symbol] origin Designation for the old or new origin. - # @param [Float] duration Duration measurement in milliseconds (ms). - # - def latency(origin, duration) end - - # - # Creates an instance of {LaunchDarkly::Impl::MigrationOpEventData}. - # - # @return [LaunchDarkly::Impl::MigrationOpEvent, String] A migration op event or a string describing the error. - # failure. 
- # - def build - end - end - end - - module Hooks - # - # Mixin for extending SDK functionality via hooks. - # - # All provided hook implementations **MUST** include this mixin. Hooks without this mixin will be ignored. - # - # This mixin includes default implementations for all hook handlers. This allows LaunchDarkly to expand the list - # of hook handlers without breaking customer integrations. - # - module Hook - # - # Get metadata about the hook implementation. - # - # @return [Metadata] - # - def metadata - Metadata.new('UNDEFINED') - end - - # - # The before method is called during the execution of a variation method before the flag value has been - # determined. The method is executed synchronously. - # - # @param evaluation_series_context [EvaluationSeriesContext] Contains information about the evaluation being - # performed. This is not mutable. - # @param data [Hash] A record associated with each stage of hook invocations. Each stage is called with the data - # of the previous stage for a series. The input record should not be modified. - # @return [Hash] Data to use when executing the next state of the hook in the evaluation series. - # - def before_evaluation(evaluation_series_context, data) - data - end - - # - # The after method is called during the execution of the variation method after the flag value has been - # determined. The method is executed synchronously. - # - # @param evaluation_series_context [EvaluationSeriesContext] Contains read-only information about the evaluation - # being performed. - # @param data [Hash] A record associated with each stage of hook invocations. Each stage is called with the data - # of the previous stage for a series. - # @param detail [LaunchDarkly::EvaluationDetail] The result of the evaluation. This value should not be - # modified. - # @return [Hash] Data to use when executing the next state of the hook in the evaluation series. - # - def after_evaluation(evaluation_series_context, data, detail) - data - end - end - - # - # Metadata data class used for annotating hook implementations. - # - class Metadata - attr_reader :name - - def initialize(name) - @name = name - end - end - - # - # Contextual information that will be provided to handlers during evaluation series. - # - class EvaluationSeriesContext - attr_reader :key - attr_reader :context - attr_reader :default_value - attr_reader :method - - # - # @param key [String] - # @param context [LaunchDarkly::LDContext] - # @param default_value [any] - # @param method [Symbol] - # - def initialize(key, context, default_value, method) - @key = key - @context = context - @default_value = default_value - @method = method - end - end - end - - module Plugins - # - # Metadata about the SDK. - # - class SdkMetadata - # The id of the SDK (e.g., "ruby-server-sdk") - # @return [String] - attr_reader :name - - # The version of the SDK - # @return [String] - attr_reader :version - - # The wrapper name if this SDK is a wrapper - # @return [String, nil] - attr_reader :wrapper_name - - # The wrapper version if this SDK is a wrapper - # @return [String, nil] - attr_reader :wrapper_version - - def initialize(name:, version:, wrapper_name: nil, wrapper_version: nil) - @name = name - @version = version - @wrapper_name = wrapper_name - @wrapper_version = wrapper_version - end - end - - # - # Metadata about the application using the SDK. 
- # - class ApplicationMetadata - # The id of the application - # @return [String, nil] - attr_reader :id - - # The version of the application - # @return [String, nil] - attr_reader :version - - def initialize(id: nil, version: nil) - @id = id - @version = version - end - end - - # - # Metadata about the environment in which the SDK is running. - # - class EnvironmentMetadata - # Information about the SDK - # @return [SdkMetadata] - attr_reader :sdk - - # Information about the application - # @return [ApplicationMetadata, nil] - attr_reader :application - - # The SDK key used to initialize the SDK - # @return [String, nil] - attr_reader :sdk_key - - def initialize(sdk:, application: nil, sdk_key: nil) - @sdk = sdk - @application = application - @sdk_key = sdk_key - end - end - - # - # Metadata about a plugin implementation. - # - class PluginMetadata - # A name representing the plugin instance - # @return [String] - attr_reader :name - - def initialize(name) - @name = name - end - end - - # - # Mixin for extending SDK functionality via plugins. - # - # All provided plugin implementations **MUST** include this mixin. Plugins without this mixin will be ignored. - # - # This mixin includes default implementations for optional methods. This allows LaunchDarkly to expand the list - # of plugin methods without breaking customer integrations. - # - # Plugins provide an interface which allows for initialization, access to credentials, and hook registration - # in a single interface. - # - module Plugin - # - # Get metadata about the plugin implementation. - # - # @return [PluginMetadata] - # - def metadata - PluginMetadata.new('UNDEFINED') - end - - # - # Register the plugin with the SDK client. - # - # This method is called during SDK initialization to allow the plugin to set up any necessary integrations, - # register hooks, or perform other initialization tasks. - # - # @param client [LDClient] The LDClient instance - # @param environment_metadata [EnvironmentMetadata] Metadata about the environment in which the SDK is running - # @return [void] - # - def register(client, environment_metadata) - # Default implementation does nothing - end - - # - # Get a list of hooks that this plugin provides. - # - # This method is called before register() to collect all hooks from plugins. The hooks returned will be - # added to the SDK's hook configuration. - # - # @param environment_metadata [EnvironmentMetadata] Metadata about the environment in which the SDK is running - # @return [Array] A list of hooks to be registered with the SDK - # - def get_hooks(environment_metadata) - [] - end - end - end - end -end +require "ldclient-rb/interfaces/feature_store" +require "ldclient-rb/interfaces/flag_tracker" +require "ldclient-rb/interfaces/data_store" +require "ldclient-rb/interfaces/data_source" +require "ldclient-rb/interfaces/big_segment_store" +require "ldclient-rb/interfaces/migrations" +require "ldclient-rb/interfaces/hooks" +require "ldclient-rb/interfaces/plugins" \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/big_segment_store.rb b/lib/ldclient-rb/interfaces/big_segment_store.rb new file mode 100644 index 00000000..3f0f2022 --- /dev/null +++ b/lib/ldclient-rb/interfaces/big_segment_store.rb @@ -0,0 +1,153 @@ +require "observer" + +module LaunchDarkly + module Interfaces + module BigSegmentStore + # + # Returns information about the overall state of the store. This method will be called only + # when the SDK needs the latest state, so it should not be cached. 
+ # + # @return [BigSegmentStoreMetadata] + # + def get_metadata + end + + # + # Queries the store for a snapshot of the current segment state for a specific context. + # + # The context_hash is a base64-encoded string produced by hashing the context key as defined by + # the Big Segments specification; the store implementation does not need to know the details + # of how this is done, because it deals only with already-hashed keys, but the string can be + # assumed to only contain characters that are valid in base64. + # + # The return value should be either a Hash, or nil if the context is not referenced in any big + # segments. Each key in the Hash is a "segment reference", which is how segments are + # identified in Big Segment data. This string is not identical to the segment key-- the SDK + # will add other information. The store implementation should not be concerned with the + # format of the string. Each value in the Hash is true if the context is explicitly included in + # the segment, false if the context is explicitly excluded from the segment-- and is not also + # explicitly included (that is, if both an include and an exclude existed in the data, the + # include would take precedence). If the context's status in a particular segment is undefined, + # there should be no key or value for that segment. + # + # This Hash may be cached by the SDK, so it should not be modified after it is created. It + # is a snapshot of the segment membership state at one point in time. + # + # @param context_hash [String] + # @return [Hash] true/false values for Big Segments that reference this context + # + def get_membership(context_hash) + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + # @return [void] + # + def stop + end + end + + # + # Values returned by {BigSegmentStore#get_metadata}. + # + class BigSegmentStoreMetadata + def initialize(last_up_to_date) + @last_up_to_date = last_up_to_date + end + + # The Unix epoch millisecond timestamp of the last update to the {BigSegmentStore}. It is + # nil if the store has never been updated. + # + # @return [Integer|nil] + attr_reader :last_up_to_date + end + + # + # Information about the status of a Big Segment store, provided by {BigSegmentStoreStatusProvider}. + # + # Big Segments are a specific type of segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + class BigSegmentStoreStatus + def initialize(available, stale) + @available = available + @stale = stale + end + + # True if the Big Segment store is able to respond to queries, so that the SDK can evaluate + # whether a context is in a segment or not. + # + # If this property is false, the store is not able to make queries (for instance, it may not have + # a valid database connection). In this case, the SDK will treat any reference to a Big Segment + # as if no contexts are included in that segment. Also, the {EvaluationReason} associated with + # with any flag evaluation that references a Big Segment when the store is not available will + # have a `big_segments_status` of `STORE_ERROR`. + # + # @return [Boolean] + attr_reader :available + + # True if the Big Segment store is available, but has not been updated within the amount of time + # specified by {BigSegmentsConfig#stale_after}. 
+ # + # This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped + # running or has become unable to receive fresh data from LaunchDarkly. Any feature flag + # evaluations that reference a Big Segment will be using the last known data, which may be out + # of date. Also, the {EvaluationReason} associated with those evaluations will have a + # `big_segments_status` of `STALE`. + # + # @return [Boolean] + attr_reader :stale + + def ==(other) + self.available == other.available && self.stale == other.stale + end + end + + # + # An interface for querying the status of a Big Segment store. + # + # The Big Segment store is the component that receives information about Big Segments, normally + # from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type + # of segments. For more information, read the LaunchDarkly documentation: + # https://docs.launchdarkly.com/home/users/big-segments + # + # An implementation of this interface is returned by {LDClient#big_segment_store_status_provider}. + # Application code never needs to implement this interface. + # + # There are two ways to interact with the status. One is to simply get the current status; if its + # `available` property is true, then the SDK is able to evaluate context membership in Big Segments, + # and the `stale`` property indicates whether the data might be out of date. + # + # The other way is to subscribe to status change notifications. Applications may wish to know if + # there is an outage in the Big Segment store, or if it has become stale (the Relay Proxy has + # stopped updating it with new data), since then flag evaluations that reference a Big Segment + # might return incorrect values. To allow finding out about status changes as soon as possible, + # `BigSegmentStoreStatusProvider` mixes in Ruby's + # [Observable](https://docs.ruby-lang.org/en/2.5.0/Observable.html) module to provide standard + # methods such as `add_observer`. Observers will be called with a new {BigSegmentStoreStatus} + # value whenever the status changes. + # + # @example Getting the current status + # status = client.big_segment_store_status_provider.status + # + # @example Subscribing to status notifications + # client.big_segment_store_status_provider.add_observer(self, :big_segments_status_changed) + # + # def big_segments_status_changed(new_status) + # puts "Big segment store status is now: #{new_status}" + # end + # + module BigSegmentStoreStatusProvider + include Observable + # + # Gets the current status of the store, if known. + # + # @return [BigSegmentStoreStatus] the status, or nil if the SDK has not yet queried the Big + # Segment store status + # + def status + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/data_source.rb b/lib/ldclient-rb/interfaces/data_source.rb new file mode 100644 index 00000000..f0cc1d17 --- /dev/null +++ b/lib/ldclient-rb/interfaces/data_source.rb @@ -0,0 +1,265 @@ +module LaunchDarkly + module Interfaces + # + # Mixin that defines the required methods of a data source implementation. This is the + # component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. + # + # The client has its own standard implementation, which uses either a streaming connection or + # polling depending on your configuration. Normally you will not need to use another one + # except for testing purposes. 
Two such test fixtures are {LaunchDarkly::Integrations::FileData} + # and {LaunchDarkly::Integrations::TestData}. + # + module DataSource + # + # Checks whether the data source has finished initializing. Initialization is considered done + # once it has received one complete data set from LaunchDarkly. + # + # @return [Boolean] true if initialization is complete + # + def initialized? + end + + # + # Puts the data source into an active state. Normally this means it will make its first + # connection attempt to LaunchDarkly. If `start` has already been called, calling it again + # should simply return the same value as the first call. + # + # @return [Concurrent::Event] an Event which will be set once initialization is complete + # + def start + end + + # + # Puts the data source into an inactive state and releases all of its resources. + # This state should be considered permanent (`start` does not have to work after `stop`). + # + def stop + end + + # + # An interface for querying the status of the SDK's data source. The data + # source is the component that receives updates to feature flag data; + # normally this is a streaming connection, but it could be polling or + # file data depending on your configuration. + # + # An implementation of this interface is returned by + # {LaunchDarkly::LDClient#data_source_status_provider}. Application code + # never needs to implement this interface. + # + module StatusProvider + # + # Returns the current status of the data source. + # + # All of the built-in data source implementations are guaranteed to update this status whenever they + # successfully initialize, encounter an error, or recover after an error. + # + # For a custom data source implementation, it is the responsibility of the data source to push + # status updates to the SDK; if it does not do so, the status will always be reported as + # {Status::INITIALIZING}. + # + # @return [Status] + # + def status + end + + # + # Subscribes for notifications of status changes. + # + # The listener will be notified whenever any property of the status has changed. See {Status} for an + # explanation of the meaning of each property and what could cause it to change. + # + # Notifications will be dispatched on a worker thread. It is the listener's responsibility to return as soon as + # possible so as not to block subsequent notifications. + # + # @param [#update] the listener to add + # + def add_listener(listener) end + + # + # Unsubscribes from notifications of status changes. + # + def remove_listener(listener) end + end + + # + # Interface that a data source implementation will use to push data into + # the SDK. + # + # The data source interacts with this object, rather than manipulating + # the data store directly, so that the SDK can perform any other + # necessary operations that must happen when data is updated. + # + module UpdateSink + # + # Initializes (or re-initializes) the store with the specified set of entities. Any + # existing entries will be removed. Implementations can assume that this data set is up to + # date-- there is no need to perform individual version comparisons between the existing + # objects and the supplied features. + # + # If possible, the store should update the entire data set atomically. 
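# A small sketch of subscribing to data source status changes; the listener class name is an
# illustrative assumption, and `client` is assumed to be an initialized LDClient. The SDK
# invokes #update on a worker thread with the Status value described below.
class DataSourceStatusListener
  def update(status)
    puts "data source state: #{status.state} (since #{status.state_since})"
  end
end

client.data_source_status_provider.add_listener(DataSourceStatusListener.new)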
If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # @return [void] + # + def init(all_data) end + + # + # Attempt to add an entity, or update an existing entity with the same key. An update + # should only succeed if the new item's `:version` is greater than the old one; + # otherwise, the method should do nothing. + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [void] + # + def upsert(kind, item) end + + # + # Attempt to delete an entity if it exists. Deletion should only succeed if the + # `version` parameter is greater than the existing entity's `:version`; otherwise, the + # method should do nothing. + # + # @param kind [Object] the kind of entity to delete + # @param key [String] the unique key of the entity + # @param version [Integer] the entity must have a lower version than this to be deleted + # @return [void] + # + def delete(kind, key, version) end + + # + # Informs the SDK of a change in the data source's status. + # + # Data source implementations should use this method if they have any + # concept of being in a valid state, a temporarily disconnected state, + # or a permanently stopped state. + # + # If `new_state` is different from the previous state, and/or + # `new_error` is non-null, the SDK will start returning the new status + # (adding a timestamp for the change) from {StatusProvider#status}, and + # will trigger status change events to any registered listeners. + # + # A special case is that if {new_state} is {Status::INTERRUPTED}, but the + # previous state was {Status::INITIALIZING}, the state will remain at + # {Status::INITIALIZING} because {Status::INTERRUPTED} is only meaningful + # after a successful startup. + # + # @param new_state [Symbol] + # @param new_error [ErrorInfo, nil] + # + def update_status(new_state, new_error) end + end + + # + # Information about the data source's status and about the last status change. + # + class Status + # + # The initial state of the data source when the SDK is being initialized. + # + # If it encounters an error that requires it to retry initialization, the state will remain at + # {INITIALIZING} until it either succeeds and becomes {VALID}, or permanently fails and + # becomes {OFF}. + # + + INITIALIZING = :initializing + + # + # Indicates that the data source is currently operational and has not had any problems since the + # last time it received data. + # + # In streaming mode, this means that there is currently an open stream connection and that at least + # one initial message has been received on the stream. In polling mode, it means that the last poll + # request succeeded. + # + VALID = :valid + + # + # Indicates that the data source encountered an error that it will attempt to recover from. + # + # In streaming mode, this means that the stream connection failed, or had to be dropped due to some + # other error, and will be retried after a backoff delay. In polling mode, it means that the last poll + # request failed, and a new poll request will be made after the configured polling interval. 
+ # + INTERRUPTED = :interrupted + + # + # Indicates that the data source has been permanently shut down. + # + # This could be because it encountered an unrecoverable error (for instance, the LaunchDarkly service + # rejected the SDK key; an invalid SDK key will never become valid), or because the SDK client was + # explicitly shut down. + # + OFF = :off + + # @return [Symbol] The basic state + attr_reader :state + # @return [Time] timestamp of the last state transition + attr_reader :state_since + # @return [ErrorInfo, nil] a description of the last error or nil if no errors have occurred since startup + attr_reader :last_error + + def initialize(state, state_since, last_error) + @state = state + @state_since = state_since + @last_error = last_error + end + end + + # + # A description of an error condition that the data source encountered. + # + class ErrorInfo + # + # An unexpected error, such as an uncaught exception, further described by {#message}. + # + UNKNOWN = :unknown + + # + # An I/O error such as a dropped connection. + # + NETWORK_ERROR = :network_error + + # + # The LaunchDarkly service returned an HTTP response with an error status, available with + # {#status_code}. + # + ERROR_RESPONSE = :error_response + + # + # The SDK received malformed data from the LaunchDarkly service. + # + INVALID_DATA = :invalid_data + + # + # The data source itself is working, but when it tried to put an update into the data store, the data + # store failed (so the SDK may not have the latest data). + # + # Data source implementations do not need to report this kind of error; it will be automatically + # reported by the SDK when exceptions are detected. + # + STORE_ERROR = :store_error + + # @return [Symbol] the general category of the error + attr_reader :kind + # @return [Integer] an HTTP status or zero + attr_reader :status_code + # @return [String, nil] message an error message if applicable, or nil + attr_reader :message + # @return [Time] time the error timestamp + attr_reader :time + + def initialize(kind, status_code, message, time) + @kind = kind + @status_code = status_code + @message = message + @time = time + end + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/data_store.rb b/lib/ldclient-rb/interfaces/data_store.rb new file mode 100644 index 00000000..d95462e7 --- /dev/null +++ b/lib/ldclient-rb/interfaces/data_store.rb @@ -0,0 +1,113 @@ +module LaunchDarkly + module Interfaces + module DataStore + # + # An interface for querying the status of a persistent data store. + # + # An implementation of this interface is returned by {LaunchDarkly::LDClient#data_store_status_provider}. + # Application code should not implement this interface. + # + module StatusProvider + # + # Returns the current status of the store. + # + # This is only meaningful for persistent stores, or any custom data store implementation that makes use of + # the status reporting mechanism provided by the SDK. For the default in-memory store, the status will always + # be reported as "available". + # + # @return [Status] the latest status + # + def status + end + + # + # Indicates whether the current data store implementation supports status monitoring. + # + # This is normally true for all persistent data stores, and false for the default in-memory store. A true value + # means that any listeners added with {#add_listener} can expect to be notified if there is any error in + # storing data, and then notified again when the error condition is resolved. 
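# A brief sketch of polling the current data source status and inspecting the last error;
# `client` is assumed to be an initialized LDClient.
status = client.data_source_status_provider.status
case status.state
when LaunchDarkly::Interfaces::DataSource::Status::VALID
  puts "data source healthy since #{status.state_since}"
when LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED
  err = status.last_error
  puts "temporarily interrupted: #{err&.kind} (HTTP #{err&.status_code}) #{err&.message}"
end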
A false value means that the + # status is not meaningful and listeners should not expect to be notified. + # + # @return [Boolean] true if status monitoring is enabled + # + def monitoring_enabled? + end + + # + # Subscribes for notifications of status changes. + # + # Applications may wish to know if there is an outage in a persistent data store, since that could mean that + # flag evaluations are unable to get the flag data from the store (unless it is currently cached) and therefore + # might return default values. + # + # If the SDK receives an exception while trying to query or update the data store, then it notifies listeners + # that the store appears to be offline ({Status#available} is false) and begins polling the store + # at intervals until a query succeeds. Once it succeeds, it notifies listeners again with {Status#available} + # set to true. + # + # This method has no effect if the data store implementation does not support status tracking, such as if you + # are using the default in-memory store rather than a persistent store. + # + # @param listener [#update] the listener to add + # + def add_listener(listener) + end + + # + # Unsubscribes from notifications of status changes. + # + # This method has no effect if the data store implementation does not support status tracking, such as if you + # are using the default in-memory store rather than a persistent store. + # + # @param listener [Object] the listener to remove; if no such listener was added, this does nothing + # + def remove_listener(listener) + end + end + + # + # Interface that a data store implementation can use to report information back to the SDK. + # + module UpdateSink + # + # Reports a change in the data store's operational status. + # + # This is what makes the status monitoring mechanisms in {StatusProvider} work. + # + # @param status [Status] the updated status properties + # + def update_status(status) + end + end + + class Status + def initialize(available, stale) + @available = available + @stale = stale + end + + # + # Returns true if the SDK believes the data store is now available. + # + # This property is normally true. If the SDK receives an exception while trying to query or update the data + # store, then it sets this property to false (notifying listeners, if any) and polls the store at intervals + # until a query succeeds. Once it succeeds, it sets the property back to true (again notifying listeners). + # + # @return [Boolean] true if store is available + # + attr_reader :available + + # + # Returns true if the store may be out of date due to a previous + # outage, so the SDK should attempt to refresh all feature flag data + # and rewrite it to the store. + # + # This property is not meaningful to application code. + # + # @return [Boolean] true if data should be rewritten + # + attr_reader :stale + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/feature_store.rb b/lib/ldclient-rb/interfaces/feature_store.rb new file mode 100644 index 00000000..da685f9f --- /dev/null +++ b/lib/ldclient-rb/interfaces/feature_store.rb @@ -0,0 +1,162 @@ +module LaunchDarkly + module Interfaces + # + # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly + # client uses the feature store to persist feature flags and related objects received from + # the LaunchDarkly service. Implementations must support concurrent access and updates. 
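# A sketch of watching a persistent data store for outages; the listener class name is an
# illustrative assumption, and `client` is assumed to be an initialized LDClient using a
# persistent store (monitoring is not meaningful for the default in-memory store).
class StoreOutageListener
  def update(status)
    warn "feature store unavailable; evaluations may fall back to defaults" unless status.available
  end
end

provider = client.data_store_status_provider
provider.add_listener(StoreOutageListener.new) if provider.monitoring_enabled?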
+ # For more about how feature stores can be used, see: + # [Using a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data#ruby). + # + # An entity that can be stored in a feature store is a hash that can be converted to and from + # JSON, and that has at a minimum the following properties: `:key`, a string that is unique + # among entities of the same kind; `:version`, an integer that is higher for newer data; + # `:deleted`, a boolean (optional, defaults to false) that if true means this is a + # placeholder for a deleted entity. + # + # To represent the different kinds of objects that can be stored, such as feature flags and + # segments, the SDK will provide a "kind" object; this is a hash with a single property, + # `:namespace`, which is a short string unique to that kind. This string can be used as a + # collection name or a key prefix. + # + # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations + # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new + # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task + # simpler. + # + module FeatureStore + # + # Initializes (or re-initializes) the store with the specified set of entities. Any + # existing entries will be removed. Implementations can assume that this data set is up to + # date-- there is no need to perform individual version comparisons between the existing + # objects and the supplied features. + # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # @return [void] + # + def init(all_data) + end + + # + # Returns the entity to which the specified key is mapped, if any. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found, or if the stored entity's + # `:deleted` property was true + # + def get(kind, key) + end + + # + # Returns all stored entities of the specified kind, not including deleted entities. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def all(kind) + end + + # + # Attempt to add an entity, or update an existing entity with the same key. An update + # should only succeed if the new item's `:version` is greater than the old one; + # otherwise, the method should do nothing. + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [void] + # + def upsert(kind, item) + end + + # + # Attempt to delete an entity if it exists. Deletion should only succeed if the + # `version` parameter is greater than the existing entity's `:version`; otherwise, the + # method should do nothing. 
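# A skeletal sketch of a custom feature store following the contract above, keyed by each
# kind's :namespace; the class name is illustrative, and a real implementation would add
# persistence, caching, and concurrency control (see LaunchDarkly::Integrations::Util).
class HashFeatureStore
  include LaunchDarkly::Interfaces::FeatureStore

  def initialize
    @data = {}
    @initialized = false
  end

  def init(all_data)
    @data = {}
    all_data.each { |kind, items| @data[kind[:namespace]] = items.dup }
    @initialized = true
  end

  def get(kind, key)
    item = (@data[kind[:namespace]] || {})[key]
    item && !item[:deleted] ? item : nil
  end

  def all(kind)
    (@data[kind[:namespace]] || {}).reject { |_, item| item[:deleted] }
  end

  def upsert(kind, item)
    items = (@data[kind[:namespace]] ||= {})
    existing = items[item[:key]]
    items[item[:key]] = item if existing.nil? || existing[:version] < item[:version]
  end

  def delete(kind, key, version)
    upsert(kind, { key: key, version: version, deleted: true })
  end

  def initialized?
    @initialized
  end

  def stop; end
end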
+ # + # @param kind [Object] the kind of entity to delete + # @param key [String] the unique key of the entity + # @param version [Integer] the entity must have a lower version than this to be deleted + # @return [void] + # + def delete(kind, key, version) + end + + # + # Checks whether this store has been initialized. That means that `init` has been called + # either by this process, or (if the store can be shared) by another process. This + # method will be called frequently, so it should be efficient. You can assume that if it + # has returned true once, it can continue to return true, i.e. a store cannot become + # uninitialized again. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + # @return [void] + # + def stop + end + + # + # WARN: This isn't a required method on a FeatureStore yet. The SDK will + # currently check if the provided store responds to this method, and if + # it does, will take appropriate action based on the documented behavior + # below. This will become required in a future major version release of + # the SDK. + # + # Returns true if this data store implementation supports status + # monitoring. + # + # This is normally only true for persistent data stores but it could also + # be true for any custom {FeatureStore} implementation. + # + # Returning true means that the store guarantees that if it ever enters + # an invalid state (that is, an operation has failed or it knows that + # operations cannot succeed at the moment), it will publish a status + # update, and will then publish another status update once it has + # returned to a valid state. + # + # Custom implementations must implement `def available?` which + # synchronously checks if the store is available. Without this method, + # the SDK cannot ensure status updates will occur once the store has gone + # offline. + # + # The same value will be returned from + # {StatusProvider::monitoring_enabled?}. + # + # def monitoring_enabled? end + + # + # WARN: This isn't a required method on a FeatureStore. The SDK will + # check if the provided store responds to this method, and if it does, + # will take appropriate action based on the documented behavior below. + # Usage of this method will be dropped in a future version of the SDK. + # + # Tests whether the data store seems to be functioning normally. + # + # This should not be a detailed test of different kinds of operations, + # but just the smallest possible operation to determine whether (for + # instance) we can reach the database. + # + # Whenever one of the store's other methods throws an exception, the SDK + # will assume that it may have become unavailable (e.g. the database + # connection was lost). The SDK will then call {#available?} at intervals + # until it returns true. + # + # @return [Boolean] true if the underlying data store is reachable + # + # def available? end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/flag_tracker.rb b/lib/ldclient-rb/interfaces/flag_tracker.rb new file mode 100644 index 00000000..6fbacd79 --- /dev/null +++ b/lib/ldclient-rb/interfaces/flag_tracker.rb @@ -0,0 +1,106 @@ +module LaunchDarkly + module Interfaces + # + # An interface for tracking changes in feature flag configurations. + # + # An implementation of this interface is returned by {LaunchDarkly::LDClient#flag_tracker}. 
+ # Application code never needs to implement this interface. + # + module FlagTracker + # + # Registers a listener to be notified of feature flag changes in general. + # + # The listener will be notified whenever the SDK receives any change to any feature flag's configuration, + # or to a user segment that is referenced by a feature flag. If the updated flag is used as a prerequisite + # for other flags, the SDK assumes that those flags may now behave differently and sends flag change events + # for them as well. + # + # Note that this does not necessarily mean the flag's value has changed for any particular evaluation + # context, only that some part of the flag configuration was changed so that it may return a + # different value than it previously returned for some context. If you want to track flag value changes, + # use {#add_flag_value_change_listener} instead. + # + # It is possible, given current design restrictions, that a listener might be notified when no change has + # occurred. This edge case will be addressed in a later version of the SDK. It is important to note this issue + # does not affect {#add_flag_value_change_listener} listeners. + # + # If using the file data source, any change in a data file will be treated as a change to every flag. Again, + # use {#add_flag_value_change_listener} (or just re-evaluate the flag # yourself) if you want to know whether + # this is a change that really affects a flag's value. + # + # Change events only work if the SDK is actually connecting to LaunchDarkly (or using the file data source). + # If the SDK is only reading flags from a database then it cannot know when there is a change, because + # flags are read on an as-needed basis. + # + # The listener will be called from a worker thread. + # + # Calling this method for an already-registered listener has no effect. + # + # @param listener [#update] + # + def add_listener(listener) end + + # + # Unregisters a listener so that it will no longer be notified of feature flag changes. + # + # Calling this method for a listener that was not previously registered has no effect. + # + # @param listener [Object] + # + def remove_listener(listener) end + + # + # Registers a listener to be notified of a change in a specific feature flag's value for a specific + # evaluation context. + # + # When you call this method, it first immediately evaluates the feature flag. It then uses + # {#add_listener} to start listening for feature flag configuration + # changes, and whenever the specified feature flag changes, it re-evaluates the flag for the same context. + # It then calls your listener if and only if the resulting value has changed. + # + # All feature flag evaluations require an instance of {LaunchDarkly::LDContext}. If the feature flag you are + # tracking does not have any context targeting rules, you must still pass a dummy context such as + # `LDContext.with_key("for-global-flags")`. If you do not want the user to appear on your dashboard, + # use the anonymous property: `LDContext.create({key: "for-global-flags", kind: "user", anonymous: true})`. + # + # The returned listener represents the subscription that was created by this method + # call; to unsubscribe, pass that object (not your listener) to {#remove_listener}. + # + # @param key [Symbol] + # @param context [LaunchDarkly::LDContext] + # @param listener [#update] + # + def add_flag_value_change_listener(key, context, listener) end + end + + # + # Change event fired when some aspect of the flag referenced by the key has changed. 
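# A sketch of subscribing to value changes for one flag; the flag key, context, and listener
# class are illustrative placeholders, and `client` is assumed to be an initialized LDClient.
# The listener receives the FlagValueChange events defined below.
class FlagValueListener
  def update(change)
    puts "#{change.key} changed from #{change.old_value.inspect} to #{change.new_value.inspect}"
  end
end

context = LaunchDarkly::LDContext.create({ key: "for-global-flags", kind: "user", anonymous: true })
client.flag_tracker.add_flag_value_change_listener(:"sample-flag", context, FlagValueListener.new)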
+ # + class FlagChange + attr_accessor :key + + # @param [Symbol] key + def initialize(key) + @key = key + end + end + + # + # Change event fired when the evaluated value for the specified flag key has changed. + # + class FlagValueChange + attr_accessor :key + attr_accessor :old_value + attr_accessor :new_value + + # @param [Symbol] key + # @param [Object] old_value + # @param [Object] new_value + def initialize(key, old_value, new_value) + @key = key + @old_value = old_value + @new_value = new_value + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/hooks.rb b/lib/ldclient-rb/interfaces/hooks.rb new file mode 100644 index 00000000..1455a653 --- /dev/null +++ b/lib/ldclient-rb/interfaces/hooks.rb @@ -0,0 +1,88 @@ +module LaunchDarkly + module Interfaces + module Hooks + # + # Mixin for extending SDK functionality via hooks. + # + # All provided hook implementations **MUST** include this mixin. Hooks without this mixin will be ignored. + # + # This mixin includes default implementations for all hook handlers. This allows LaunchDarkly to expand the list + # of hook handlers without breaking customer integrations. + # + module Hook + # + # Get metadata about the hook implementation. + # + # @return [Metadata] + # + def metadata + Metadata.new('UNDEFINED') + end + + # + # The before method is called during the execution of a variation method before the flag value has been + # determined. The method is executed synchronously. + # + # @param evaluation_series_context [EvaluationSeriesContext] Contains information about the evaluation being + # performed. This is not mutable. + # @param data [Hash] A record associated with each stage of hook invocations. Each stage is called with the data + # of the previous stage for a series. The input record should not be modified. + # @return [Hash] Data to use when executing the next state of the hook in the evaluation series. + # + def before_evaluation(evaluation_series_context, data) + data + end + + # + # The after method is called during the execution of the variation method after the flag value has been + # determined. The method is executed synchronously. + # + # @param evaluation_series_context [EvaluationSeriesContext] Contains read-only information about the evaluation + # being performed. + # @param data [Hash] A record associated with each stage of hook invocations. Each stage is called with the data + # of the previous stage for a series. + # @param detail [LaunchDarkly::EvaluationDetail] The result of the evaluation. This value should not be + # modified. + # @return [Hash] Data to use when executing the next state of the hook in the evaluation series. + # + def after_evaluation(evaluation_series_context, data, detail) + data + end + end + + # + # Metadata data class used for annotating hook implementations. + # + class Metadata + attr_reader :name + + def initialize(name) + @name = name + end + end + + # + # Contextual information that will be provided to handlers during evaluation series. 
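# A minimal sketch of a timing hook built on the mixin above; the class and hook names are
# illustrative assumptions. before_evaluation stashes a start time in the series data
# (without mutating the input hash), and after_evaluation reads it back alongside the
# evaluation detail.
class TimingHook
  include LaunchDarkly::Interfaces::Hooks::Hook

  def metadata
    LaunchDarkly::Interfaces::Hooks::Metadata.new("timing-hook")
  end

  def before_evaluation(evaluation_series_context, data)
    data.merge(started_at: Time.now)
  end

  def after_evaluation(evaluation_series_context, data, detail)
    elapsed = Time.now - data[:started_at]
    puts "#{evaluation_series_context.key} -> #{detail.value.inspect} in #{elapsed}s"
    data
  end
end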
+ # + class EvaluationSeriesContext + attr_reader :key + attr_reader :context + attr_reader :default_value + attr_reader :method + + # + # @param key [String] + # @param context [LaunchDarkly::LDContext] + # @param default_value [any] + # @param method [Symbol] + # + def initialize(key, context, default_value, method) + @key = key + @context = context + @default_value = default_value + @method = method + end + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/migrations.rb b/lib/ldclient-rb/interfaces/migrations.rb new file mode 100644 index 00000000..919ff820 --- /dev/null +++ b/lib/ldclient-rb/interfaces/migrations.rb @@ -0,0 +1,100 @@ +module LaunchDarkly + module Interfaces + # + # Namespace for feature-flag based technology migration support. + # + module Migrations + # + # A migrator is the interface through which migration support is executed. A migrator is configured through the + # {LaunchDarkly::Migrations::MigratorBuilder} class. + # + module Migrator + # + # Uses the provided flag key and context to execute a migration-backed read operation. + # + # @param key [String] + # @param context [LaunchDarkly::LDContext] + # @param default_stage [Symbol] + # @param payload [Object, nil] + # + # @return [LaunchDarkly::Migrations::OperationResult] + # + def read(key, context, default_stage, payload = nil) end + + # + # Uses the provided flag key and context to execute a migration-backed write operation. + # + # @param key [String] + # @param context [LaunchDarkly::LDContext] + # @param default_stage [Symbol] + # @param payload [Object, nil] + # + # @return [LaunchDarkly::Migrations::WriteResult] + # + def write(key, context, default_stage, payload = nil) end + end + + # + # An OpTracker is responsible for managing the collection of measurements that which a user might wish to record + # throughout a migration-assisted operation. + # + # Example measurements include latency, errors, and consistency. + # + # This data can be provided to the {LaunchDarkly::LDClient.track_migration_op} method to relay this metric + # information upstream to LaunchDarkly services. + # + module OpTracker + # + # Sets the migration related operation associated with these tracking measurements. + # + # @param [Symbol] op The read or write operation symbol. + # + def operation(op) end + + # + # Allows recording which origins were called during a migration. + # + # @param [Symbol] origin Designation for the old or new origin. + # + def invoked(origin) end + + # + # Allows recording the results of a consistency check. + # + # This method accepts a callable which should take no parameters and return a single boolean to represent the + # consistency check results for a read operation. + # + # A callable is provided in case sampling rules do not require consistency checking to run. In this case, we can + # avoid the overhead of a function by not using the callable. + # + # @param [#call] is_consistent closure to return result of comparison check + # + def consistent(is_consistent) end + + # + # Allows recording whether an error occurred during the operation. + # + # @param [Symbol] origin Designation for the old or new origin. + # + def error(origin) end + + # + # Allows tracking the recorded latency for an individual operation. + # + # @param [Symbol] origin Designation for the old or new origin. + # @param [Float] duration Duration measurement in milliseconds (ms). 
+ # + def latency(origin, duration) end + + # + # Creates an instance of {LaunchDarkly::Impl::MigrationOpEventData}. + # + # @return [LaunchDarkly::Impl::MigrationOpEvent, String] A migration op event or a string describing the error. + # failure. + # + def build + end + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/interfaces/plugins.rb b/lib/ldclient-rb/interfaces/plugins.rb new file mode 100644 index 00000000..53166d8a --- /dev/null +++ b/lib/ldclient-rb/interfaces/plugins.rb @@ -0,0 +1,136 @@ +module LaunchDarkly + module Interfaces + module Plugins + # + # Metadata about the SDK. + # + class SdkMetadata + # The id of the SDK (e.g., "ruby-server-sdk") + # @return [String] + attr_reader :name + + # The version of the SDK + # @return [String] + attr_reader :version + + # The wrapper name if this SDK is a wrapper + # @return [String, nil] + attr_reader :wrapper_name + + # The wrapper version if this SDK is a wrapper + # @return [String, nil] + attr_reader :wrapper_version + + def initialize(name:, version:, wrapper_name: nil, wrapper_version: nil) + @name = name + @version = version + @wrapper_name = wrapper_name + @wrapper_version = wrapper_version + end + end + + # + # Metadata about the application using the SDK. + # + class ApplicationMetadata + # The id of the application + # @return [String, nil] + attr_reader :id + + # The version of the application + # @return [String, nil] + attr_reader :version + + def initialize(id: nil, version: nil) + @id = id + @version = version + end + end + + # + # Metadata about the environment in which the SDK is running. + # + class EnvironmentMetadata + # Information about the SDK + # @return [SdkMetadata] + attr_reader :sdk + + # Information about the application + # @return [ApplicationMetadata, nil] + attr_reader :application + + # The SDK key used to initialize the SDK + # @return [String, nil] + attr_reader :sdk_key + + def initialize(sdk:, application: nil, sdk_key: nil) + @sdk = sdk + @application = application + @sdk_key = sdk_key + end + end + + # + # Metadata about a plugin implementation. + # + class PluginMetadata + # A name representing the plugin instance + # @return [String] + attr_reader :name + + def initialize(name) + @name = name + end + end + + # + # Mixin for extending SDK functionality via plugins. + # + # All provided plugin implementations **MUST** include this mixin. Plugins without this mixin will be ignored. + # + # This mixin includes default implementations for optional methods. This allows LaunchDarkly to expand the list + # of plugin methods without breaking customer integrations. + # + # Plugins provide an interface which allows for initialization, access to credentials, and hook registration + # in a single interface. + # + module Plugin + # + # Get metadata about the plugin implementation. + # + # @return [PluginMetadata] + # + def metadata + PluginMetadata.new('UNDEFINED') + end + + # + # Register the plugin with the SDK client. + # + # This method is called during SDK initialization to allow the plugin to set up any necessary integrations, + # register hooks, or perform other initialization tasks. + # + # @param client [LDClient] The LDClient instance + # @param environment_metadata [EnvironmentMetadata] Metadata about the environment in which the SDK is running + # @return [void] + # + def register(client, environment_metadata) + # Default implementation does nothing + end + + # + # Get a list of hooks that this plugin provides. 
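# A sketch of a complete plugin built on the mixin above; the class name, the hook it
# returns, and the logging are illustrative assumptions. Plugins are handed to the client
# through the Config's plugins option added in this change.
class ObservabilityPlugin
  include LaunchDarkly::Interfaces::Plugins::Plugin

  def metadata
    LaunchDarkly::Interfaces::Plugins::PluginMetadata.new("observability-plugin")
  end

  def get_hooks(environment_metadata)
    [TimingHook.new] # e.g. the hook sketched earlier
  end

  def register(client, environment_metadata)
    sdk = environment_metadata.sdk
    puts "registered with #{sdk.name} #{sdk.version}"
  end
end

config = LaunchDarkly::Config.new(plugins: [ObservabilityPlugin.new])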
+ # + # This method is called before register() to collect all hooks from plugins. The hooks returned will be + # added to the SDK's hook configuration. + # + # @param environment_metadata [EnvironmentMetadata] Metadata about the environment in which the SDK is running + # @return [Array] A list of hooks to be registered with the SDK + # + def get_hooks(environment_metadata) + [] + end + end + end + end +end \ No newline at end of file From eebbd464f02a238b08def819c113489e78e0e9a9 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Fri, 11 Jul 2025 16:52:40 +0000 Subject: [PATCH 3/8] fix rubocop style issues --- lib/ldclient-rb/interfaces.rb | 2 +- .../interfaces/big_segment_store.rb | 2 +- lib/ldclient-rb/interfaces/data_source.rb | 2 +- lib/ldclient-rb/interfaces/data_store.rb | 2 +- lib/ldclient-rb/interfaces/feature_store.rb | 2 +- lib/ldclient-rb/interfaces/flag_tracker.rb | 2 +- lib/ldclient-rb/interfaces/hooks.rb | 2 +- lib/ldclient-rb/interfaces/migrations.rb | 2 +- lib/ldclient-rb/interfaces/plugins.rb | 14 ++++++------ lib/ldclient-rb/ldclient.rb | 4 ++-- spec/ldclient_plugins_spec.rb | 22 +++++++++---------- 11 files changed, 28 insertions(+), 28 deletions(-) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 60201067..95d1e9b9 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -6,4 +6,4 @@ require "ldclient-rb/interfaces/big_segment_store" require "ldclient-rb/interfaces/migrations" require "ldclient-rb/interfaces/hooks" -require "ldclient-rb/interfaces/plugins" \ No newline at end of file +require "ldclient-rb/interfaces/plugins" diff --git a/lib/ldclient-rb/interfaces/big_segment_store.rb b/lib/ldclient-rb/interfaces/big_segment_store.rb index 3f0f2022..733e6ba1 100644 --- a/lib/ldclient-rb/interfaces/big_segment_store.rb +++ b/lib/ldclient-rb/interfaces/big_segment_store.rb @@ -150,4 +150,4 @@ def status end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/data_source.rb b/lib/ldclient-rb/interfaces/data_source.rb index f0cc1d17..27ee5769 100644 --- a/lib/ldclient-rb/interfaces/data_source.rb +++ b/lib/ldclient-rb/interfaces/data_source.rb @@ -262,4 +262,4 @@ def initialize(kind, status_code, message, time) end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/data_store.rb b/lib/ldclient-rb/interfaces/data_store.rb index d95462e7..0865dc70 100644 --- a/lib/ldclient-rb/interfaces/data_store.rb +++ b/lib/ldclient-rb/interfaces/data_store.rb @@ -110,4 +110,4 @@ def initialize(available, stale) end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/feature_store.rb b/lib/ldclient-rb/interfaces/feature_store.rb index da685f9f..f8f5cc23 100644 --- a/lib/ldclient-rb/interfaces/feature_store.rb +++ b/lib/ldclient-rb/interfaces/feature_store.rb @@ -159,4 +159,4 @@ def stop # def available? 
end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/flag_tracker.rb b/lib/ldclient-rb/interfaces/flag_tracker.rb index 6fbacd79..3c8a8f8c 100644 --- a/lib/ldclient-rb/interfaces/flag_tracker.rb +++ b/lib/ldclient-rb/interfaces/flag_tracker.rb @@ -103,4 +103,4 @@ def initialize(key, old_value, new_value) end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/hooks.rb b/lib/ldclient-rb/interfaces/hooks.rb index 1455a653..4c27bb21 100644 --- a/lib/ldclient-rb/interfaces/hooks.rb +++ b/lib/ldclient-rb/interfaces/hooks.rb @@ -85,4 +85,4 @@ def initialize(key, context, default_value, method) end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/migrations.rb b/lib/ldclient-rb/interfaces/migrations.rb index 919ff820..da0f7df8 100644 --- a/lib/ldclient-rb/interfaces/migrations.rb +++ b/lib/ldclient-rb/interfaces/migrations.rb @@ -97,4 +97,4 @@ def build end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/interfaces/plugins.rb b/lib/ldclient-rb/interfaces/plugins.rb index 53166d8a..9d913c12 100644 --- a/lib/ldclient-rb/interfaces/plugins.rb +++ b/lib/ldclient-rb/interfaces/plugins.rb @@ -8,15 +8,15 @@ class SdkMetadata # The id of the SDK (e.g., "ruby-server-sdk") # @return [String] attr_reader :name - + # The version of the SDK # @return [String] attr_reader :version - + # The wrapper name if this SDK is a wrapper # @return [String, nil] attr_reader :wrapper_name - + # The wrapper version if this SDK is a wrapper # @return [String, nil] attr_reader :wrapper_version @@ -36,7 +36,7 @@ class ApplicationMetadata # The id of the application # @return [String, nil] attr_reader :id - + # The version of the application # @return [String, nil] attr_reader :version @@ -54,11 +54,11 @@ class EnvironmentMetadata # Information about the SDK # @return [SdkMetadata] attr_reader :sdk - + # Information about the application # @return [ApplicationMetadata, nil] attr_reader :application - + # The SDK key used to initialize the SDK # @return [String, nil] attr_reader :sdk_key @@ -133,4 +133,4 @@ def get_hooks(environment_metadata) end end end -end \ No newline at end of file +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 844ef422..d785e87b 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -91,7 +91,7 @@ def postfork(wait_for_sec = 5) private def start_up(wait_for_sec) environment_metadata = get_environment_metadata plugin_hooks = get_plugin_hooks(environment_metadata) - + @hooks = Concurrent::Array.new(@config.hooks + plugin_hooks) @shared_executor = Concurrent::SingleThreadExecutor.new @@ -186,7 +186,7 @@ def postfork(wait_for_sec = 5) ) application_metadata = nil - if @config.application && (!@config.application.empty?) + if @config.application && !@config.application.empty? 
application_metadata = Interfaces::Plugins::ApplicationMetadata.new( id: @config.application[:id], version: @config.application[:version] diff --git a/spec/ldclient_plugins_spec.rb b/spec/ldclient_plugins_spec.rb index d942bc7c..be5b2d9e 100644 --- a/spec/ldclient_plugins_spec.rb +++ b/spec/ldclient_plugins_spec.rb @@ -49,7 +49,7 @@ def register(client, environment_metadata) it "collects hooks from plugins" do hook = MockHook.new(->(_, _) { }, ->(_, _, _) { }) plugin = MockPlugin.new("test-plugin", [hook]) - + with_client(test_config(plugins: [plugin])) do |client| expect(client.instance_variable_get("@hooks")).to include(hook) end @@ -58,7 +58,7 @@ def register(client, environment_metadata) it "handles plugin hook errors gracefully" do plugin = MockPlugin.new("error-plugin") allow(plugin).to receive(:get_hooks).and_raise("Hook error") - + with_client(test_config(plugins: [plugin])) do |client| expect(client).to be_initialized end @@ -70,7 +70,7 @@ def register(client, environment_metadata) registered = false register_callback = ->(client, metadata) { registered = true } plugin = MockPlugin.new("test-plugin", [], register_callback) - + with_client(test_config(plugins: [plugin])) do |client| expect(registered).to be true end @@ -80,7 +80,7 @@ def register(client, environment_metadata) received_metadata = nil register_callback = ->(client, metadata) { received_metadata = metadata } plugin = MockPlugin.new("test-plugin", [], register_callback) - + with_client(test_config(plugins: [plugin])) do |client| expect(received_metadata).to be_a(Interfaces::Plugins::EnvironmentMetadata) expect(received_metadata.sdk.name).to eq("ruby-server-sdk") @@ -91,7 +91,7 @@ def register(client, environment_metadata) it "handles plugin registration errors gracefully" do register_callback = ->(client, metadata) { raise "Registration error" } plugin = MockPlugin.new("error-plugin", [], register_callback) - + with_client(test_config(plugins: [plugin])) do |client| expect(client).to be_initialized end @@ -103,7 +103,7 @@ def register(client, environment_metadata) order = [] plugin1 = MockPlugin.new("plugin1", [], ->(_, _) { order << "plugin1" }) plugin2 = MockPlugin.new("plugin2", [], ->(_, _) { order << "plugin2" }) - + with_client(test_config(plugins: [plugin1, plugin2])) do |client| expect(order).to eq ["plugin1", "plugin2"] end @@ -113,7 +113,7 @@ def register(client, environment_metadata) config_hook = MockHook.new(->(_, _) { }, ->(_, _, _) { }) plugin_hook = MockHook.new(->(_, _) { }, ->(_, _, _) { }) plugin = MockPlugin.new("test-plugin", [plugin_hook]) - + with_client(test_config(hooks: [config_hook], plugins: [plugin])) do |client| hooks = client.instance_variable_get("@hooks") config_hook_index = hooks.index(config_hook) @@ -131,7 +131,7 @@ def register(client, environment_metadata) wrapper_name: "test-wrapper", wrapper_version: "2.0.0" ) - + expect(metadata.name).to eq("test-sdk") expect(metadata.version).to eq("1.0.0") expect(metadata.wrapper_name).to eq("test-wrapper") @@ -143,7 +143,7 @@ def register(client, environment_metadata) id: "test-app", version: "3.0.0" ) - + expect(metadata.id).to eq("test-app") expect(metadata.version).to eq("3.0.0") end @@ -151,13 +151,13 @@ def register(client, environment_metadata) it "creates EnvironmentMetadata correctly" do sdk_metadata = Interfaces::Plugins::SdkMetadata.new(name: "test", version: "1.0") app_metadata = Interfaces::Plugins::ApplicationMetadata.new(id: "app") - + metadata = Interfaces::Plugins::EnvironmentMetadata.new( sdk: sdk_metadata, application: 
app_metadata, sdk_key: "test-key" ) - + expect(metadata.sdk).to eq(sdk_metadata) expect(metadata.application).to eq(app_metadata) expect(metadata.sdk_key).to eq("test-key") From 57471b1e3ce7678403763c119722ebf85bfad43b Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Fri, 11 Jul 2025 17:39:30 +0000 Subject: [PATCH 4/8] ignore specific rubocop offense --- lib/ldclient-rb/integrations/test_data/flag_builder.rb | 2 +- spec/ldclient_plugins_spec.rb | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index 2c4aa1b0..81c6df79 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -487,7 +487,7 @@ def build # class FlagRuleBuilder # @private - FlagRuleClause = Struct.new(:contextKind, :attribute, :op, :values, :negate, keyword_init: true) + FlagRuleClause = Struct.new(:contextKind, :attribute, :op, :values, :negate, keyword_init: true) # rubocop:disable Naming/MethodName # @private def initialize(flag_builder) diff --git a/spec/ldclient_plugins_spec.rb b/spec/ldclient_plugins_spec.rb index be5b2d9e..0f103312 100644 --- a/spec/ldclient_plugins_spec.rb +++ b/spec/ldclient_plugins_spec.rb @@ -1,3 +1,4 @@ +require "mock_components" require "spec_helper" module LaunchDarkly From b359747e8505522327bf15b34e8ff741535209da Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Fri, 11 Jul 2025 21:01:04 +0000 Subject: [PATCH 5/8] remove extra mock plugin definition --- spec/ldclient_plugins_spec.rb | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/spec/ldclient_plugins_spec.rb b/spec/ldclient_plugins_spec.rb index 0f103312..a05c1cf1 100644 --- a/spec/ldclient_plugins_spec.rb +++ b/spec/ldclient_plugins_spec.rb @@ -3,28 +3,6 @@ module LaunchDarkly describe "LDClient plugins tests" do - class MockPlugin - include Interfaces::Plugins::Plugin - - def initialize(name, hooks = [], register_callback = nil) - @name = name - @hooks = hooks - @register_callback = register_callback - end - - def metadata - Interfaces::Plugins::PluginMetadata.new(@name) - end - - def get_hooks(environment_metadata) - @hooks - end - - def register(client, environment_metadata) - @register_callback.call(client, environment_metadata) if @register_callback - end - end - context "plugin configuration" do it "can register a plugin on the config" do plugin = MockPlugin.new("test-plugin") From 0d59aba52d193351ffc2de67418961b89f938519 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 15 Jul 2025 14:47:00 +0000 Subject: [PATCH 6/8] simplify exception handling inside method --- lib/ldclient-rb/ldclient.rb | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index d785e87b..2c986933 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -203,22 +203,18 @@ def postfork(wait_for_sec = 5) private def get_plugin_hooks(environment_metadata) hooks = [] @config.plugins.each do |plugin| - begin - hooks.concat(plugin.get_hooks(environment_metadata)) - rescue => e + hooks.concat(plugin.get_hooks(environment_metadata)) + rescue => e @config.logger.error { "[LDClient] Error getting hooks from plugin #{plugin.metadata.name}: #{e}" } - end end hooks end private def register_plugins(environment_metadata) @config.plugins.each do |plugin| - begin - plugin.register(self, environment_metadata) - rescue => e - @config.logger.error 
{ "[LDClient] Error registering plugin #{plugin.metadata.name}: #{e}" } - end + plugin.register(self, environment_metadata) + rescue => e + @config.logger.error { "[LDClient] Error registering plugin #{plugin.metadata.name}: #{e}" } end end @@ -425,12 +421,10 @@ def variation_detail(key, context, default) # @return [any] # private def try_execute_stage(method, hook_name) - begin - yield - rescue => e - @config.logger.error { "[LDClient] An error occurred in #{method} of the hook #{hook_name}: #{e}" } - nil - end + yield + rescue => e + @config.logger.error { "[LDClient] An error occurred in #{method} of the hook #{hook_name}: #{e}" } + nil end # From 18f96386e13bc0b9ee770d063bdad7fcea29a0b0 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 15 Jul 2025 15:19:33 +0000 Subject: [PATCH 7/8] bump linux latest and take the blue pill (stay in the matrix) --- .github/workflows/ci.yml | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a22014f5..85c219e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,20 +10,13 @@ on: - "**.md" jobs: - build-linux-oldest: + build-linux: uses: ./.github/workflows/build-gem.yml + strategy: + matrix: + version: ["3.2", "3.4", "jruby-9.4"] with: - version: "3.2" - - build-linux-latest: - uses: ./.github/workflows/build-gem.yml - with: - version: "3.2" - - build-linux-jruby: - uses: ./.github/workflows/build-gem.yml - with: - version: "jruby-9.4" + version: ${{ matrix.version }} build-docs: runs-on: ubuntu-latest From ec8e09bb2600bf993d0b41def9cec04a1bf91380 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 15 Jul 2025 15:23:46 +0000 Subject: [PATCH 8/8] AWS sdk fails in 3.4, we will open a PR with them before we add it back. --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 85c219e0..83090cb2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,7 +14,7 @@ jobs: uses: ./.github/workflows/build-gem.yml strategy: matrix: - version: ["3.2", "3.4", "jruby-9.4"] + version: ["3.2", "jruby-9.4"] with: version: ${{ matrix.version }}
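# A short sketch of the per-iteration rescue idiom the earlier ldclient.rb refactor relies
# on: since Ruby 2.6 a rescue clause can be attached directly to a do/end block body, so the
# explicit begin/end is redundant and a failure in one plugin is logged while the loop moves
# on to the next. `plugins`, `client`, `environment_metadata`, and `logger` are stand-ins
# for the client's internal state.
plugins.each do |plugin|
  plugin.register(client, environment_metadata)
rescue => e
  logger.error { "plugin #{plugin.metadata.name} failed to register: #{e}" }
end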