From 9bdf3cc814f0a67117ee5eebe68c55884f8dffbc Mon Sep 17 00:00:00 2001 From: Dwij Patel Date: Thu, 20 Feb 2025 01:08:29 +0530 Subject: [PATCH 1/6] fix(tracker): initialize LiteLLM provider conditionally for OpenAI API patching --- agentops/llms/tracker.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/agentops/llms/tracker.py b/agentops/llms/tracker.py index 648920963..43cea7e1c 100644 --- a/agentops/llms/tracker.py +++ b/agentops/llms/tracker.py @@ -104,6 +104,7 @@ def override_api(self): """ Overrides key methods of the specified API to record events. """ + litellm_initialized = False for api in self.SUPPORTED_APIS: if api in sys.modules: @@ -116,11 +117,11 @@ def override_api(self): if Version(module_version) >= parse("1.3.1"): provider = LiteLLMProvider(self.client) provider.override() + litellm_initialized = True else: logger.warning(f"Only LiteLLM>=1.3.1 supported. v{module_version} found.") - return # If using an abstraction like litellm, do not patch the underlying LLM APIs - if api == "openai": + if api == "openai" and not litellm_initialized: # Patch openai v1.0.0+ methods if hasattr(module, "__version__"): module_version = parse(module.__version__) From ca1bae9e3177d23e233c97b9fb70f996886ff947 Mon Sep 17 00:00:00 2001 From: Dwij Patel Date: Thu, 20 Feb 2025 02:51:35 +0530 Subject: [PATCH 2/6] feat(tracker): add detection for LiteLLM calls before OpenAI initialization --- agentops/llms/tracker.py | 42 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/agentops/llms/tracker.py b/agentops/llms/tracker.py index 43cea7e1c..acdcaadc0 100644 --- a/agentops/llms/tracker.py +++ b/agentops/llms/tracker.py @@ -1,3 +1,4 @@ +import inspect import sys from importlib import import_module from importlib.metadata import version @@ -99,21 +100,53 @@ class LlmTracker: def __init__(self, client): self.client = client + self.litellm_initialized = False + + def _is_litellm_call(self): + """ + Detects if the API call originated from LiteLLM. + Returns True if LiteLLM appears in the call stack **before** OpenAI. + """ + stack = inspect.stack() + + litellm_seen = False # Track if LiteLLM was encountered + openai_seen = False # Track if OpenAI was encountered + + for frame in stack: + module = inspect.getmodule(frame.frame) + + module_name = module.__name__ if module else None + + filename = frame.filename.lower() + + if module_name and "litellm" in module_name or "litellm" in filename: + print("LiteLLM detected.") + litellm_seen = True + + if module_name and "openai" in module_name or "openai" in filename: + print("OpenAI detected.") + openai_seen = True + + if not litellm_seen: + return False + + return litellm_seen def override_api(self): """ Overrides key methods of the specified API to record events. """ litellm_initialized = False - + for api in self.SUPPORTED_APIS: if api in sys.modules: module = import_module(api) + if api == "litellm": module_version = version(api) if module_version is None: logger.warning("Cannot determine LiteLLM version. Only LiteLLM>=1.3.1 supported.") - + if Version(module_version) >= parse("1.3.1"): provider = LiteLLMProvider(self.client) provider.override() @@ -121,9 +154,10 @@ def override_api(self): else: logger.warning(f"Only LiteLLM>=1.3.1 supported. v{module_version} found.") - if api == "openai" and not litellm_initialized: + if api == "openai": # Patch openai v1.0.0+ methods - if hasattr(module, "__version__"): + # Ensure OpenAI is only initialized if it was NOT called inside LiteLLM + if not self._is_litellm_call(): module_version = parse(module.__version__) if module_version >= parse("1.0.0"): provider = OpenAiProvider(self.client) From 3096f03c96a972a79266b2913b38c8e4453563cd Mon Sep 17 00:00:00 2001 From: Dwij Patel Date: Thu, 20 Feb 2025 03:00:14 +0530 Subject: [PATCH 3/6] refactor(tracker): remove print statements and update LiteLLM initialization variable --- agentops/llms/tracker.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/agentops/llms/tracker.py b/agentops/llms/tracker.py index acdcaadc0..ec8e9a860 100644 --- a/agentops/llms/tracker.py +++ b/agentops/llms/tracker.py @@ -120,11 +120,9 @@ def _is_litellm_call(self): filename = frame.filename.lower() if module_name and "litellm" in module_name or "litellm" in filename: - print("LiteLLM detected.") litellm_seen = True if module_name and "openai" in module_name or "openai" in filename: - print("OpenAI detected.") openai_seen = True if not litellm_seen: @@ -136,8 +134,6 @@ def override_api(self): """ Overrides key methods of the specified API to record events. """ - litellm_initialized = False - for api in self.SUPPORTED_APIS: if api in sys.modules: module = import_module(api) @@ -150,7 +146,7 @@ def override_api(self): if Version(module_version) >= parse("1.3.1"): provider = LiteLLMProvider(self.client) provider.override() - litellm_initialized = True + self.litellm_initialized = True else: logger.warning(f"Only LiteLLM>=1.3.1 supported. v{module_version} found.") From 00253ce4567a0877a3277169aee85864d03bb281 Mon Sep 17 00:00:00 2001 From: Dwij Patel Date: Thu, 20 Feb 2025 04:43:29 +0530 Subject: [PATCH 4/6] fix(tracker): add version check for OpenAI module before initialization --- agentops/llms/tracker.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/agentops/llms/tracker.py b/agentops/llms/tracker.py index ec8e9a860..5f0b5f1dd 100644 --- a/agentops/llms/tracker.py +++ b/agentops/llms/tracker.py @@ -154,15 +154,16 @@ def override_api(self): # Patch openai v1.0.0+ methods # Ensure OpenAI is only initialized if it was NOT called inside LiteLLM if not self._is_litellm_call(): - module_version = parse(module.__version__) - if module_version >= parse("1.0.0"): - provider = OpenAiProvider(self.client) - provider.override() - else: - raise DeprecationWarning( - "OpenAI versions < 0.1 are no longer supported by AgentOps. Please upgrade OpenAI or " - "downgrade AgentOps to <=0.3.8." - ) + if hasattr(module, "__version__"): + module_version = parse(module.__version__) + if module_version >= parse("1.0.0"): + provider = OpenAiProvider(self.client) + provider.override() + else: + raise DeprecationWarning( + "OpenAI versions < 0.1 are no longer supported by AgentOps. Please upgrade OpenAI or " + "downgrade AgentOps to <=0.3.8." + ) if api == "cohere": # Patch cohere v5.4.0+ methods From a20d5eb23ebe098bd47d151ca6cb8b2c9bc0ebc9 Mon Sep 17 00:00:00 2001 From: Dwij Patel Date: Thu, 20 Feb 2025 23:48:39 +0530 Subject: [PATCH 5/6] refactor(tracker): clean up whitespace in LlmTracker class methods --- agentops/llms/tracker.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/agentops/llms/tracker.py b/agentops/llms/tracker.py index 5f0b5f1dd..fedd8f0fd 100644 --- a/agentops/llms/tracker.py +++ b/agentops/llms/tracker.py @@ -114,11 +114,11 @@ def _is_litellm_call(self): for frame in stack: module = inspect.getmodule(frame.frame) - + module_name = module.__name__ if module else None - + filename = frame.filename.lower() - + if module_name and "litellm" in module_name or "litellm" in filename: litellm_seen = True @@ -137,12 +137,12 @@ def override_api(self): for api in self.SUPPORTED_APIS: if api in sys.modules: module = import_module(api) - + if api == "litellm": module_version = version(api) if module_version is None: logger.warning("Cannot determine LiteLLM version. Only LiteLLM>=1.3.1 supported.") - + if Version(module_version) >= parse("1.3.1"): provider = LiteLLMProvider(self.client) provider.override() From 57fccbe1cef7af333795f5a9b6cc947fd266f23a Mon Sep 17 00:00:00 2001 From: Dwij Patel Date: Thu, 20 Feb 2025 23:53:41 +0530 Subject: [PATCH 6/6] added appropriate comments for further reference --- agentops/llms/tracker.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/agentops/llms/tracker.py b/agentops/llms/tracker.py index fedd8f0fd..4ce5b9841 100644 --- a/agentops/llms/tracker.py +++ b/agentops/llms/tracker.py @@ -105,12 +105,26 @@ def __init__(self, client): def _is_litellm_call(self): """ Detects if the API call originated from LiteLLM. - Returns True if LiteLLM appears in the call stack **before** OpenAI. + + **Issue We Are Addressing:** + - When using LiteLLM, it internally calls OpenAI methods, which results in OpenAI being initialized by default. + - This creates an issue where OpenAI is tracked as the primary provider, even when the request was routed via LiteLLM. + - We need to ensure that OpenAI is only tracked if it was explicitly used and **not** invoked indirectly through LiteLLM. + + **How This Works:** + - The function checks the call stack (execution history) to determine the order in which modules were called. + - If LiteLLM appears in the call stack **before** OpenAI, then OpenAI was invoked via LiteLLM, meaning we should ignore OpenAI. + - If OpenAI appears first without LiteLLM, then OpenAI was used directly, and we should track it as expected. + + **Return Value:** + - Returns `True` if the API call originated from LiteLLM. + - Returns `False` if OpenAI was directly called without going through LiteLLM. """ + stack = inspect.stack() - litellm_seen = False # Track if LiteLLM was encountered - openai_seen = False # Track if OpenAI was encountered + litellm_seen = False # Track if LiteLLM was encountered in the stack + openai_seen = False # Track if OpenAI was encountered in the stack for frame in stack: module = inspect.getmodule(frame.frame) @@ -125,9 +139,11 @@ def _is_litellm_call(self): if module_name and "openai" in module_name or "openai" in filename: openai_seen = True + # If OpenAI is seen **before** LiteLLM, it means OpenAI was used directly, so return False if not litellm_seen: return False + # If LiteLLM was seen at any point before OpenAI, return True (indicating an indirect OpenAI call via LiteLLM) return litellm_seen def override_api(self):