From e3b21ab287520b769f71a5ed6ec65dc031086bbb Mon Sep 17 00:00:00 2001
From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com>
Date: Fri, 15 Aug 2025 14:04:57 +0200
Subject: [PATCH 1/4] fix(langchain): take only int values in parsed usage

---
 langfuse/langchain/CallbackHandler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/langfuse/langchain/CallbackHandler.py b/langfuse/langchain/CallbackHandler.py
index 03e73c3f2..0dd0c13b5 100644
--- a/langfuse/langchain/CallbackHandler.py
+++ b/langfuse/langchain/CallbackHandler.py
@@ -965,7 +965,7 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]) -> Any:
                 if "input" in usage_model:
                     usage_model["input"] = max(0, usage_model["input"] - value)
 
-    usage_model = {k: v for k, v in usage_model.items() if not isinstance(v, str)}
+    usage_model = {k: v for k, v in usage_model.items() if isinstance(v, int)}
 
     return usage_model if usage_model else None
 

From 9049a7ef1519553a391c02c3850a295a258907c8 Mon Sep 17 00:00:00 2001
From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com>
Date: Fri, 15 Aug 2025 14:10:48 +0200
Subject: [PATCH 2/4] push

---
 langfuse/langchain/CallbackHandler.py | 34 ++++++++++++++++----------
 1 file changed, 20 insertions(+), 14 deletions(-)

diff --git a/langfuse/langchain/CallbackHandler.py b/langfuse/langchain/CallbackHandler.py
index 0dd0c13b5..154c5e683 100644
--- a/langfuse/langchain/CallbackHandler.py
+++ b/langfuse/langchain/CallbackHandler.py
@@ -875,22 +875,28 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]) -> Any:
         usage_model = cast(Dict, usage.copy())  # Copy all existing key-value pairs
 
         # Skip OpenAI usage types as they are handled server side
-        if not all(
+        if all(
             openai_key in usage_model
-            for openai_key in ["prompt_tokens", "completion_tokens", "total_tokens"]
+            for openai_key in [
+                "prompt_tokens",
+                "completion_tokens",
+                "total_tokens",
+                "prompt_tokens_details",
+                "completion_tokens_details",
+            ]
         ):
-            for model_key, langfuse_key in conversion_list:
-                if model_key in usage_model:
-                    captured_count = usage_model.pop(model_key)
-                    final_count = (
-                        sum(captured_count)
-                        if isinstance(captured_count, list)
-                        else captured_count
-                    )  # For Bedrock, the token count is a list when streamed
-
-                    usage_model[langfuse_key] = (
-                        final_count  # Translate key and keep the value
-                    )
+            return usage_model
+
+        for model_key, langfuse_key in conversion_list:
+            if model_key in usage_model:
+                captured_count = usage_model.pop(model_key)
+                final_count = (
+                    sum(captured_count)
+                    if isinstance(captured_count, list)
+                    else captured_count
+                )  # For Bedrock, the token count is a list when streamed
+
+                usage_model[langfuse_key] = final_count  # Translate key and keep the value
 
     if isinstance(usage_model, dict):
         if "input_token_details" in usage_model:

From 7036a75d32acccc567773129c0190b4e56ed8be1 Mon Sep 17 00:00:00 2001
From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com>
Date: Fri, 15 Aug 2025 14:17:10 +0200
Subject: [PATCH 3/4] Update langfuse/langchain/CallbackHandler.py

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
---
 langfuse/langchain/CallbackHandler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/langfuse/langchain/CallbackHandler.py b/langfuse/langchain/CallbackHandler.py
index 154c5e683..35b33c04d 100644
--- a/langfuse/langchain/CallbackHandler.py
+++ b/langfuse/langchain/CallbackHandler.py
@@ -971,7 +971,7 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]) -> Any:
                 if "input" in usage_model:
                     usage_model["input"] = max(0, usage_model["input"] - value)
 
-    usage_model = {k: v for k, v in usage_model.items() if isinstance(v, int)}
+    usage_model = {k: v for k, v in usage_model.items() if type(v) is int}
 
     return usage_model if usage_model else None
 

From 3bb9c43472bf01e68e11d18c44c5728cc82906d3 Mon Sep 17 00:00:00 2001
From: Hassieb Pakzad <68423100+hassiebp@users.noreply.github.com>
Date: Fri, 15 Aug 2025 18:01:06 +0200
Subject: [PATCH 4/4] push

---
 langfuse/langchain/CallbackHandler.py | 33 +++++++++++++++++++--------
 1 file changed, 23 insertions(+), 10 deletions(-)

diff --git a/langfuse/langchain/CallbackHandler.py b/langfuse/langchain/CallbackHandler.py
index 35b33c04d..7c898807c 100644
--- a/langfuse/langchain/CallbackHandler.py
+++ b/langfuse/langchain/CallbackHandler.py
@@ -875,15 +875,28 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]) -> Any:
         usage_model = cast(Dict, usage.copy())  # Copy all existing key-value pairs
 
         # Skip OpenAI usage types as they are handled server side
-        if all(
-            openai_key in usage_model
-            for openai_key in [
-                "prompt_tokens",
-                "completion_tokens",
-                "total_tokens",
-                "prompt_tokens_details",
-                "completion_tokens_details",
-            ]
+        if (
+            all(
+                openai_key in usage_model
+                for openai_key in [
+                    "prompt_tokens",
+                    "completion_tokens",
+                    "total_tokens",
+                    "prompt_tokens_details",
+                    "completion_tokens_details",
+                ]
+            )
+            and len(usage_model.keys()) == 5
+        ) or (
+            all(
+                openai_key in usage_model
+                for openai_key in [
+                    "prompt_tokens",
+                    "completion_tokens",
+                    "total_tokens",
+                ]
+            )
+            and len(usage_model.keys()) == 3
         ):
             return usage_model
 
@@ -971,7 +984,7 @@ def _parse_usage_model(usage: typing.Union[pydantic.BaseModel, dict]) -> Any:
                 if "input" in usage_model:
                     usage_model["input"] = max(0, usage_model["input"] - value)
 
-    usage_model = {k: v for k, v in usage_model.items() if type(v) is int}
+    usage_model = {k: v for k, v in usage_model.items() if isinstance(v, int)}
 
     return usage_model if usage_model else None