Commit 2d3ff4f

Merge branch 'main' into api-spec-bot-ad16fa0
2 parents 723886b + d233bc4 commit 2d3ff4f

7 files changed, +101 -5 lines

langfuse/_client/client.py

Lines changed: 18 additions & 1 deletion
@@ -79,6 +79,7 @@
 from langfuse._utils.parse_error import handle_fern_exception
 from langfuse._utils.prompt_cache import PromptCache
 from langfuse.api.resources.commons.errors.error import Error
+from langfuse.api.resources.commons.errors.not_found_error import NotFoundError
 from langfuse.api.resources.ingestion.types.score_body import ScoreBody
 from langfuse.api.resources.prompts.types import (
     CreatePromptRequest_Chat,
@@ -3250,20 +3251,28 @@ def create_dataset(
         name: str,
         description: Optional[str] = None,
         metadata: Optional[Any] = None,
+        input_schema: Optional[Any] = None,
+        expected_output_schema: Optional[Any] = None,
     ) -> Dataset:
         """Create a dataset with the given name on Langfuse.

         Args:
             name: Name of the dataset to create.
             description: Description of the dataset. Defaults to None.
             metadata: Additional metadata. Defaults to None.
+            input_schema: JSON Schema for validating dataset item inputs. When set, all new items will be validated against this schema.
+            expected_output_schema: JSON Schema for validating dataset item expected outputs. When set, all new items will be validated against this schema.

         Returns:
             Dataset: The created dataset as returned by the Langfuse API.
         """
         try:
             body = CreateDatasetRequest(
-                name=name, description=description, metadata=metadata
+                name=name,
+                description=description,
+                metadata=metadata,
+                inputSchema=input_schema,
+                expectedOutputSchema=expected_output_schema,
             )
             langfuse_logger.debug(f"Creating datasets {body}")

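The two new parameters surface dataset-level validation in the SDK. A minimal usage sketch, assuming the standard client entry point; the dataset name and schema below are illustrative, not taken from this commit:

    from langfuse import Langfuse

    langfuse = Langfuse()

    # Hypothetical schema: each item input must be an object with a string "question".
    dataset = langfuse.create_dataset(
        name="qa-eval",
        description="QA pairs with validated inputs",
        input_schema={
            "type": "object",
            "properties": {"question": {"type": "string"}},
            "required": ["question"],
        },
        expected_output_schema={
            "type": "object",
            "properties": {"answer": {"type": "string"}},
            "required": ["answer"],
        },
    )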
@@ -3597,6 +3606,14 @@ def fetch_prompts() -> Any:

                 return prompt

+            except NotFoundError as not_found_error:
+                langfuse_logger.warning(
+                    f"Prompt '{cache_key}' not found during refresh, evicting from cache."
+                )
+                if self._resources is not None:
+                    self._resources.prompt_cache.delete(cache_key)
+                raise not_found_error
+
             except Exception as e:
                 langfuse_logger.error(
                     f"Error while fetching prompt '{cache_key}': {str(e)}"
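From the caller's side, the new except branch means a prompt deleted on the server is evicted from the local cache on the next background refresh rather than being served stale indefinitely. A hedged sketch of the resulting behavior, mirroring the test added below; the prompt name and fallback text are illustrative:

    from langfuse import Langfuse

    langfuse = Langfuse()

    # Once the server-side prompt is gone, the refresh evicts the cache entry;
    # later lookups then return the fallback instead of a stale cached copy.
    prompt = langfuse.get_prompt(
        "movie-critic",  # hypothetical prompt name
        fallback="You are a helpful movie critic.",
        max_retries=0,
    )
    if prompt.is_fallback:
        pass  # degraded mode: the server copy is unavailable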

langfuse/_utils/prompt_cache.py

Lines changed: 3 additions & 0 deletions
@@ -158,6 +158,9 @@ def set(self, key: str, value: PromptClient, ttl_seconds: Optional[int]) -> None

         self._cache[key] = PromptCacheItem(value, ttl_seconds)

+    def delete(self, key: str) -> None:
+        self._cache.pop(key, None)
+
     def invalidate(self, prompt_name: str) -> None:
         """Invalidate all cached prompts with the given prompt name."""
         for key in list(self._cache):
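The new delete() evicts exactly one cache key and is a no-op for unknown keys, whereas the existing invalidate() drops every cached variant of a prompt name. A small sketch using only calls that appear in this diff (the prompt name is illustrative):

    from langfuse._utils.prompt_cache import PromptCache

    cache = PromptCache()
    key = PromptCache.generate_cache_key("movie-critic", version=None, label=None)

    cache.delete(key)                 # removes one entry; safe if the key is absent
    cache.invalidate("movie-critic")  # removes all entries for the prompt name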

langfuse/langchain/CallbackHandler.py

Lines changed: 3 additions & 0 deletions
@@ -1175,6 +1175,9 @@ def _parse_usage_model(usage: Union[pydantic.BaseModel, dict]) -> Any:
         if "input" in usage_model:
             usage_model["input"] = max(0, usage_model["input"] - value)

+        if f"input_modality_{item['modality']}" in usage_model:
+            usage_model[f"input_modality_{item['modality']}"] = max(0, usage_model[f"input_modality_{item['modality']}"] - value)
+
     usage_model = {k: v for k, v in usage_model.items() if isinstance(v, int)}

     return usage_model if usage_model else None
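The added branch mirrors the existing clamp on the aggregate "input" count: already-counted tokens are subtracted from the matching modality bucket, floored at zero so usage never goes negative. A self-contained sketch of the pattern; the dict contents here are hypothetical, not taken from the handler:

    # Hypothetical usage snapshot and token-detail item.
    usage_model = {"input": 120, "input_modality_audio": 80}
    item = {"modality": "audio"}
    value = 100  # tokens already attributed elsewhere (e.g. cached tokens)

    key = f"input_modality_{item['modality']}"
    if key in usage_model:
        # Subtract, clamping at zero.
        usage_model[key] = max(0, usage_model[key] - value)

    assert usage_model["input_modality_audio"] == 0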

langfuse/version.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 """@private"""

-__version__ = "3.10.1"
+__version__ = "3.10.2"

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "langfuse"

-version = "3.10.1"
+version = "3.10.2"
 description = "A client library for accessing langfuse"
 authors = ["langfuse <developers@langfuse.com>"]
 license = "MIT"

tests/test_datasets.py

Lines changed: 2 additions & 2 deletions
@@ -146,7 +146,7 @@ def test_dataset_run_with_metadata_and_description():
     dataset_name = create_uuid()
     langfuse.create_dataset(name=dataset_name)

-    input = json.dumps({"input": "Hello World"})
+    input = {"input": "Hello World"}
     langfuse.create_dataset_item(dataset_name=dataset_name, input=input)

     dataset = langfuse.get_dataset(dataset_name)

@@ -187,7 +187,7 @@ def test_get_dataset_runs():
     dataset_name = create_uuid()
     langfuse.create_dataset(name=dataset_name)

-    input = json.dumps({"input": "Hello World"})
+    input = {"input": "Hello World"}
     langfuse.create_dataset_item(dataset_name=dataset_name, input=input)

     dataset = langfuse.get_dataset(dataset_name)
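The tests now pass the item input as a plain dict rather than a pre-serialized JSON string, presumably so inputs reach the new dataset schema validation as structured objects. A short sketch of the updated call shape:

    # Inputs are plain dicts; the SDK handles serialization.
    langfuse.create_dataset_item(
        dataset_name=dataset_name,
        input={"input": "Hello World"},  # not json.dumps(...)
    )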

tests/test_prompt.py

Lines changed: 73 additions & 0 deletions
@@ -7,8 +7,10 @@
 from langfuse._client.client import Langfuse
 from langfuse._utils.prompt_cache import (
     DEFAULT_PROMPT_CACHE_TTL_SECONDS,
+    PromptCache,
     PromptCacheItem,
 )
+from langfuse.api.resources.commons.errors.not_found_error import NotFoundError
 from langfuse.api.resources.prompts import Prompt_Chat, Prompt_Text
 from langfuse.model import ChatPromptClient, TextPromptClient
 from tests.utils import create_uuid, get_api
@@ -679,9 +681,15 @@ def test_prompt_end_to_end():


 @pytest.fixture
 def langfuse():
+    from langfuse._client.resource_manager import LangfuseResourceManager
+
     langfuse_instance = Langfuse()
     langfuse_instance.api = Mock()

+    if langfuse_instance._resources is None:
+        langfuse_instance._resources = Mock(spec=LangfuseResourceManager)
+    langfuse_instance._resources.prompt_cache = PromptCache()
+
     return langfuse_instance

@@ -1157,6 +1165,71 @@ def test_get_expired_prompt_when_failing_fetch(mock_time, langfuse: Langfuse):
     assert result_call_2 == prompt_client


+@patch.object(PromptCacheItem, "get_epoch_seconds")
+def test_evict_prompt_cache_entry_when_refresh_returns_not_found(
+    mock_time, langfuse: Langfuse
+) -> None:
+    mock_time.return_value = 0
+
+    prompt_name = "test_evict_prompt_cache_entry_when_refresh_returns_not_found"
+    ttl_seconds = 5
+    fallback_prompt = "fallback text prompt"
+
+    prompt = Prompt_Text(
+        name=prompt_name,
+        version=1,
+        prompt="Make me laugh",
+        labels=[],
+        type="text",
+        config={},
+        tags=[],
+    )
+    prompt_client = TextPromptClient(prompt)
+    cache_key = PromptCache.generate_cache_key(prompt_name, version=None, label=None)
+
+    mock_server_call = langfuse.api.prompts.get
+    mock_server_call.return_value = prompt
+
+    initial_result = langfuse.get_prompt(
+        prompt_name,
+        cache_ttl_seconds=ttl_seconds,
+        max_retries=0,
+    )
+    assert initial_result == prompt_client
+    assert langfuse._resources.prompt_cache.get(cache_key) is not None
+
+    # Expire cache entry and trigger background refresh
+    mock_time.return_value = ttl_seconds + 1
+
+    def raise_not_found(*_args: object, **_kwargs: object) -> None:
+        raise NotFoundError({"message": "Prompt not found"})
+
+    mock_server_call.side_effect = raise_not_found
+
+    stale_result = langfuse.get_prompt(
+        prompt_name,
+        cache_ttl_seconds=ttl_seconds,
+        max_retries=0,
+    )
+    assert stale_result == prompt_client
+
+    while True:
+        if langfuse._resources.prompt_cache._task_manager.active_tasks() == 0:
+            break
+        sleep(0.1)
+
+    assert langfuse._resources.prompt_cache.get(cache_key) is None
+
+    fallback_result = langfuse.get_prompt(
+        prompt_name,
+        cache_ttl_seconds=ttl_seconds,
+        fallback=fallback_prompt,
+        max_retries=0,
+    )
+    assert fallback_result.is_fallback
+    assert fallback_result.prompt == fallback_prompt
+
+
 # Should fetch new prompt if version changes
 def test_get_fresh_prompt_when_version_changes(langfuse: Langfuse):
     prompt_name = "test_get_fresh_prompt_when_version_changes"
