 from langfuse._client.client import Langfuse
 from langfuse._utils.prompt_cache import (
     DEFAULT_PROMPT_CACHE_TTL_SECONDS,
+    PromptCache,
     PromptCacheItem,
 )
+from langfuse.api.resources.commons.errors.not_found_error import NotFoundError
 from langfuse.api.resources.prompts import Prompt_Chat, Prompt_Text
 from langfuse.model import ChatPromptClient, TextPromptClient
 from tests.utils import create_uuid, get_api
@@ -679,9 +681,15 @@ def test_prompt_end_to_end():
 
 @pytest.fixture
 def langfuse():
+    from langfuse._client.resource_manager import LangfuseResourceManager
+
     langfuse_instance = Langfuse()
     langfuse_instance.api = Mock()
 
+    if langfuse_instance._resources is None:
+        langfuse_instance._resources = Mock(spec=LangfuseResourceManager)
+        langfuse_instance._resources.prompt_cache = PromptCache()
+
     return langfuse_instance
 
 
@@ -1157,6 +1165,71 @@ def test_get_expired_prompt_when_failing_fetch(mock_time, langfuse: Langfuse):
     assert result_call_2 == prompt_client
 
 
+@patch.object(PromptCacheItem, "get_epoch_seconds")
+def test_evict_prompt_cache_entry_when_refresh_returns_not_found(
+    mock_time, langfuse: Langfuse
+) -> None:
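+    # The patched clock starts at t=0, so the TTL expiry below is fully deterministic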
+    mock_time.return_value = 0
+
+    prompt_name = "test_evict_prompt_cache_entry_when_refresh_returns_not_found"
+    ttl_seconds = 5
+    fallback_prompt = "fallback text prompt"
+
+    prompt = Prompt_Text(
+        name=prompt_name,
+        version=1,
+        prompt="Make me laugh",
+        labels=[],
+        type="text",
+        config={},
+        tags=[],
+    )
+    prompt_client = TextPromptClient(prompt)
+    cache_key = PromptCache.generate_cache_key(prompt_name, version=None, label=None)
+
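+    # First fetch hits the mocked API, succeeds, and populates the cache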
+    mock_server_call = langfuse.api.prompts.get
+    mock_server_call.return_value = prompt
+
+    initial_result = langfuse.get_prompt(
+        prompt_name,
+        cache_ttl_seconds=ttl_seconds,
+        max_retries=0,
+    )
+    assert initial_result == prompt_client
+    assert langfuse._resources.prompt_cache.get(cache_key) is not None
+
+    # Expire cache entry and trigger background refresh
+    mock_time.return_value = ttl_seconds + 1
+
+    def raise_not_found(*_args: object, **_kwargs: object) -> None:
+        raise NotFoundError({"message": "Prompt not found"})
+
+    mock_server_call.side_effect = raise_not_found
+
+    stale_result = langfuse.get_prompt(
+        prompt_name,
+        cache_ttl_seconds=ttl_seconds,
+        max_retries=0,
+    )
+    assert stale_result == prompt_client
+
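+    # The stale entry is still served while the refresh runs; wait for the background task to finish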
+    while langfuse._resources.prompt_cache._task_manager.active_tasks() != 0:
+        sleep(0.1)
+
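+    # The NotFoundError raised during the refresh should have evicted the stale entry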
+    assert langfuse._resources.prompt_cache.get(cache_key) is None
+
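+    # With the entry gone and the API still failing, get_prompt should serve the caller-supplied fallback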
+    fallback_result = langfuse.get_prompt(
+        prompt_name,
+        cache_ttl_seconds=ttl_seconds,
+        fallback=fallback_prompt,
+        max_retries=0,
+    )
+    assert fallback_result.is_fallback
+    assert fallback_result.prompt == fallback_prompt
+
+
 # Should fetch new prompt if version changes
 def test_get_fresh_prompt_when_version_changes(langfuse: Langfuse):
     prompt_name = "test_get_fresh_prompt_when_version_changes"
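Taken together, the new test pins down the intended behavior: a 404 during the background refresh of an expired entry evicts it from the prompt cache, and later calls fall through to the caller-supplied fallback instead of serving a deleted prompt indefinitely. A minimal sketch of how application code would lean on this behavior, assuming `LANGFUSE_*` credentials in the environment and a hypothetical prompt name (not part of this PR):

```python
from langfuse import Langfuse  # public client import; the tests above use the internal path

langfuse = Langfuse()  # reads LANGFUSE_* credentials from the environment

prompt = langfuse.get_prompt(
    "movie-critic",  # hypothetical prompt name
    cache_ttl_seconds=60,  # short TTL so server-side deletions propagate quickly
    fallback="You are a helpful movie critic.",  # used once the cached entry is evicted
    max_retries=0,
)

if prompt.is_fallback:
    # The prompt was deleted upstream (or the API is unreachable); the fallback text is active.
    pass

text = prompt.prompt  # resolves to the fallback string in the scenario exercised by the test
```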