Skip to content

Commit 4502d3b

Browse files
authored
Merge pull request #6 from aws-samples/fix/feedback
Fix/feedback
2 parents 193a931 + 5ea9e32 commit 4502d3b

File tree

14 files changed

+257
-136
lines changed

14 files changed

+257
-136
lines changed

README.md

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -91,13 +91,15 @@ curl -X POST http://localhost:8080/invocations \
9191
-H "Content-Type: application/json" \
9292
-d '{"input": {"prompt": "Hello", "conversation_id": "<conversation_id>"}}'
9393

94-
curl -X POST http://localhost:8080/api/v1/feedback \
94+
curl -X POST http://localhost:8080/api/v1/invocations \
9595
-H "Content-Type: application/json" \
9696
-d '{
97-
"run_id": "<run-id>",
98-
"session_id": "<session-id>",
99-
"score": 1.0,
100-
"comment": "Great response!"
97+
"feedback": {
98+
"run_id": "<run-id>",
99+
"session_id": "<session-id>",
100+
"score": 1.0,
101+
"comment": "Great response!"
102+
}
101103
}'
102104

103105

cx-agent-backend/src/domain/services/agent_service.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ class AgentRequest:
2424
user_id: str
2525
model: str
2626
session_id: str | None = None
27+
trace_id: str | None = None
2728

2829

2930
@dataclass(frozen=True)

cx-agent-backend/src/domain/services/conversation_service.py

Lines changed: 34 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,17 @@
11
"""Domain service for conversation business logic."""
22

3+
import logging
4+
import os
35
from uuid import UUID
46

7+
from langfuse import get_client, Langfuse
8+
59
from domain.entities.conversation import Conversation, Message
610
from domain.repositories.conversation_repository import ConversationRepository
711
from domain.services.agent_service import AgentRequest, AgentService, AgentType
812
from domain.services.guardrail_service import GuardrailAssessment, GuardrailService
913

14+
logger = logging.getLogger(__name__)
1015

1116
class ConversationService:
1217
"""Service for conversation business logic."""
@@ -67,6 +72,7 @@ async def send_message(
6772
user_id=conversation.user_id,
6873
model=model,
6974
session_id=str(conversation.id),
75+
trace_id=None, # Can be set from FastAPI layer
7076
)
7177
agent_response = await self._agent_service.process_request(agent_request)
7278

@@ -109,23 +115,40 @@ async def get_user_conversations(self, user_id: str) -> list[Conversation]:
109115

110116
async def log_feedback(self, user_id: str, session_id: str, message_id: str, score: int, comment: str = "") -> None:
111117
"""Log user feedback to Langfuse."""
118+
119+
# Log feedback attempt
120+
feedback_msg = f"[FEEDBACK] Attempting to log feedback - user_id: {user_id}, session_id: {session_id}, message_id: {message_id}, score: {score}"
121+
logger.info(feedback_msg)
122+
112123
try:
113-
import os
114-
from langfuse import Langfuse
124+
125+
logger.info("[FEEDBACK] Langfuse config - enabled: %s, host: %s",
126+
self._langfuse_config.get("enabled"),
127+
self._langfuse_config.get("host"))
115128

116129
if self._langfuse_config.get("enabled"):
130+
logger.info("[FEEDBACK] Langfuse is enabled, setting environment variables")
117131
os.environ["LANGFUSE_SECRET_KEY"] = self._langfuse_config.get("secret_key")
118132
os.environ["LANGFUSE_PUBLIC_KEY"] = self._langfuse_config.get("public_key")
119133
os.environ["LANGFUSE_HOST"] = self._langfuse_config.get("host")
120134

121-
langfuse = Langfuse()
135+
langfuse = get_client()
136+
predefined_trace_id = Langfuse.create_trace_id(seed=session_id)
122137

123-
langfuse.create_score(
124-
trace_id=str(f"{user_id}_{session_id}"),
125-
name="user-feedback",
126-
value=score,
127-
data_type="NUMERIC",
128-
comment=comment,
129-
)
138+
logger.info("[FEEDBACK] Calling span.score_trace")
139+
with langfuse.start_as_current_span(
140+
name="langchain-request",
141+
trace_context={"trace_id": predefined_trace_id}
142+
) as span:
143+
result = span.score_trace(
144+
name="user-feedback",
145+
value=score,
146+
data_type="NUMERIC",
147+
comment=comment
148+
)
149+
150+
logger.info("[FEEDBACK] Successfully created score: %s", result)
151+
else:
152+
logger.info("[FEEDBACK] Langfuse is not enabled in config")
130153
except Exception as e:
131-
print(f"Failed to log feedback to Langfuse: {e}")
154+
logger.error(f"[FEEDBACK] Failed to log feedback to Langfuse: {e}")

cx-agent-backend/src/infrastructure/adapters/langgraph_agent_service.py

Lines changed: 57 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,9 @@
33
from langchain_core.messages import AIMessage, HumanMessage
44
from langchain_core.runnables import RunnableConfig
55
import os
6-
import base64
76
import logging
87
from langgraph.prebuilt import create_react_agent
8+
from langfuse import get_client, Langfuse
99
from langfuse.langchain import CallbackHandler
1010

1111
logger = logging.getLogger(__name__)
@@ -76,7 +76,20 @@ def _create_agent(self, agent_type: AgentType, model: str) -> any:
7676

7777
async def process_request(self, request: AgentRequest) -> AgentResponse:
7878
"""Process request through appropriate agent."""
79-
logger.info(f"Processing request for user {request.user_id}, session {request.session_id}, agent type {request.agent_type}")
79+
logger.info(
80+
"Processing request for user %s, session %s, agent type %s",
81+
request.user_id,
82+
request.session_id,
83+
request.agent_type,
84+
)
85+
86+
# Use trace_id from request if provided, otherwise create one
87+
langfuse = None
88+
predefined_trace_id = getattr(request, 'trace_id', None)
89+
if self._langfuse_config.get("enabled"):
90+
langfuse = get_client()
91+
if not predefined_trace_id:
92+
predefined_trace_id = Langfuse.create_trace_id(seed=request.session_id)
8093

8194
# Check input guardrails if enabled
8295
if self._guardrail_service and request.messages:
@@ -115,28 +128,51 @@ async def process_request(self, request: AgentRequest) -> AgentResponse:
115128
# Create config with Langfuse callback if enabled
116129
callbacks = []
117130
trace_id = None
131+
response = None
118132

119133
if self._langfuse_config.get("enabled"):
120134
os.environ["LANGFUSE_SECRET_KEY"] = self._langfuse_config.get("secret_key")
121135
os.environ["LANGFUSE_PUBLIC_KEY"] = self._langfuse_config.get("public_key")
122136
os.environ["LANGFUSE_HOST"] = self._langfuse_config.get("host")
123137

138+
trace_id = predefined_trace_id
139+
124140
langfuse_handler = CallbackHandler()
125-
callbacks.append(langfuse_handler)
126-
trace_id = str(f"{request.user_id}_{request.session_id}")
127-
128-
config = RunnableConfig(
129-
configurable={
130-
"thread_id": f"{request.user_id}_{request.session_id}",
131-
"user_id": request.user_id,
132-
},
133-
callbacks=callbacks,
134-
)
135-
136-
# Invoke agent
137-
logger.debug(f"Invoking agent with {len(lc_messages)} messages")
138-
response = await agent.ainvoke({"messages": lc_messages}, config=config)
139-
logger.debug(f"Agent response contains {len(response['messages'])} messages")
141+
142+
with langfuse.start_as_current_span(
143+
name="langchain-request",
144+
trace_context={"trace_id": predefined_trace_id}
145+
) as span:
146+
span.update_trace(
147+
user_id=request.user_id,
148+
input={"messages": [msg.content for msg in request.messages]}
149+
)
150+
151+
config = RunnableConfig(
152+
configurable={
153+
"thread_id": f"{request.session_id}",
154+
"user_id": request.user_id,
155+
},
156+
callbacks=[langfuse_handler],
157+
)
158+
159+
# Invoke agent
160+
logger.debug("Invoking agent with %s messages", len(lc_messages))
161+
response = await agent.ainvoke({"messages": lc_messages}, config=config)
162+
163+
span.update_trace(output={"response": response["messages"][-1].content if response["messages"] else ""})
164+
else:
165+
config = RunnableConfig(
166+
configurable={
167+
"thread_id": f"{request.session_id}",
168+
"user_id": request.user_id,
169+
},
170+
)
171+
172+
# Invoke agent
173+
logger.debug("Invoking agent with %s messages", len(lc_messages))
174+
response = await agent.ainvoke({"messages": lc_messages}, config=config)
175+
logger.debug("Agent response contains %s messages", len(response["messages"]))
140176
# Extract response
141177
last_message = response["messages"][-1]
142178
tools_used = []
@@ -152,7 +188,7 @@ async def process_request(self, request: AgentRequest) -> AgentResponse:
152188
tools_used.append(tool_call["name"])
153189
# Remove duplicates
154190
tools_used = list(set(tools_used))
155-
logger.info(f"Agent completed. Tools used: {tools_used}")
191+
logger.info("Agent completed. Tools used: %s", tools_used)
156192

157193
# Check output guardrails if enabled
158194
if self._guardrail_service:
@@ -173,12 +209,12 @@ async def process_request(self, request: AgentRequest) -> AgentResponse:
173209
"model": request.model,
174210
"agent_type": request.agent_type.value,
175211
"trace_id": trace_id,
176-
"debug_message_count": len(response["messages"]),
212+
"debug_message_count": len(response["messages"]) if response else 0,
177213
"debug_message_types": message_types,
178214
"debug_tools_found": len(tools_used) > 0,
179215
}
180216

181-
logger.info(f"Returning response for session {request.session_id}")
217+
logger.info("Returning response for session %s", request.session_id)
182218
return AgentResponse(
183219
content=last_message.content,
184220
agent_type=request.agent_type,
@@ -201,7 +237,7 @@ async def stream_response(self, request: AgentRequest):
201237
# Create config
202238
config = RunnableConfig(
203239
configurable={
204-
"thread_id": f"{request.user_id}_{request.session_id}",
240+
"thread_id": f"{request.session_id}",
205241
"user_id": request.user_id,
206242
}
207243
)

cx-agent-backend/src/infrastructure/adapters/openai_llm_service.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
44
from langchain_openai import ChatOpenAI
55

6-
from domain.entities.conversation import Message, MessageRole
6+
from domain.entities.conversation import MessageRole
77
from domain.services.llm_service import LLMRequest, LLMResponse, LLMService
88

99

0 commit comments

Comments (0)