
Commit 3f8e235

Fix agentic extraction to ensure temperature and top_p are mutually exclusive
1 parent d637a02 commit 3f8e235

File tree

2 files changed: +50 −5 lines changed


Makefile

Lines changed: 2 additions & 2 deletions
@@ -129,14 +129,14 @@ ui-build:
 
 commit: lint test
 	$(info Generating commit message...)
-	export COMMIT_MESSAGE="$(shell q chat --no-interactive --trust-all-tools "Understand pending local git change and changes to be committed, then infer a commit message. Return this commit message only" | tail -n 1 | sed 's/\x1b\[[0-9;]*m//g')" && \
+	export COMMIT_MESSAGE="$(shell kiro-cli chat --no-interactive --trust-all-tools "Understand pending local git change and changes to be committed, then infer a commit message. Return this commit message only on a single line." | grep ">" | tail -n 1 | sed 's/\x1b\[[0-9;]*m//g')" && \
 	git add . && \
 	git commit -am "$${COMMIT_MESSAGE}" && \
 	git push
 
 fastcommit: fastlint
 	$(info Generating commit message...)
-	export COMMIT_MESSAGE="$(shell q chat --no-interactive --trust-all-tools "Understand pending local git change and changes to be committed, then infer a commit message. Return this commit message only" | tail -n 1 | sed 's/\x1b\[[0-9;]*m//g')" && \
+	export COMMIT_MESSAGE="$(shell kiro-cli chat --no-interactive --trust-all-tools "Understand pending local git change and changes to be committed, then infer a commit message. Return this commit message only on a single line." | grep ">" | tail -n 1 | sed 's/\x1b\[[0-9;]*m//g')" && \
 	git add . && \
 	git commit -am "$${COMMIT_MESSAGE}" && \
 	git push
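The updated recipe keeps only the last kiro-cli output line containing ">" and strips ANSI color codes before using it as the commit message. A minimal Python sketch of that same filter chain, assuming an illustrative sample of colored CLI output (the real kiro-cli output format is not shown in this commit):

import re

# Illustrative colored output; the escape sequences stand in for whatever
# styling the CLI actually emits.
raw_output = "\x1b[32m> Fix agentic extraction to ensure temperature and top_p are mutually exclusive\x1b[0m"

# Mirrors `grep ">" | tail -n 1 | sed 's/\x1b\[[0-9;]*m//g'`:
# keep the last line containing ">", then remove ANSI escape sequences.
candidate_lines = [line for line in raw_output.splitlines() if ">" in line]
last_line = candidate_lines[-1] if candidate_lines else ""
commit_message = re.sub(r"\x1b\[[0-9;]*m", "", last_line)

print(commit_message)  # "> Fix agentic extraction to ensure temperature and top_p are mutually exclusive"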

lib/idp_common_pkg/idp_common/extraction/agentic_idp.py

Lines changed: 48 additions & 3 deletions
@@ -691,6 +691,37 @@ def _build_model_config(
     return model_config
 
 
+def _get_inference_params(temperature: float, top_p: float | None) -> dict[str, float]:
+    """
+    Get inference parameters ensuring temperature and top_p are mutually exclusive.
+
+    Some Bedrock models don't allow both temperature and top_p to be specified.
+    This follows the same logic as bedrock/client.py lines 348-364.
+
+    Args:
+        temperature: Temperature value from config
+        top_p: Top_p value from config (may be None)
+
+    Returns:
+        Dict with only one of temperature or top_p
+    """
+    params = {}
+
+    # Only use top_p if temperature is 0.0
+    if top_p is not None and temperature == 0.0:
+        params["top_p"] = top_p
+        logger.debug(
+            "Using top_p for inference (temperature is 0.0)", extra={"top_p": top_p}
+        )
+    else:
+        params["temperature"] = temperature
+        logger.debug(
+            "Using temperature for inference", extra={"temperature": temperature}
+        )
+
+    return params
+
+
 def _prepare_prompt_content(
     prompt: str | Message | Image.Image,
     page_images: list[bytes] | None,
@@ -1001,11 +1032,16 @@ async def structured_output_async(
 
     # Track token usage
     token_usage = _initialize_token_usage()
+
+    # Get inference params ensuring temperature and top_p are mutually exclusive
+    inference_params = _get_inference_params(
+        temperature=config.extraction.temperature, top_p=config.extraction.top_p
+    )
+
     agent = Agent(
         model=BedrockModel(
             **model_config,
-            temperature=config.extraction.temperature,
-            top_p=config.extraction.top_p,
+            **inference_params,
         ), # pyright: ignore[reportArgumentType]
         tools=tools,
         system_prompt=final_system_prompt,
@@ -1078,8 +1114,17 @@ async def structured_output_async(
         connect_timeout=connect_timeout,
         read_timeout=read_timeout,
     )
+
+    # Get inference params for review agent ensuring temperature and top_p are mutually exclusive
+    review_inference_params = _get_inference_params(
+        temperature=config.extraction.temperature, top_p=config.extraction.top_p
+    )
+
     agent = Agent(
-        model=BedrockModel(**review_model_config), # pyright: ignore[reportArgumentType]
+        model=BedrockModel(
+            **review_model_config,
+            **review_inference_params,
+        ), # pyright: ignore[reportArgumentType]
         tools=tools,
         system_prompt=f"{final_system_prompt}",
         state={
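The helper resolves the conflict by passing only one of the two sampling parameters to BedrockModel: top_p is used only when temperature is exactly 0.0; otherwise temperature is used and top_p is dropped. A minimal standalone sketch of that selection behavior, with the debug logging removed and illustrative values rather than the repository's actual config:

def _get_inference_params(temperature: float, top_p: float | None) -> dict[str, float]:
    # Simplified copy of the helper added in this commit, without logging.
    if top_p is not None and temperature == 0.0:
        return {"top_p": top_p}
    return {"temperature": temperature}

# Only one key is ever returned, so BedrockModel(**model_config, **params)
# never receives temperature and top_p together.
assert _get_inference_params(temperature=0.0, top_p=0.9) == {"top_p": 0.9}
assert _get_inference_params(temperature=0.7, top_p=0.9) == {"temperature": 0.7}
assert _get_inference_params(temperature=0.0, top_p=None) == {"temperature": 0.0}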

0 commit comments