Commit 1febc15

fix: enforce tier-aware max_tokens in AutoAgents LLM calls
CodeGraphChatAdapter was creating GenerationConfig with max_tokens: None, ignoring tier-based token limits. This caused all responses to use default limits regardless of tier.

Changes:
- Store tier in CodeGraphChatAdapter
- Add get_max_tokens() method with env var override
- Set max_tokens in GenerationConfig based on tier

Token limits by tier:
- Small: 2,048 tokens
- Medium: 4,096 tokens
- Large: 8,192 tokens
- Massive: 16,384 tokens

Environment variable override:
- Set MCP_CODE_AGENT_MAX_OUTPUT_TOKENS to override tier defaults
- Useful for testing or specific deployment constraints

This ensures analysis output length matches tier expectations.
1 parent fa91915 commit 1febc15

1 file changed (+26 -4 lines)
crates/codegraph-mcp/src/autoagents/agent_builder.rs (26 additions, 4 deletions)

@@ -34,11 +34,33 @@ pub(crate) fn convert_messages(messages: &[Message]) -> Vec<ChatMessage> {
 /// Adapter that bridges codegraph_ai::LLMProvider to AutoAgents ChatProvider
 pub struct CodeGraphChatAdapter {
     provider: Arc<dyn CodeGraphLLM>,
+    tier: ContextTier,
 }
 
 impl CodeGraphChatAdapter {
-    pub fn new(provider: Arc<dyn CodeGraphLLM>) -> Self {
-        Self { provider }
+    pub fn new(provider: Arc<dyn CodeGraphLLM>, tier: ContextTier) -> Self {
+        Self { provider, tier }
+    }
+
+    /// Get tier-aware max_tokens, respecting environment variable override
+    fn get_max_tokens(&self) -> Option<usize> {
+        // Check for environment variable override first
+        if let Ok(val) = std::env::var("MCP_CODE_AGENT_MAX_OUTPUT_TOKENS") {
+            if let Ok(tokens) = val.parse::<usize>() {
+                tracing::info!("Using MCP_CODE_AGENT_MAX_OUTPUT_TOKENS={}", tokens);
+                return Some(tokens);
+            }
+        }
+
+        // Use tier-based defaults
+        let tokens = match self.tier {
+            ContextTier::Small => 2048,
+            ContextTier::Medium => 4096,
+            ContextTier::Large => 8192,
+            ContextTier::Massive => 16384,
+        };
+
+        Some(tokens)
     }
 }
 
@@ -80,7 +102,7 @@ impl ChatProvider for CodeGraphChatAdapter {
         // Call CodeGraph LLM provider with structured output support
         let config = codegraph_ai::llm_provider::GenerationConfig {
             temperature: 0.1,
-            max_tokens: None,
+            max_tokens: self.get_max_tokens(),
             response_format,
             ..Default::default()
         };
@@ -357,7 +379,7 @@ impl CodeGraphAgentBuilder {
         analysis_type: AnalysisType,
     ) -> Self {
         Self {
-            llm_adapter: Arc::new(CodeGraphChatAdapter::new(llm_provider)),
+            llm_adapter: Arc::new(CodeGraphChatAdapter::new(llm_provider, tier)),
             tool_factory: GraphToolFactory::new(tool_executor),
             tier,
             analysis_type,
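
A minimal sketch of how the override precedence could be exercised in a unit test. Assumptions not in this commit: a hypothetical mock_provider() helper returning an Arc<dyn CodeGraphLLM> test double, placement in the same module as CodeGraphChatAdapter (get_max_tokens is private), and a pre-2024 Rust edition where std::env::set_var is a safe fn. Since the env var is process-global, such a test must not run in parallel with others touching it.

#[cfg(test)]
mod max_tokens_tests {
    use super::*;

    // NOTE: mock_provider() is a hypothetical helper assumed to return
    // an Arc<dyn CodeGraphLLM> test double; it is not part of this commit.
    #[test]
    fn env_override_takes_precedence_over_tier_default() {
        let adapter = CodeGraphChatAdapter::new(mock_provider(), ContextTier::Small);

        // Without the env var, the tier default applies.
        std::env::remove_var("MCP_CODE_AGENT_MAX_OUTPUT_TOKENS");
        assert_eq!(adapter.get_max_tokens(), Some(2048));

        // A parseable override wins over the tier default.
        std::env::set_var("MCP_CODE_AGENT_MAX_OUTPUT_TOKENS", "1234");
        assert_eq!(adapter.get_max_tokens(), Some(1234));

        // An unparseable override falls through to the tier default,
        // mirroring the nested `if let` in get_max_tokens.
        std::env::set_var("MCP_CODE_AGENT_MAX_OUTPUT_TOKENS", "not-a-number");
        assert_eq!(adapter.get_max_tokens(), Some(2048));

        std::env::remove_var("MCP_CODE_AGENT_MAX_OUTPUT_TOKENS");
    }
}

The fall-through behavior is worth noting: a malformed override silently degrades to the tier default rather than erroring, which suits deployments where the variable may be set by tooling.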
