
Commit d29261a

Virtuoso633 authored and xuanyang15 committed
feat(models): Enable multi-provider support for Claude and LiteLLM
Merges: #2810
Co-authored-by: Xuan Yang <xygoogle@google.com>
PiperOrigin-RevId: 836706608
1 parent e6be5bc commit d29261a

File tree

5 files changed: +156 -7 lines changed

  src/google/adk/models/__init__.py
  src/google/adk/models/lite_llm.py
  src/google/adk/models/registry.py
  tests/unittests/agents/test_llm_agent_fields.py
  tests/unittests/models/test_models.py
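
At a glance, the change lets an LlmAgent be built from a plain model-name string for Gemini, Claude, or a LiteLLM "provider/model" prefix. Below is a minimal sketch of that behavior, assuming the optional packages are installed (pip install google-adk[extensions]); the agent names are illustrative.

from google.adk.agents.llm_agent import LlmAgent

# Gemini strings resolve to the built-in Gemini class, as before.
gemini_agent = LlmAgent(name='gemini_agent', model='gemini-1.5-flash')

# Claude strings resolve to the Claude wrapper (requires anthropic).
claude_agent = LlmAgent(name='claude_agent', model='claude-3-5-sonnet-v2@20241022')

# "provider/model" strings resolve to LiteLlm (requires litellm).
openai_agent = LlmAgent(name='openai_agent', model='openai/gpt-4o')

print(type(openai_agent.canonical_model).__name__)  # LiteLlm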

src/google/adk/models/__init__.py

Lines changed: 20 additions & 0 deletions
@@ -33,3 +33,23 @@
 LLMRegistry.register(Gemini)
 LLMRegistry.register(Gemma)
 LLMRegistry.register(ApigeeLlm)
+
+# Optionally register Claude if anthropic package is installed
+try:
+  from .anthropic_llm import Claude
+
+  LLMRegistry.register(Claude)
+  __all__.append('Claude')
+except Exception:
+  # Claude support requires: pip install google-adk[extensions]
+  pass
+
+# Optionally register LiteLlm if litellm package is installed
+try:
+  from .lite_llm import LiteLlm
+
+  LLMRegistry.register(LiteLlm)
+  __all__.append('LiteLlm')
+except Exception:
+  # LiteLLM support requires: pip install google-adk[extensions]
+  pass
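
A quick way to check whether these optional registrations took effect in a given environment is to inspect the module's __all__ (a sketch; the names are appended only when the corresponding import above succeeds):

from google.adk import models

# True only if the anthropic / litellm import above succeeded at import time.
print('Claude' in models.__all__)
print('LiteLlm' in models.__all__)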

src/google/adk/models/lite_llm.py

Lines changed: 11 additions & 3 deletions
@@ -1388,11 +1388,19 @@ async def generate_content_async(
   def supported_models(cls) -> list[str]:
     """Provides the list of supported models.
 
-    LiteLlm supports all models supported by litellm. We do not keep track of
-    these models here. So we return an empty list.
+    This registers common provider prefixes. LiteLlm can handle many more,
+    but these patterns activate the integration for the most common use cases.
+    See https://docs.litellm.ai/docs/providers for a full list.
 
     Returns:
      A list of supported models.
    """
-    return []
+    return [
+        # For OpenAI models (e.g., "openai/gpt-4o")
+        r"openai/.*",
+        # For Groq models via Groq API (e.g., "groq/llama3-70b-8192")
+        r"groq/.*",
+        # For Anthropic models (e.g., "anthropic/claude-3-opus-20240229")
+        r"anthropic/.*",
+    ]
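
LLMRegistry.resolve matches these prefixes with re.fullmatch (see registry.py below), so "openai/gpt-4o" activates LiteLlm while an unrecognized prefix falls through to the error path. A small illustration of the matching logic, assuming litellm is installed so the patterns above were registered; the matches helper is hypothetical, added only for this sketch:

import re

patterns = [r"openai/.*", r"groq/.*", r"anthropic/.*"]

def matches(model: str) -> bool:
  # Mirrors the registry's re.compile(regex).fullmatch(model) check.
  return any(re.compile(p).fullmatch(model) for p in patterns)

print(matches('groq/llama3-70b-8192'))     # True -> resolves to LiteLlm
print(matches('unknown-provider/gpt-4o'))  # False -> registry raises ValueError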

src/google/adk/models/registry.py

Lines changed: 23 additions & 1 deletion
@@ -99,4 +99,26 @@ def resolve(model: str) -> type[BaseLlm]:
       if re.compile(regex).fullmatch(model):
         return llm_class
 
-    raise ValueError(f'Model {model} not found.')
+    # Provide helpful error messages for known patterns
+    error_msg = f'Model {model} not found.'
+
+    # Check if it matches known patterns that require optional dependencies
+    if re.match(r'^claude-', model):
+      error_msg += (
+          '\n\nClaude models require the anthropic package.'
+          '\nInstall it with: pip install google-adk[extensions]'
+          '\nOr: pip install anthropic>=0.43.0'
+      )
+    elif '/' in model:
+      # Any model with provider/model format likely needs LiteLLM
+      error_msg += (
+          '\n\nProvider-style models (e.g., "provider/model-name") require'
+          ' the litellm package.'
+          '\nInstall it with: pip install google-adk[extensions]'
+          '\nOr: pip install litellm>=1.75.5'
+          '\n\nSupported providers include: openai, groq, anthropic, and 100+'
+          ' others.'
+          '\nSee https://docs.litellm.ai/docs/providers for a full list.'
+      )
+
+    raise ValueError(error_msg)
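
With this in place, a caller that passes a provider-style name with no matching registration sees the enriched message instead of the bare "not found" error. A sketch of that path (the model name is illustrative):

from google.adk.models.registry import LLMRegistry

try:
  LLMRegistry.resolve('unknown-provider/some-model')
except ValueError as err:
  # Starts with "Model unknown-provider/some-model not found." and, because the
  # name contains "/", appends the litellm install hints shown above.
  print(err)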

tests/unittests/agents/test_llm_agent_fields.py

Lines changed: 47 additions & 0 deletions
@@ -22,6 +22,9 @@
 from google.adk.agents.invocation_context import InvocationContext
 from google.adk.agents.llm_agent import LlmAgent
 from google.adk.agents.readonly_context import ReadonlyContext
+from google.adk.models.anthropic_llm import Claude
+from google.adk.models.google_llm import Gemini
+from google.adk.models.lite_llm import LiteLlm
 from google.adk.models.llm_request import LlmRequest
 from google.adk.models.registry import LLMRegistry
 from google.adk.sessions.in_memory_session_service import InMemorySessionService
@@ -411,3 +414,47 @@ async def test_handle_vais_only(self):
     assert len(tools) == 1
     assert tools[0].name == 'vertex_ai_search'
     assert tools[0].__class__.__name__ == 'VertexAiSearchTool'
+
+
+# Tests for multi-provider model support via string model names
+@pytest.mark.parametrize(
+    'model_name',
+    [
+        'gemini-1.5-flash',
+        'gemini-2.0-flash-exp',
+    ],
+)
+def test_agent_with_gemini_string_model(model_name):
+  """Test that Agent accepts Gemini model strings and resolves to Gemini."""
+  agent = LlmAgent(name='test_agent', model=model_name)
+  assert isinstance(agent.canonical_model, Gemini)
+  assert agent.canonical_model.model == model_name
+
+
+@pytest.mark.parametrize(
+    'model_name',
+    [
+        'claude-3-5-sonnet-v2@20241022',
+        'claude-sonnet-4@20250514',
+    ],
+)
+def test_agent_with_claude_string_model(model_name):
+  """Test that Agent accepts Claude model strings and resolves to Claude."""
+  agent = LlmAgent(name='test_agent', model=model_name)
+  assert isinstance(agent.canonical_model, Claude)
+  assert agent.canonical_model.model == model_name
+
+
+@pytest.mark.parametrize(
+    'model_name',
+    [
+        'openai/gpt-4o',
+        'groq/llama3-70b-8192',
+        'anthropic/claude-3-opus-20240229',
+    ],
+)
+def test_agent_with_litellm_string_model(model_name):
+  """Test that Agent accepts LiteLLM provider strings."""
+  agent = LlmAgent(name='test_agent', model=model_name)
+  assert isinstance(agent.canonical_model, LiteLlm)
+  assert agent.canonical_model.model == model_name

tests/unittests/models/test_models.py

Lines changed: 55 additions & 3 deletions
@@ -15,7 +15,7 @@
 from google.adk import models
 from google.adk.models.anthropic_llm import Claude
 from google.adk.models.google_llm import Gemini
-from google.adk.models.registry import LLMRegistry
+from google.adk.models.lite_llm import LiteLlm
 import pytest
 
 
@@ -34,6 +34,7 @@
     ],
 )
 def test_match_gemini_family(model_name):
+  """Test that Gemini models are resolved correctly."""
   assert models.LLMRegistry.resolve(model_name) is Gemini
 
 
@@ -51,12 +52,63 @@ def test_match_gemini_family(model_name):
     ],
 )
 def test_match_claude_family(model_name):
-  LLMRegistry.register(Claude)
-
+  """Test that Claude models are resolved correctly."""
   assert models.LLMRegistry.resolve(model_name) is Claude
 
 
+@pytest.mark.parametrize(
+    'model_name',
+    [
+        'openai/gpt-4o',
+        'openai/gpt-4o-mini',
+        'groq/llama3-70b-8192',
+        'groq/mixtral-8x7b-32768',
+        'anthropic/claude-3-opus-20240229',
+        'anthropic/claude-3-5-sonnet-20241022',
+    ],
+)
+def test_match_litellm_family(model_name):
+  """Test that LiteLLM models are resolved correctly."""
+  assert models.LLMRegistry.resolve(model_name) is LiteLlm
+
+
 def test_non_exist_model():
   with pytest.raises(ValueError) as e_info:
     models.LLMRegistry.resolve('non-exist-model')
   assert 'Model non-exist-model not found.' in str(e_info.value)
+
+
+def test_helpful_error_for_claude_without_extensions():
+  """Test that missing Claude models show helpful install instructions.
+
+  Note: This test may pass even when anthropic IS installed, because it
+  only checks the error message format when a model is not found.
+  """
+  # Use a non-existent Claude model variant to trigger error
+  with pytest.raises(ValueError) as e_info:
+    models.LLMRegistry.resolve('claude-nonexistent-model-xyz')
+
+  error_msg = str(e_info.value)
+  # The error should mention anthropic package and installation instructions
+  # These checks work whether or not anthropic is actually installed
+  assert 'Model claude-nonexistent-model-xyz not found' in error_msg
+  assert 'anthropic package' in error_msg
+  assert 'pip install' in error_msg
+
+
+def test_helpful_error_for_litellm_without_extensions():
+  """Test that missing LiteLLM models show helpful install instructions.
+
+  Note: This test may pass even when litellm IS installed, because it
+  only checks the error message format when a model is not found.
+  """
+  # Use a non-existent provider to trigger error
+  with pytest.raises(ValueError) as e_info:
+    models.LLMRegistry.resolve('unknown-provider/gpt-4o')
+
+  error_msg = str(e_info.value)
+  # The error should mention litellm package for provider-style models
+  assert 'Model unknown-provider/gpt-4o not found' in error_msg
+  assert 'litellm package' in error_msg
+  assert 'pip install' in error_msg
+  assert 'Provider-style models' in error_msg
