23 commits
13b3602
Add LangChain 1.x support and comprehensive integration tests
fede-kamel Nov 26, 2025
a0db8c5
Fix CI: Update poetry.lock and fix dependency conflicts
fede-kamel Nov 26, 2025
f515cd3
Fix linting issues in integration tests
fede-kamel Nov 26, 2025
6674ef6
Require langchain-core>=1.1.0 for ModelProfileRegistry
fede-kamel Nov 26, 2025
0864c9e
Fix mypy type errors for LangChain 1.x compatibility
fede-kamel Nov 26, 2025
defa0b7
Restore type: ignore for mock HTTPError responses
fede-kamel Nov 26, 2025
42c2358
Add comprehensive integration tests for OpenAI models
fede-kamel Dec 1, 2025
63c0426
Fix linting issues in test files
fede-kamel Dec 1, 2025
68231af
Update CI matrix to test Python 3.9, 3.12, 3.13
fede-kamel Dec 2, 2025
7a15cb2
Restore backward compatibility with LangChain 0.3.x
fede-kamel Dec 2, 2025
d74b49a
Fix test_message_text_property to work with both LangChain 0.3.x and 1.x
fede-kamel Dec 2, 2025
20a1327
Skip JSON mode tests for OpenAI models due to 500 errors
fede-kamel Dec 2, 2025
4185b91
Fix mypy type errors for bind() return type narrowing
fede-kamel Dec 2, 2025
eac4d00
Update poetry.lock for Python 3.9 support
fede-kamel Dec 2, 2025
6f3e9b8
Fix Python 3.9 compatibility
fede-kamel Dec 2, 2025
c693948
Remove unused type ignore comments for mypy
fede-kamel Dec 2, 2025
5bf6e8e
Support both LangChain 0.3.x and 1.x via Python version markers
fede-kamel Dec 2, 2025
5eb591e
Fix Python 3.9 compatibility issues in tests
fede-kamel Dec 2, 2025
3499780
Fix mypy unreachable error code in test
fede-kamel Dec 2, 2025
d7be806
Fix get_min_versions.py to respect Python version markers
fede-kamel Dec 2, 2025
580750d
Add clarifying comment for type annotation in bind_tools
fede-kamel Dec 3, 2025
304d33a
Move test_openai_model.py to integration tests directory
fede-kamel Dec 3, 2025
915c40a
Convert test_openai_model.py to proper pytest format
fede-kamel Dec 5, 2025
59 changes: 31 additions & 28 deletions .github/scripts/get_min_versions.py
@@ -47,25 +47,43 @@ def get_min_version_from_toml(toml_path: str):
# Parse dependencies list into a dictionary
# Format: "package-name>=x.x.x,<y.y.y" or "package-name>=x.x.x; python_version < '3.10'"
dependencies = {}
python_version = f"{sys.version_info.major}.{sys.version_info.minor}"

for dep in dependencies_list:
# Remove environment markers (everything after semicolon)
dep_without_marker = dep.split(";")[0].strip()
# Check if there's a Python version marker
if ";" in dep:
dep_without_marker, marker = dep.split(";", 1)
dep_without_marker = dep_without_marker.strip()
marker = marker.strip()

# Check if this dependency applies to current Python version
# Handle python_version < '3.10' and python_version >= '3.10' markers
applies_to_current = True
if "python_version" in marker:
if "<" in marker and not ">=" in marker:
# python_version < 'X.Y'
match = re.search(r"python_version\s*<\s*['\"](\d+\.\d+)['\"]", marker)
if match:
max_version = match.group(1)
applies_to_current = parse_version(python_version) < parse_version(max_version)
elif ">=" in marker:
# python_version >= 'X.Y'
match = re.search(r"python_version\s*>=\s*['\"](\d+\.\d+)['\"]", marker)
if match:
min_version_marker = match.group(1)
applies_to_current = parse_version(python_version) >= parse_version(min_version_marker)

if not applies_to_current:
continue
else:
dep_without_marker = dep.strip()

# Extract package name and version spec
match = re.match(r"^([a-zA-Z0-9_-]+)(.*)$", dep_without_marker)
if match:
pkg_name = match.group(1)
version_spec = match.group(2)

# If this package already exists, collect both version specs
if pkg_name in dependencies:
# Store as a list to handle multiple version constraints
if isinstance(dependencies[pkg_name], list):
dependencies[pkg_name].append(version_spec)
else:
dependencies[pkg_name] = [dependencies[pkg_name], version_spec]
else:
dependencies[pkg_name] = version_spec
dependencies[pkg_name] = version_spec

# Initialize a dictionary to store the minimum versions
min_versions = {}
@@ -74,23 +92,8 @@ def get_min_version_from_toml(toml_path: str):
for lib in MIN_VERSION_LIBS:
# Check if the lib is present in the dependencies
if lib in dependencies:
# Get the version string(s)
version_spec = dependencies[lib]

# Handle list format (multiple version constraints for different Python versions)
if isinstance(version_spec, list):
# Extract all version strings from the list and find the minimum
versions = []
for spec in version_spec:
if spec:
versions.append(get_min_version(spec))

# If we found versions, use the minimum one
if versions:
min_version = min(versions, key=parse_version)
min_versions[lib] = min_version
elif isinstance(version_spec, str) and version_spec:
# Handle simple string format
if version_spec:
min_version = get_min_version(version_spec)
min_versions[lib] = min_version

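For reference (not part of the diff): a minimal, self-contained sketch of the Python-version-marker filtering introduced above, assuming parse_version is packaging.version.parse as in the script; the dependency strings below are only illustrative.

import re
import sys

from packaging.version import parse as parse_version


def applies_to_current_python(dep: str) -> bool:
    """Return True if the dependency's python_version marker (if any)
    matches the interpreter running the script."""
    if ";" not in dep:
        return True
    _, marker = dep.split(";", 1)
    current = parse_version(f"{sys.version_info.major}.{sys.version_info.minor}")
    lt = re.search(r"python_version\s*<\s*['\"](\d+\.\d+)['\"]", marker)
    if lt:
        return current < parse_version(lt.group(1))
    ge = re.search(r"python_version\s*>=\s*['\"](\d+\.\d+)['\"]", marker)
    if ge:
        return current >= parse_version(ge.group(1))
    return True  # no recognized marker: keep the dependency


# On Python 3.9 only the first spec is kept; on 3.10+ only the second.
deps = [
    "langchain-core>=0.3.0,<1.0.0; python_version < '3.10'",
    "langchain-core>=1.1.0; python_version >= '3.10'",
]
print([d for d in deps if applies_to_current_python(d)])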
1 change: 1 addition & 0 deletions .github/workflows/_test.yml
@@ -22,6 +22,7 @@ jobs:
python-version:
- "3.9"
- "3.12"
- "3.13"
name: "make test #${{ matrix.python-version }}"
steps:
- uses: actions/checkout@v4
19 changes: 15 additions & 4 deletions libs/oci/langchain_oci/chat_models/oci_data_science.py
@@ -31,7 +31,12 @@
agenerate_from_stream,
generate_from_stream,
)
from langchain_core.messages import AIMessageChunk, BaseMessage, BaseMessageChunk
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
BaseMessage,
BaseMessageChunk,
)
from langchain_core.output_parsers import (
JsonOutputParser,
PydanticOutputParser,
@@ -765,11 +770,17 @@ def _process_response(self, response_json: dict) -> ChatResult:

def bind_tools(
self,
tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
tools: Sequence[Union[Dict[str, Any], type, Callable, BaseTool]],
# Type annotation matches LangChain's BaseChatModel API.
# Runtime validation occurs in convert_to_openai_tool().
*,
tool_choice: Optional[str] = None,
**kwargs: Any,
) -> Runnable[LanguageModelInput, BaseMessage]:
) -> Runnable[LanguageModelInput, AIMessage]:
formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
return super().bind(tools=formatted_tools, **kwargs)
if tool_choice is not None:
kwargs["tool_choice"] = tool_choice
return super().bind(tools=formatted_tools, **kwargs) # type: ignore[return-value]


class ChatOCIModelDeploymentVLLM(ChatOCIModelDeployment):
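As a quick illustration (not from the PR), a hedged usage sketch of the updated bind_tools on ChatOCIModelDeployment: the class and method come from this module, but the import path, endpoint, and constructor arguments below are placeholders/assumptions.

from langchain_oci import ChatOCIModelDeployment  # assumed import path


def get_weather(city: str) -> str:
    """Return the current weather for a city."""
    return f"Sunny in {city}"


# Placeholder endpoint; a real value would be the model deployment's predict URL.
llm = ChatOCIModelDeployment(endpoint="https://<model-deployment-url>/predict")

# bind_tools() converts the callable via convert_to_openai_tool() and now
# forwards tool_choice to the underlying bind() call; the result is typed
# as Runnable[LanguageModelInput, AIMessage].
llm_with_tools = llm.bind_tools([get_weather], tool_choice="auto")
reply = llm_with_tools.invoke("What's the weather in Austin?")
print(reply.tool_calls)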
12 changes: 7 additions & 5 deletions libs/oci/langchain_oci/chat_models/oci_generative_ai.py
@@ -1236,14 +1236,16 @@ def _prepare_request(

def bind_tools(
self,
tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
tools: Sequence[Union[Dict[str, Any], type, Callable, BaseTool]],
# Type annotation matches LangChain's BaseChatModel API.
# Runtime validation occurs in convert_to_openai_tool().
*,
tool_choice: Optional[
Union[dict, str, Literal["auto", "none", "required", "any"], bool]
] = None,
parallel_tool_calls: Optional[bool] = None,
**kwargs: Any,
) -> Runnable[LanguageModelInput, BaseMessage]:
) -> Runnable[LanguageModelInput, AIMessage]:
"""Bind tool-like objects to this chat model.

Assumes model is compatible with Meta's tool-calling API.
@@ -1285,7 +1287,7 @@ def bind_tools(
)
kwargs["is_parallel_tool_calls"] = True

return super().bind(tools=formatted_tools, **kwargs)
return super().bind(tools=formatted_tools, **kwargs) # type: ignore[return-value]

def with_structured_output(
self,
@@ -1358,7 +1360,7 @@ def with_structured_output(
key_name=tool_name, first_tool_only=True
)
elif method == "json_mode":
llm = self.bind(response_format={"type": "JSON_OBJECT"})
llm = self.bind(response_format={"type": "JSON_OBJECT"}) # type: ignore[assignment]
output_parser = (
PydanticOutputParser(pydantic_object=schema)
if is_pydantic_schema
@@ -1382,7 +1384,7 @@ def with_structured_output(
json_schema=response_json_schema
)

llm = self.bind(response_format=response_format_obj)
llm = self.bind(response_format=response_format_obj) # type: ignore[assignment]
if is_pydantic_schema:
output_parser = PydanticOutputParser(pydantic_object=schema)
else:
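For illustration (not from the PR), a hedged sketch of the json_mode branch of with_structured_output shown above, assuming the chat class in this module is ChatOCIGenAI and treating the model_id, service_endpoint, and compartment_id values as placeholders.

from langchain_oci import ChatOCIGenAI  # assumed import path
from pydantic import BaseModel


class Person(BaseModel):
    name: str
    age: int


llm = ChatOCIGenAI(
    model_id="meta.llama-3.3-70b-instruct",  # placeholder model id
    service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
    compartment_id="ocid1.compartment.oc1..example",  # placeholder OCID
)

# method="json_mode" binds response_format={"type": "JSON_OBJECT"} and parses
# the reply with PydanticOutputParser, per the branch above.
structured_llm = llm.with_structured_output(Person, method="json_mode")
result = structured_llm.invoke("Extract the person: Alice is 30 years old.")
print(result)  # e.g. Person(name='Alice', age=30)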