Skip to content

Commit a83c749

Browse files
committed
Fix code style: Apply ruff linting fixes
- Break long lines to comply with 88 character limit
- Fix import sorting in test files
- Improve code readability with proper line breaks
1 parent c9e030d commit a83c749

File tree

4 files changed

+1367
-37
lines changed

4 files changed

+1367
-37
lines changed

libs/oci/langchain_oci/chat_models/oci_generative_ai.py

Lines changed: 20 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -367,9 +367,7 @@ def messages_to_oci_params(
367367
self.oci_chat_message[self.get_role(msg)](
368368
tool_results=[
369369
self.oci_tool_result(
370-
call=self.oci_tool_call(
371-
name=msg.name, parameters={}
372-
),
370+
call=self.oci_tool_call(name=msg.name, parameters={}),
373371
outputs=[{"output": msg.content}],
374372
)
375373
],
@@ -381,9 +379,17 @@ def messages_to_oci_params(
381379
for i, message in enumerate(messages[::-1]):
382380
current_turn.append(message)
383381
if isinstance(message, HumanMessage):
384-
if len(messages) > i and isinstance(messages[len(messages) - i - 2], ToolMessage):
385-
# add dummy message REPEATING the tool_result to avoid the error about ToolMessage needing to be followed by an AI message
386-
oci_chat_history.append(self.oci_chat_message['CHATBOT'](message=messages[len(messages) - i - 2].content))
382+
if len(messages) > i and isinstance(
383+
messages[len(messages) - i - 2], ToolMessage
384+
):
385+
# add dummy message REPEATING the tool_result to avoid
386+
# the error about ToolMessage needing to be followed
387+
# by an AI message
388+
oci_chat_history.append(
389+
self.oci_chat_message["CHATBOT"](
390+
message=messages[len(messages) - i - 2].content
391+
)
392+
)
387393
break
388394
current_turn = list(reversed(current_turn))
389395

@@ -713,8 +719,8 @@ def messages_to_oci_params(
713719
else:
714720
oci_message = self.oci_chat_message[role](content=tool_content)
715721
elif isinstance(message, AIMessage) and (
716-
message.tool_calls or
717-
message.additional_kwargs.get("tool_calls")):
722+
message.tool_calls or message.additional_kwargs.get("tool_calls")
723+
):
718724
# Process content and tool calls for assistant messages
719725
content = self._process_message_content(message.content)
720726
tool_calls = []
@@ -741,10 +747,11 @@ def messages_to_oci_params(
741747
"api_format": self.chat_api_format,
742748
}
743749

744-
# BUGFIX: If tool results have been received and tools are bound, set tool_choice to "none"
745-
# to prevent the model from making additional tool calls in a loop.
746-
# This addresses a known issue with Meta Llama models that continue calling tools
747-
# even after receiving results.
750+
# BUGFIX: If tool results have been received and tools are bound,
751+
# set tool_choice to "none" to prevent the model from making
752+
# additional tool calls in a loop.
753+
# This addresses a known issue with Meta Llama models that
754+
# continue calling tools even after receiving results.
748755
has_tool_results = any(isinstance(msg, ToolMessage) for msg in messages)
749756
if has_tool_results and "tools" in kwargs and "tool_choice" not in kwargs:
750757
result["tool_choice"] = self.oci_tool_choice_none()
@@ -944,6 +951,7 @@ def process_stream_tool_calls(
944951

945952
class MetaProvider(GenericProvider):
946953
"""Provider for Meta models. This provider is for backward compatibility."""
954+
947955
pass
948956

949957

0 commit comments

Comments (0)