```diff
@@ -30,6 +30,7 @@
 from .._streaming import Stream, AsyncStream
 from .._base_client import make_request_options
 from ..types.embeddings_response import EmbeddingsResponse
+from ..types.shared_params.message import Message
 from ..types.inference_completion_response import InferenceCompletionResponse
 from ..types.shared_params.sampling_params import SamplingParams
 from ..types.shared_params.interleaved_content import InterleavedContent
```
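The new import re-points `Message` at the shared params module, so the request params module and other resources can reference one definition instead of each declaring its own. A minimal sketch of what callers pass, assuming the package root is `llama_stack_client` and that `Message` is, as in Stainless-style SDKs, a TypedDict union keyed by `role`, so plain dict literals conform:

```python
from typing import Iterable

# Assumption: Message is a role-tagged TypedDict union
# (UserMessage | SystemMessage | ...), so dict literals satisfy it.
from llama_stack_client.types.shared_params.message import Message

messages: Iterable[Message] = [
    {"role": "system", "content": "You are a terse assistant."},
    {"role": "user", "content": "Summarize the change in one line."},
]
```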
```diff
@@ -62,7 +63,7 @@ def with_streaming_response(self) -> InferenceResourceWithStreamingResponse:
     def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
         response_format: inference_chat_completion_params.ResponseFormat | NotGiven = NOT_GIVEN,
@@ -106,7 +107,7 @@ def chat_completion(
     def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         stream: Literal[True],
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
```
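These signatures are arms of an `@overload` set: the first arm returns the plain response, the `stream: Literal[True]` arm returns a `Stream`, and the `stream: bool` arm that follows covers callers whose flag is only known at runtime. A self-contained sketch of the pattern, with placeholder types standing in for the SDK's real response and stream classes:

```python
from typing import Iterator, Literal, Union, overload


class ChatCompletionResponse:  # placeholder for the real response type
    pass


class Stream:  # placeholder for the SDK's Stream[...] wrapper
    def __iter__(self) -> Iterator[str]:
        yield "chunk"


@overload
def chat_completion(*, stream: Literal[False] = False) -> ChatCompletionResponse: ...
@overload
def chat_completion(*, stream: Literal[True]) -> Stream: ...
@overload
def chat_completion(*, stream: bool) -> Union[ChatCompletionResponse, Stream]: ...


def chat_completion(*, stream: bool = False) -> Union[ChatCompletionResponse, Stream]:
    # One runtime implementation; the overloads only narrow the static type.
    return Stream() if stream else ChatCompletionResponse()
```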
```diff
@@ -150,7 +151,7 @@ def chat_completion(
     def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         stream: bool,
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
@@ -194,7 +195,7 @@ def chat_completion(
     def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
         response_format: inference_chat_completion_params.ResponseFormat | NotGiven = NOT_GIVEN,
```
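That closes out the sync overloads. A hypothetical sync call site with the shared type in place; the client construction, base URL, and model id here are assumptions, not part of the diff:

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local server

response = client.inference.chat_completion(
    model_id="my-model",  # placeholder model id
    messages=[{"role": "user", "content": "Hello!"}],
)
```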
```diff
@@ -472,7 +473,7 @@ def with_streaming_response(self) -> AsyncInferenceResourceWithStreamingResponse
     async def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
         response_format: inference_chat_completion_params.ResponseFormat | NotGiven = NOT_GIVEN,
@@ -516,7 +517,7 @@ async def chat_completion(
     async def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         stream: Literal[True],
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
@@ -560,7 +561,7 @@ async def chat_completion(
     async def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
         model_id: str,
         stream: bool,
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
@@ -604,7 +605,7 @@ async def chat_completion(
     async def chat_completion(
         self,
         *,
-        messages: Iterable[inference_chat_completion_params.Message],
+        messages: Iterable[Message],
        model_id: str,
         logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
         response_format: inference_chat_completion_params.ResponseFormat | NotGiven = NOT_GIVEN,
```
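The async half mirrors the sync overloads one-for-one. A corresponding usage sketch, again with the client setup and model id assumed; with `stream=True` the awaited call should hand back an async stream to iterate:

```python
import asyncio

from llama_stack_client import AsyncLlamaStackClient


async def main() -> None:
    client = AsyncLlamaStackClient(base_url="http://localhost:8321")  # assumed
    stream = await client.inference.chat_completion(
        model_id="my-model",  # placeholder model id
        messages=[{"role": "user", "content": "Hello!"}],
        stream=True,
    )
    async for chunk in stream:
        print(chunk)  # each chunk is one streamed completion event


asyncio.run(main())
```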