17 changes: 8 additions & 9 deletions src/llama_stack_client/resources/agents/turn.py
@@ -25,7 +25,6 @@
from ..._base_client import make_request_options
from ...types.agents import turn_create_params
from ...types.agents.turn import Turn
from ...types.agents.agent_turn_response_stream_chunk import AgentTurnResponseStreamChunk

__all__ = ["TurnResource", "AsyncTurnResource"]

@@ -95,7 +94,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Stream[AgentTurnResponseStreamChunk]:
) -> Stream[Turn]:
"""
Args:
extra_headers: Send extra headers
@@ -124,7 +123,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Turn | Stream[AgentTurnResponseStreamChunk]:
) -> Turn | Stream[Turn]:
"""
Args:
extra_headers: Send extra headers
@@ -153,7 +152,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Turn | Stream[AgentTurnResponseStreamChunk]:
) -> Turn | Stream[Turn]:
if not agent_id:
raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
if not session_id:
@@ -174,7 +173,7 @@
),
cast_to=Turn,
stream=stream or False,
stream_cls=Stream[AgentTurnResponseStreamChunk],
stream_cls=Stream[Turn],
)

def retrieve(
@@ -280,7 +279,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> AsyncStream[AgentTurnResponseStreamChunk]:
) -> AsyncStream[Turn]:
"""
Args:
extra_headers: Send extra headers
@@ -309,7 +308,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Turn | AsyncStream[AgentTurnResponseStreamChunk]:
) -> Turn | AsyncStream[Turn]:
"""
Args:
extra_headers: Send extra headers
@@ -338,7 +337,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> Turn | AsyncStream[AgentTurnResponseStreamChunk]:
) -> Turn | AsyncStream[Turn]:
if not agent_id:
raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
if not session_id:
@@ -359,7 +358,7 @@
),
cast_to=Turn,
stream=stream or False,
stream_cls=AsyncStream[AgentTurnResponseStreamChunk],
stream_cls=AsyncStream[Turn],
)

async def retrieve(
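A hedged usage sketch of the updated streaming signature: with this change, `stream=True` yields `Turn` objects instead of `AgentTurnResponseStreamChunk`. The client construction, base URL, IDs, and message payload below are illustrative assumptions, not taken from this PR.

```python
# Sketch only: consuming the new Stream[Turn] return type.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local endpoint

response = client.agents.turn.create(
    agent_id="agent-123",        # placeholder ID
    session_id="session-456",    # placeholder ID
    messages=[{"role": "user", "content": "Hello"}],  # illustrative payload
    stream=True,                 # selects the Stream[Turn] overload
)

for turn in response:            # each streamed item is now typed as Turn
    print(turn)
```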
16 changes: 8 additions & 8 deletions src/llama_stack_client/resources/post_training/job.py
@@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Type, Optional, cast
from typing import List, Type, Optional, cast

import httpx

@@ -22,7 +22,7 @@
from ..._wrappers import DataWrapper
from ..._base_client import make_request_options
from ...types.post_training import job_cancel_params, job_status_params, job_artifacts_params
from ...types.post_training.job_list_response import JobListResponse
from ...types.list_post_training_jobs_response import Data
from ...types.post_training.job_status_response import JobStatusResponse
from ...types.post_training.job_artifacts_response import JobArtifactsResponse

@@ -58,17 +58,17 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> JobListResponse:
) -> List[Data]:
return self._get(
"/v1/post-training/jobs",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
post_parser=DataWrapper[JobListResponse]._unwrapper,
post_parser=DataWrapper[List[Data]]._unwrapper,
),
cast_to=cast(Type[JobListResponse], DataWrapper[JobListResponse]),
cast_to=cast(Type[List[Data]], DataWrapper[Data]),
)

def artifacts(
@@ -198,17 +198,17 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> JobListResponse:
) -> List[Data]:
return await self._get(
"/v1/post-training/jobs",
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
post_parser=DataWrapper[JobListResponse]._unwrapper,
post_parser=DataWrapper[List[Data]]._unwrapper,
),
cast_to=cast(Type[JobListResponse], DataWrapper[JobListResponse]),
cast_to=cast(Type[List[Data]], DataWrapper[Data]),
)

async def artifacts(
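A minimal sketch of the corresponding call site, assuming the client exposes this resource as `client.post_training.job` (inferred from the module path `resources/post_training/job.py`): `list()` now returns a plain `List[Data]` once the `data` envelope is unwrapped.

```python
# Sketch only: the unwrapped list return type for post-training jobs.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local endpoint

jobs = client.post_training.job.list()  # now typed as List[Data] rather than JobListResponse
for job in jobs:
    print(job)
```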
3 changes: 0 additions & 3 deletions src/llama_stack_client/types/agents/__init__.py
@@ -5,10 +5,7 @@
from .turn import Turn as Turn
from .session import Session as Session
from .turn_create_params import TurnCreateParams as TurnCreateParams
from .turn_response_event import TurnResponseEvent as TurnResponseEvent
from .session_create_params import SessionCreateParams as SessionCreateParams
from .step_retrieve_response import StepRetrieveResponse as StepRetrieveResponse
from .session_create_response import SessionCreateResponse as SessionCreateResponse
from .session_retrieve_params import SessionRetrieveParams as SessionRetrieveParams
from .turn_response_event_payload import TurnResponseEventPayload as TurnResponseEventPayload
from .agent_turn_response_stream_chunk import AgentTurnResponseStreamChunk as AgentTurnResponseStreamChunk
2 changes: 1 addition & 1 deletion src/llama_stack_client/types/inference_step.py
@@ -13,7 +13,7 @@


class InferenceStep(BaseModel):
inference_model_response: CompletionMessage = FieldInfo(alias="model_response")
api_model_response: CompletionMessage = FieldInfo(alias="model_response")

step_id: str

25 changes: 3 additions & 22 deletions src/llama_stack_client/types/list_datasets_response.py
@@ -1,30 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Dict, List, Union
from typing_extensions import Literal

from .._models import BaseModel
from .shared.url import URL
from .shared.param_type import ParamType
from .dataset_list_response import DatasetListResponse

__all__ = ["ListDatasetsResponse", "Data"]


class Data(BaseModel):
dataset_schema: Dict[str, ParamType]

identifier: str

metadata: Dict[str, Union[bool, float, str, List[object], object, None]]

provider_id: str

provider_resource_id: str

type: Literal["dataset"]

url: URL
__all__ = ["ListDatasetsResponse"]


class ListDatasetsResponse(BaseModel):
data: List[Data]
data: DatasetListResponse
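The same pattern repeats for the `List*Response` wrappers below: the inline `Data` model is dropped and `data` is re-typed to the per-resource list response. A hedged sketch of what that means for callers, assuming the entries in `DatasetListResponse` keep the fields the removed inline model had (`identifier`, `provider_id`, `dataset_schema`, ...):

```python
# Sketch only: iterating the re-typed `data` field on ListDatasetsResponse.
from typing import List

from llama_stack_client.types.list_datasets_response import ListDatasetsResponse


def dataset_identifiers(response: ListDatasetsResponse) -> List[str]:
    # Assumes each entry keeps the attributes the removed inline Data model declared.
    return [entry.identifier for entry in response.data]
```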
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_eval_tasks_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .._models import BaseModel
from .eval_task import EvalTask
from .eval_task_list_response import EvalTaskListResponse

__all__ = ["ListEvalTasksResponse"]


class ListEvalTasksResponse(BaseModel):
data: List[EvalTask]
data: EvalTaskListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_models_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .model import Model
from .._models import BaseModel
from .model_list_response import ModelListResponse

__all__ = ["ListModelsResponse"]


class ListModelsResponse(BaseModel):
data: List[Model]
data: ModelListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_providers_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .._models import BaseModel
from .provider_info import ProviderInfo
from .provider_list_response import ProviderListResponse

__all__ = ["ListProvidersResponse"]


class ListProvidersResponse(BaseModel):
data: List[ProviderInfo]
data: ProviderListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_routes_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .._models import BaseModel
from .route_info import RouteInfo
from .route_list_response import RouteListResponse

__all__ = ["ListRoutesResponse"]


class ListRoutesResponse(BaseModel):
data: List[RouteInfo]
data: RouteListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_scoring_functions_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .._models import BaseModel
from .scoring_fn import ScoringFn
from .scoring_function_list_response import ScoringFunctionListResponse

__all__ = ["ListScoringFunctionsResponse"]


class ListScoringFunctionsResponse(BaseModel):
data: List[ScoringFn]
data: ScoringFunctionListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_shields_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .shield import Shield
from .._models import BaseModel
from .shield_list_response import ShieldListResponse

__all__ = ["ListShieldsResponse"]


class ListShieldsResponse(BaseModel):
data: List[Shield]
data: ShieldListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_tool_groups_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .._models import BaseModel
from .tool_group import ToolGroup
from .toolgroup_list_response import ToolgroupListResponse

__all__ = ["ListToolGroupsResponse"]


class ListToolGroupsResponse(BaseModel):
data: List[ToolGroup]
data: ToolgroupListResponse
5 changes: 2 additions & 3 deletions src/llama_stack_client/types/list_tools_response.py
@@ -1,12 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List

from .tool import Tool
from .._models import BaseModel
from .tool_list_response import ToolListResponse

__all__ = ["ListToolsResponse"]


class ListToolsResponse(BaseModel):
data: List[Tool]
data: ToolListResponse
21 changes: 3 additions & 18 deletions src/llama_stack_client/types/list_vector_dbs_response.py
@@ -1,26 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List
from typing_extensions import Literal

from .._models import BaseModel
from .vector_db_list_response import VectorDBListResponse

__all__ = ["ListVectorDBsResponse", "Data"]


class Data(BaseModel):
embedding_dimension: int

embedding_model: str

identifier: str

provider_id: str

provider_resource_id: str

type: Literal["vector_db"]
__all__ = ["ListVectorDBsResponse"]


class ListVectorDBsResponse(BaseModel):
data: List[Data]
data: VectorDBListResponse
23 changes: 3 additions & 20 deletions src/llama_stack_client/types/query_spans_response.py
@@ -1,28 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Dict, List, Union, Optional
from datetime import datetime

from .._models import BaseModel
from .telemetry_query_spans_response import TelemetryQuerySpansResponse

__all__ = ["QuerySpansResponse", "Data"]


class Data(BaseModel):
name: str

span_id: str

start_time: datetime

trace_id: str

attributes: Optional[Dict[str, Union[bool, float, str, List[object], object, None]]] = None

end_time: Optional[datetime] = None

parent_span_id: Optional[str] = None
__all__ = ["QuerySpansResponse"]


class QuerySpansResponse(BaseModel):
data: List[Data]
data: TelemetryQuerySpansResponse