diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 17473a20..b3b5e583 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.1.0-alpha.3"
+ ".": "0.1.0-alpha.4"
}
diff --git a/.stats.yml b/.stats.yml
index fbc11ff4..fe4493c0 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 96
+configured_endpoints: 105
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-df7a19394e9124c18ec4e888e2856d22b5ebfd6fe6fe6e929ff6cfadb2ae7e2a.yml
openapi_spec_hash: 9428682672fdd7e2afee7af9ef849dc9
-config_hash: 3e9fdf542184399384ed713426a8065c
+config_hash: e1d37a77a6e8ca86fb6bccb4b0f172c9
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fac2b210..587420a7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
# Changelog
+## 0.1.0-alpha.4 (2025-06-27)
+
+Full Changelog: [v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/llamastack/llama-stack-client-python/compare/v0.1.0-alpha.3...v0.1.0-alpha.4)
+
+### Features
+
+* **api:** update via SDK Studio ([4333cb0](https://github.com/llamastack/llama-stack-client-python/commit/4333cb0307fd99654e53e8f87b3b2951be027b44))
+
+
+### Bug Fixes
+
+* **ci:** update pyproject.toml to use uv and remove broken CI ([#5](https://github.com/llamastack/llama-stack-client-python/issues/5)) ([7bc925c](https://github.com/llamastack/llama-stack-client-python/commit/7bc925c00401799d8f3345a4873f1b0028cb45ea))
+
+
+### Chores
+
+* **internal:** version bump ([867ea24](https://github.com/llamastack/llama-stack-client-python/commit/867ea24344fd71fc9787807a47144af5e3de82f8))
+
## 0.1.0-alpha.3 (2025-06-27)
Full Changelog: [v0.1.0-alpha.2...v0.1.0-alpha.3](https://github.com/llamastack/llama-stack-client-python/compare/v0.1.0-alpha.2...v0.1.0-alpha.3)
diff --git a/api.md b/api.md
index 68cf9719..c7a7686b 100644
--- a/api.md
+++ b/api.md
@@ -115,12 +115,16 @@ from llama_stack_client.types import (
ToolExecutionStep,
ToolResponse,
AgentCreateResponse,
+ AgentRetrieveResponse,
+ AgentListResponse,
)
```
Methods:
- client.agents.create(\*\*params) -> AgentCreateResponse
+- client.agents.retrieve(agent_id) -> AgentRetrieveResponse
+- client.agents.list(\*\*params) -> AgentListResponse
- client.agents.delete(agent_id) -> None
## Session
@@ -128,13 +132,14 @@ Methods:
Types:
```python
-from llama_stack_client.types.agents import Session, SessionCreateResponse
+from llama_stack_client.types.agents import Session, SessionCreateResponse, SessionListResponse
```
Methods:
- client.agents.session.create(agent_id, \*\*params) -> SessionCreateResponse
- client.agents.session.retrieve(session_id, \*, agent_id, \*\*params) -> Session
+- client.agents.session.list(agent_id, \*\*params) -> SessionListResponse
- client.agents.session.delete(session_id, \*, agent_id) -> None
## Steps
@@ -186,6 +191,7 @@ Methods:
- client.datasets.retrieve(dataset_id) -> DatasetRetrieveResponse
- client.datasets.list() -> DatasetListResponse
+- client.datasets.appendrows(dataset_id, \*\*params) -> None
- client.datasets.iterrows(dataset_id, \*\*params) -> DatasetIterrowsResponse
- client.datasets.register(\*\*params) -> DatasetRegisterResponse
- client.datasets.unregister(dataset_id) -> None
@@ -358,12 +364,22 @@ Methods:
Types:
```python
-from llama_stack_client.types.vector_stores import VectorStoreFile
+from llama_stack_client.types.vector_stores import (
+ VectorStoreFile,
+ FileListResponse,
+ FileDeleteResponse,
+ FileContentResponse,
+)
```
Methods:
- client.vector_stores.files.create(vector_store_id, \*\*params) -> VectorStoreFile
+- client.vector_stores.files.retrieve(file_id, \*, vector_store_id) -> VectorStoreFile
+- client.vector_stores.files.update(file_id, \*, vector_store_id, \*\*params) -> VectorStoreFile
+- client.vector_stores.files.list(vector_store_id, \*\*params) -> FileListResponse
+- client.vector_stores.files.delete(file_id, \*, vector_store_id) -> FileDeleteResponse
+- client.vector_stores.files.content(file_id, \*, vector_store_id) -> FileContentResponse
# Models
diff --git a/pyproject.toml b/pyproject.toml
index ed2b910f..9ed3185a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
-version = "0.1.0-alpha.3"
+version = "0.1.0-alpha.4"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "Apache-2.0"
diff --git a/src/llama_stack_client/resources/agents/agents.py b/src/llama_stack_client/resources/agents/agents.py
index 5b34cea8..6a4ffe85 100644
--- a/src/llama_stack_client/resources/agents/agents.py
+++ b/src/llama_stack_client/resources/agents/agents.py
@@ -20,7 +20,7 @@
StepsResourceWithStreamingResponse,
AsyncStepsResourceWithStreamingResponse,
)
-from ...types import agent_create_params
+from ...types import agent_list_params, agent_create_params
from .session import (
SessionResource,
AsyncSessionResource,
@@ -40,7 +40,9 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
+from ...types.agent_list_response import AgentListResponse
from ...types.agent_create_response import AgentCreateResponse
+from ...types.agent_retrieve_response import AgentRetrieveResponse
from ...types.shared_params.agent_config import AgentConfig
__all__ = ["AgentsResource", "AsyncAgentsResource"]
@@ -112,6 +114,85 @@ def create(
cast_to=AgentCreateResponse,
)
+ def retrieve(
+ self,
+ agent_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AgentRetrieveResponse:
+ """
+ Describe an agent by its ID.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_id:
+ raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
+ return self._get(
+ f"/v1/agents/{agent_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AgentRetrieveResponse,
+ )
+
+ def list(
+ self,
+ *,
+ limit: int | NotGiven = NOT_GIVEN,
+ start_index: int | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AgentListResponse:
+ """
+ List all agents.
+
+ Args:
+ limit: The number of agents to return.
+
+ start_index: The index to start the pagination from.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get(
+ "/v1/agents",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "limit": limit,
+ "start_index": start_index,
+ },
+ agent_list_params.AgentListParams,
+ ),
+ ),
+ cast_to=AgentListResponse,
+ )
+
def delete(
self,
agent_id: str,
@@ -213,6 +294,85 @@ async def create(
cast_to=AgentCreateResponse,
)
+ async def retrieve(
+ self,
+ agent_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AgentRetrieveResponse:
+ """
+ Describe an agent by its ID.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_id:
+ raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
+ return await self._get(
+ f"/v1/agents/{agent_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=AgentRetrieveResponse,
+ )
+
+ async def list(
+ self,
+ *,
+ limit: int | NotGiven = NOT_GIVEN,
+ start_index: int | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AgentListResponse:
+ """
+ List all agents.
+
+ Args:
+ limit: The number of agents to return.
+
+ start_index: The index to start the pagination from.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._get(
+ "/v1/agents",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "limit": limit,
+ "start_index": start_index,
+ },
+ agent_list_params.AgentListParams,
+ ),
+ ),
+ cast_to=AgentListResponse,
+ )
+
async def delete(
self,
agent_id: str,
@@ -255,6 +415,12 @@ def __init__(self, agents: AgentsResource) -> None:
self.create = to_raw_response_wrapper(
agents.create,
)
+ self.retrieve = to_raw_response_wrapper(
+ agents.retrieve,
+ )
+ self.list = to_raw_response_wrapper(
+ agents.list,
+ )
self.delete = to_raw_response_wrapper(
agents.delete,
)
@@ -279,6 +445,12 @@ def __init__(self, agents: AsyncAgentsResource) -> None:
self.create = async_to_raw_response_wrapper(
agents.create,
)
+ self.retrieve = async_to_raw_response_wrapper(
+ agents.retrieve,
+ )
+ self.list = async_to_raw_response_wrapper(
+ agents.list,
+ )
self.delete = async_to_raw_response_wrapper(
agents.delete,
)
@@ -303,6 +475,12 @@ def __init__(self, agents: AgentsResource) -> None:
self.create = to_streamed_response_wrapper(
agents.create,
)
+ self.retrieve = to_streamed_response_wrapper(
+ agents.retrieve,
+ )
+ self.list = to_streamed_response_wrapper(
+ agents.list,
+ )
self.delete = to_streamed_response_wrapper(
agents.delete,
)
@@ -327,6 +505,12 @@ def __init__(self, agents: AsyncAgentsResource) -> None:
self.create = async_to_streamed_response_wrapper(
agents.create,
)
+ self.retrieve = async_to_streamed_response_wrapper(
+ agents.retrieve,
+ )
+ self.list = async_to_streamed_response_wrapper(
+ agents.list,
+ )
self.delete = async_to_streamed_response_wrapper(
agents.delete,
)
diff --git a/src/llama_stack_client/resources/agents/session.py b/src/llama_stack_client/resources/agents/session.py
index ccefeb0b..4e1704d5 100644
--- a/src/llama_stack_client/resources/agents/session.py
+++ b/src/llama_stack_client/resources/agents/session.py
@@ -17,8 +17,9 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.agents import session_create_params, session_retrieve_params
+from ...types.agents import session_list_params, session_create_params, session_retrieve_params
from ...types.agents.session import Session
+from ...types.agents.session_list_response import SessionListResponse
from ...types.agents.session_create_response import SessionCreateResponse
__all__ = ["SessionResource", "AsyncSessionResource"]
@@ -124,6 +125,55 @@ def retrieve(
cast_to=Session,
)
+ def list(
+ self,
+ agent_id: str,
+ *,
+ limit: int | NotGiven = NOT_GIVEN,
+ start_index: int | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> SessionListResponse:
+ """
+ List all session(s) of a given agent.
+
+ Args:
+ limit: The number of sessions to return.
+
+ start_index: The index to start the pagination from.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_id:
+ raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
+ return self._get(
+ f"/v1/agents/{agent_id}/sessions",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "limit": limit,
+ "start_index": start_index,
+ },
+ session_list_params.SessionListParams,
+ ),
+ ),
+ cast_to=SessionListResponse,
+ )
+
def delete(
self,
session_id: str,
@@ -264,6 +314,55 @@ async def retrieve(
cast_to=Session,
)
+ async def list(
+ self,
+ agent_id: str,
+ *,
+ limit: int | NotGiven = NOT_GIVEN,
+ start_index: int | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> SessionListResponse:
+ """
+ List all session(s) of a given agent.
+
+ Args:
+ limit: The number of sessions to return.
+
+ start_index: The index to start the pagination from.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not agent_id:
+ raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
+ return await self._get(
+ f"/v1/agents/{agent_id}/sessions",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "limit": limit,
+ "start_index": start_index,
+ },
+ session_list_params.SessionListParams,
+ ),
+ ),
+ cast_to=SessionListResponse,
+ )
+
async def delete(
self,
session_id: str,
@@ -312,6 +411,9 @@ def __init__(self, session: SessionResource) -> None:
self.retrieve = to_raw_response_wrapper(
session.retrieve,
)
+ self.list = to_raw_response_wrapper(
+ session.list,
+ )
self.delete = to_raw_response_wrapper(
session.delete,
)
@@ -327,6 +429,9 @@ def __init__(self, session: AsyncSessionResource) -> None:
self.retrieve = async_to_raw_response_wrapper(
session.retrieve,
)
+ self.list = async_to_raw_response_wrapper(
+ session.list,
+ )
self.delete = async_to_raw_response_wrapper(
session.delete,
)
@@ -342,6 +447,9 @@ def __init__(self, session: SessionResource) -> None:
self.retrieve = to_streamed_response_wrapper(
session.retrieve,
)
+ self.list = to_streamed_response_wrapper(
+ session.list,
+ )
self.delete = to_streamed_response_wrapper(
session.delete,
)
@@ -357,6 +465,9 @@ def __init__(self, session: AsyncSessionResource) -> None:
self.retrieve = async_to_streamed_response_wrapper(
session.retrieve,
)
+ self.list = async_to_streamed_response_wrapper(
+ session.list,
+ )
self.delete = async_to_streamed_response_wrapper(
session.delete,
)
diff --git a/src/llama_stack_client/resources/datasets.py b/src/llama_stack_client/resources/datasets.py
index 45dcaeba..e2f0a149 100644
--- a/src/llama_stack_client/resources/datasets.py
+++ b/src/llama_stack_client/resources/datasets.py
@@ -7,7 +7,7 @@
import httpx
-from ..types import dataset_iterrows_params, dataset_register_params
+from ..types import dataset_iterrows_params, dataset_register_params, dataset_appendrows_params
from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
@@ -104,6 +104,44 @@ def list(
cast_to=cast(Type[DatasetListResponse], DataWrapper[DatasetListResponse]),
)
+ def appendrows(
+ self,
+ dataset_id: str,
+ *,
+ rows: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> None:
+ """
+ Append rows to a dataset.
+
+ Args:
+ rows: The rows to append to the dataset.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not dataset_id:
+ raise ValueError(f"Expected a non-empty value for `dataset_id` but received {dataset_id!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
+ return self._post(
+ f"/v1/datasetio/append-rows/{dataset_id}",
+ body=maybe_transform({"rows": rows}, dataset_appendrows_params.DatasetAppendrowsParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=NoneType,
+ )
+
def iterrows(
self,
dataset_id: str,
@@ -342,6 +380,44 @@ async def list(
cast_to=cast(Type[DatasetListResponse], DataWrapper[DatasetListResponse]),
)
+ async def appendrows(
+ self,
+ dataset_id: str,
+ *,
+ rows: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> None:
+ """
+ Append rows to a dataset.
+
+ Args:
+ rows: The rows to append to the dataset.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not dataset_id:
+ raise ValueError(f"Expected a non-empty value for `dataset_id` but received {dataset_id!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
+ return await self._post(
+ f"/v1/datasetio/append-rows/{dataset_id}",
+ body=await async_maybe_transform({"rows": rows}, dataset_appendrows_params.DatasetAppendrowsParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=NoneType,
+ )
+
async def iterrows(
self,
dataset_id: str,
@@ -514,6 +590,9 @@ def __init__(self, datasets: DatasetsResource) -> None:
self.list = to_raw_response_wrapper(
datasets.list,
)
+ self.appendrows = to_raw_response_wrapper(
+ datasets.appendrows,
+ )
self.iterrows = to_raw_response_wrapper(
datasets.iterrows,
)
@@ -535,6 +614,9 @@ def __init__(self, datasets: AsyncDatasetsResource) -> None:
self.list = async_to_raw_response_wrapper(
datasets.list,
)
+ self.appendrows = async_to_raw_response_wrapper(
+ datasets.appendrows,
+ )
self.iterrows = async_to_raw_response_wrapper(
datasets.iterrows,
)
@@ -556,6 +638,9 @@ def __init__(self, datasets: DatasetsResource) -> None:
self.list = to_streamed_response_wrapper(
datasets.list,
)
+ self.appendrows = to_streamed_response_wrapper(
+ datasets.appendrows,
+ )
self.iterrows = to_streamed_response_wrapper(
datasets.iterrows,
)
@@ -577,6 +662,9 @@ def __init__(self, datasets: AsyncDatasetsResource) -> None:
self.list = async_to_streamed_response_wrapper(
datasets.list,
)
+ self.appendrows = async_to_streamed_response_wrapper(
+ datasets.appendrows,
+ )
self.iterrows = async_to_streamed_response_wrapper(
datasets.iterrows,
)
diff --git a/src/llama_stack_client/resources/vector_stores/files.py b/src/llama_stack_client/resources/vector_stores/files.py
index 1ef48084..4ea92a1e 100644
--- a/src/llama_stack_client/resources/vector_stores/files.py
+++ b/src/llama_stack_client/resources/vector_stores/files.py
@@ -3,6 +3,7 @@
from __future__ import annotations
from typing import Dict, Union, Iterable
+from typing_extensions import Literal
import httpx
@@ -17,8 +18,11 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.vector_stores import file_create_params
+from ...types.vector_stores import file_list_params, file_create_params, file_update_params
from ...types.vector_stores.vector_store_file import VectorStoreFile
+from ...types.vector_stores.file_list_response import FileListResponse
+from ...types.vector_stores.file_delete_response import FileDeleteResponse
+from ...types.vector_stores.file_content_response import FileContentResponse
__all__ = ["FilesResource", "AsyncFilesResource"]
@@ -93,6 +97,205 @@ def create(
cast_to=VectorStoreFile,
)
+ def retrieve(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> VectorStoreFile:
+ """
+ Retrieves a vector store file.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return self._get(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=VectorStoreFile,
+ )
+
+ def update(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ attributes: Dict[str, Union[bool, float, str, Iterable[object], object, None]],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> VectorStoreFile:
+ """
+ Updates a vector store file.
+
+ Args:
+ attributes: The updated key-value attributes to store with the file.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return self._post(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}",
+ body=maybe_transform({"attributes": attributes}, file_update_params.FileUpdateParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=VectorStoreFile,
+ )
+
+ def list(
+ self,
+ vector_store_id: str,
+ *,
+ after: str | NotGiven = NOT_GIVEN,
+ before: str | NotGiven = NOT_GIVEN,
+ filter: Literal["completed", "in_progress", "cancelled", "failed"] | NotGiven = NOT_GIVEN,
+ limit: int | NotGiven = NOT_GIVEN,
+ order: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> FileListResponse:
+ """
+ List files in a vector store.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ return self._get(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "after": after,
+ "before": before,
+ "filter": filter,
+ "limit": limit,
+ "order": order,
+ },
+ file_list_params.FileListParams,
+ ),
+ ),
+ cast_to=FileListResponse,
+ )
+
+ def delete(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> FileDeleteResponse:
+ """
+ Delete a vector store file.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return self._delete(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=FileDeleteResponse,
+ )
+
+ def content(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> FileContentResponse:
+ """
+ Retrieves the contents of a vector store file.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return self._get(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}/content",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=FileContentResponse,
+ )
+
class AsyncFilesResource(AsyncAPIResource):
@cached_property
@@ -164,6 +367,205 @@ async def create(
cast_to=VectorStoreFile,
)
+ async def retrieve(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> VectorStoreFile:
+ """
+ Retrieve a vector store file.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return await self._get(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=VectorStoreFile,
+ )
+
+ async def update(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ attributes: Dict[str, Union[bool, float, str, Iterable[object], object, None]],
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> VectorStoreFile:
+ """
+ Update a vector store file.
+
+ Args:
+ attributes: The updated key-value attributes to store with the file.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return await self._post(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}",
+ body=await async_maybe_transform({"attributes": attributes}, file_update_params.FileUpdateParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=VectorStoreFile,
+ )
+
+ async def list(
+ self,
+ vector_store_id: str,
+ *,
+ after: str | NotGiven = NOT_GIVEN,
+ before: str | NotGiven = NOT_GIVEN,
+ filter: Literal["completed", "in_progress", "cancelled", "failed"] | NotGiven = NOT_GIVEN,
+ limit: int | NotGiven = NOT_GIVEN,
+ order: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> FileListResponse:
+ """
+ List files in a vector store.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ return await self._get(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "after": after,
+ "before": before,
+ "filter": filter,
+ "limit": limit,
+ "order": order,
+ },
+ file_list_params.FileListParams,
+ ),
+ ),
+ cast_to=FileListResponse,
+ )
+
+ async def delete(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> FileDeleteResponse:
+ """
+ Delete a vector store file.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return await self._delete(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=FileDeleteResponse,
+ )
+
+ async def content(
+ self,
+ file_id: str,
+ *,
+ vector_store_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> FileContentResponse:
+ """
+ Retrieve the contents of a vector store file.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not vector_store_id:
+ raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
+ if not file_id:
+ raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
+ return await self._get(
+ f"/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}/content",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=FileContentResponse,
+ )
+
class FilesResourceWithRawResponse:
def __init__(self, files: FilesResource) -> None:
@@ -172,6 +574,21 @@ def __init__(self, files: FilesResource) -> None:
self.create = to_raw_response_wrapper(
files.create,
)
+ self.retrieve = to_raw_response_wrapper(
+ files.retrieve,
+ )
+ self.update = to_raw_response_wrapper(
+ files.update,
+ )
+ self.list = to_raw_response_wrapper(
+ files.list,
+ )
+ self.delete = to_raw_response_wrapper(
+ files.delete,
+ )
+ self.content = to_raw_response_wrapper(
+ files.content,
+ )
class AsyncFilesResourceWithRawResponse:
@@ -181,6 +598,21 @@ def __init__(self, files: AsyncFilesResource) -> None:
self.create = async_to_raw_response_wrapper(
files.create,
)
+ self.retrieve = async_to_raw_response_wrapper(
+ files.retrieve,
+ )
+ self.update = async_to_raw_response_wrapper(
+ files.update,
+ )
+ self.list = async_to_raw_response_wrapper(
+ files.list,
+ )
+ self.delete = async_to_raw_response_wrapper(
+ files.delete,
+ )
+ self.content = async_to_raw_response_wrapper(
+ files.content,
+ )
class FilesResourceWithStreamingResponse:
@@ -190,6 +622,21 @@ def __init__(self, files: FilesResource) -> None:
self.create = to_streamed_response_wrapper(
files.create,
)
+ self.retrieve = to_streamed_response_wrapper(
+ files.retrieve,
+ )
+ self.update = to_streamed_response_wrapper(
+ files.update,
+ )
+ self.list = to_streamed_response_wrapper(
+ files.list,
+ )
+ self.delete = to_streamed_response_wrapper(
+ files.delete,
+ )
+ self.content = to_streamed_response_wrapper(
+ files.content,
+ )
class AsyncFilesResourceWithStreamingResponse:
@@ -199,3 +646,18 @@ def __init__(self, files: AsyncFilesResource) -> None:
self.create = async_to_streamed_response_wrapper(
files.create,
)
+ self.retrieve = async_to_streamed_response_wrapper(
+ files.retrieve,
+ )
+ self.update = async_to_streamed_response_wrapper(
+ files.update,
+ )
+ self.list = async_to_streamed_response_wrapper(
+ files.list,
+ )
+ self.delete = async_to_streamed_response_wrapper(
+ files.delete,
+ )
+ self.content = async_to_streamed_response_wrapper(
+ files.content,
+ )
diff --git a/src/llama_stack_client/types/__init__.py b/src/llama_stack_client/types/__init__.py
index 7f742ba5..cfb77868 100644
--- a/src/llama_stack_client/types/__init__.py
+++ b/src/llama_stack_client/types/__init__.py
@@ -53,12 +53,14 @@
from .shield_call_step import ShieldCallStep as ShieldCallStep
from .span_with_status import SpanWithStatus as SpanWithStatus
from .tool_list_params import ToolListParams as ToolListParams
+from .agent_list_params import AgentListParams as AgentListParams
from .evaluate_response import EvaluateResponse as EvaluateResponse
from .post_training_job import PostTrainingJob as PostTrainingJob
from .scoring_fn_params import ScoringFnParams as ScoringFnParams
from .file_create_params import FileCreateParams as FileCreateParams
from .tool_list_response import ToolListResponse as ToolListResponse
from .agent_create_params import AgentCreateParams as AgentCreateParams
+from .agent_list_response import AgentListResponse as AgentListResponse
from .completion_response import CompletionResponse as CompletionResponse
from .embeddings_response import EmbeddingsResponse as EmbeddingsResponse
from .list_files_response import ListFilesResponse as ListFilesResponse
@@ -96,6 +98,7 @@
from .shield_register_params import ShieldRegisterParams as ShieldRegisterParams
from .tool_invocation_result import ToolInvocationResult as ToolInvocationResult
from .vector_io_query_params import VectorIoQueryParams as VectorIoQueryParams
+from .agent_retrieve_response import AgentRetrieveResponse as AgentRetrieveResponse
from .benchmark_list_response import BenchmarkListResponse as BenchmarkListResponse
from .dataset_iterrows_params import DatasetIterrowsParams as DatasetIterrowsParams
from .dataset_register_params import DatasetRegisterParams as DatasetRegisterParams
@@ -111,6 +114,7 @@
from .safety_run_shield_params import SafetyRunShieldParams as SafetyRunShieldParams
from .vector_store_list_params import VectorStoreListParams as VectorStoreListParams
from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams
+from .dataset_appendrows_params import DatasetAppendrowsParams as DatasetAppendrowsParams
from .dataset_iterrows_response import DatasetIterrowsResponse as DatasetIterrowsResponse
from .dataset_register_response import DatasetRegisterResponse as DatasetRegisterResponse
from .dataset_retrieve_response import DatasetRetrieveResponse as DatasetRetrieveResponse
diff --git a/src/llama_stack_client/types/agent_list_params.py b/src/llama_stack_client/types/agent_list_params.py
new file mode 100644
index 00000000..15da545b
--- /dev/null
+++ b/src/llama_stack_client/types/agent_list_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["AgentListParams"]
+
+
+class AgentListParams(TypedDict, total=False):
+ limit: int
+ """The number of agents to return."""
+
+ start_index: int
+ """The index to start the pagination from."""
diff --git a/src/llama_stack_client/types/agent_list_response.py b/src/llama_stack_client/types/agent_list_response.py
new file mode 100644
index 00000000..d0640e21
--- /dev/null
+++ b/src/llama_stack_client/types/agent_list_response.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, List, Union, Optional
+
+from .._models import BaseModel
+
+__all__ = ["AgentListResponse"]
+
+
+class AgentListResponse(BaseModel):
+ data: List[Dict[str, Union[bool, float, str, List[object], object, None]]]
+ """The list of items for the current page"""
+
+ has_more: bool
+ """Whether there are more items available after this set"""
+
+ url: Optional[str] = None
+ """The URL for accessing this list"""
diff --git a/src/llama_stack_client/types/agent_retrieve_response.py b/src/llama_stack_client/types/agent_retrieve_response.py
new file mode 100644
index 00000000..132821cb
--- /dev/null
+++ b/src/llama_stack_client/types/agent_retrieve_response.py
@@ -0,0 +1,17 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from datetime import datetime
+
+from .._models import BaseModel
+from .shared.agent_config import AgentConfig
+
+__all__ = ["AgentRetrieveResponse"]
+
+
+class AgentRetrieveResponse(BaseModel):
+ agent_config: AgentConfig
+ """Configuration for an agent."""
+
+ agent_id: str
+
+ created_at: datetime
diff --git a/src/llama_stack_client/types/agents/__init__.py b/src/llama_stack_client/types/agents/__init__.py
index 30355cbf..f4f48353 100644
--- a/src/llama_stack_client/types/agents/__init__.py
+++ b/src/llama_stack_client/types/agents/__init__.py
@@ -6,8 +6,10 @@
from .session import Session as Session
from .turn_create_params import TurnCreateParams as TurnCreateParams
from .turn_resume_params import TurnResumeParams as TurnResumeParams
+from .session_list_params import SessionListParams as SessionListParams
from .turn_response_event import TurnResponseEvent as TurnResponseEvent
from .session_create_params import SessionCreateParams as SessionCreateParams
+from .session_list_response import SessionListResponse as SessionListResponse
from .step_retrieve_response import StepRetrieveResponse as StepRetrieveResponse
from .session_create_response import SessionCreateResponse as SessionCreateResponse
from .session_retrieve_params import SessionRetrieveParams as SessionRetrieveParams
diff --git a/src/llama_stack_client/types/agents/session_list_params.py b/src/llama_stack_client/types/agents/session_list_params.py
new file mode 100644
index 00000000..0644d1ae
--- /dev/null
+++ b/src/llama_stack_client/types/agents/session_list_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing_extensions import TypedDict
+
+__all__ = ["SessionListParams"]
+
+
+class SessionListParams(TypedDict, total=False):
+ limit: int
+ """The number of sessions to return."""
+
+ start_index: int
+ """The index to start the pagination from."""
diff --git a/src/llama_stack_client/types/agents/session_list_response.py b/src/llama_stack_client/types/agents/session_list_response.py
new file mode 100644
index 00000000..e70ecc46
--- /dev/null
+++ b/src/llama_stack_client/types/agents/session_list_response.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, List, Union, Optional
+
+from ..._models import BaseModel
+
+__all__ = ["SessionListResponse"]
+
+
+class SessionListResponse(BaseModel):
+ data: List[Dict[str, Union[bool, float, str, List[object], object, None]]]
+ """The list of items for the current page"""
+
+ has_more: bool
+ """Whether there are more items available after this set"""
+
+ url: Optional[str] = None
+ """The URL for accessing this list"""
diff --git a/src/llama_stack_client/types/dataset_appendrows_params.py b/src/llama_stack_client/types/dataset_appendrows_params.py
new file mode 100644
index 00000000..2e96e124
--- /dev/null
+++ b/src/llama_stack_client/types/dataset_appendrows_params.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict, Union, Iterable
+from typing_extensions import Required, TypedDict
+
+__all__ = ["DatasetAppendrowsParams"]
+
+
+class DatasetAppendrowsParams(TypedDict, total=False):
+ rows: Required[Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]]
+ """The rows to append to the dataset."""
diff --git a/src/llama_stack_client/types/vector_stores/__init__.py b/src/llama_stack_client/types/vector_stores/__init__.py
index 550270e2..82fc5047 100644
--- a/src/llama_stack_client/types/vector_stores/__init__.py
+++ b/src/llama_stack_client/types/vector_stores/__init__.py
@@ -2,5 +2,10 @@
from __future__ import annotations
+from .file_list_params import FileListParams as FileListParams
from .vector_store_file import VectorStoreFile as VectorStoreFile
from .file_create_params import FileCreateParams as FileCreateParams
+from .file_list_response import FileListResponse as FileListResponse
+from .file_update_params import FileUpdateParams as FileUpdateParams
+from .file_delete_response import FileDeleteResponse as FileDeleteResponse
+from .file_content_response import FileContentResponse as FileContentResponse
diff --git a/src/llama_stack_client/types/vector_stores/file_content_response.py b/src/llama_stack_client/types/vector_stores/file_content_response.py
new file mode 100644
index 00000000..987160e4
--- /dev/null
+++ b/src/llama_stack_client/types/vector_stores/file_content_response.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, List, Union
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+
+__all__ = ["FileContentResponse", "Content"]
+
+
+class Content(BaseModel):
+ text: str
+
+ type: Literal["text"]
+
+
+class FileContentResponse(BaseModel):
+ attributes: Dict[str, Union[bool, float, str, List[object], object, None]]
+
+ content: List[Content]
+
+ file_id: str
+
+ filename: str
diff --git a/src/llama_stack_client/types/vector_stores/file_delete_response.py b/src/llama_stack_client/types/vector_stores/file_delete_response.py
new file mode 100644
index 00000000..be90cec6
--- /dev/null
+++ b/src/llama_stack_client/types/vector_stores/file_delete_response.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from ..._models import BaseModel
+
+__all__ = ["FileDeleteResponse"]
+
+
+class FileDeleteResponse(BaseModel):
+ id: str
+
+ deleted: bool
+
+ object: str
diff --git a/src/llama_stack_client/types/vector_stores/file_list_params.py b/src/llama_stack_client/types/vector_stores/file_list_params.py
new file mode 100644
index 00000000..3843cb3f
--- /dev/null
+++ b/src/llama_stack_client/types/vector_stores/file_list_params.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing_extensions import Literal, TypedDict
+
+__all__ = ["FileListParams"]
+
+
+class FileListParams(TypedDict, total=False):
+ after: str
+
+ before: str
+
+ filter: Literal["completed", "in_progress", "cancelled", "failed"]
+
+ limit: int
+
+ order: str
diff --git a/src/llama_stack_client/types/vector_stores/file_list_response.py b/src/llama_stack_client/types/vector_stores/file_list_response.py
new file mode 100644
index 00000000..d9f5f466
--- /dev/null
+++ b/src/llama_stack_client/types/vector_stores/file_list_response.py
@@ -0,0 +1,20 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+
+from ..._models import BaseModel
+from .vector_store_file import VectorStoreFile
+
+__all__ = ["FileListResponse"]
+
+
+class FileListResponse(BaseModel):
+ data: List[VectorStoreFile]
+
+ has_more: bool
+
+ object: str
+
+ first_id: Optional[str] = None
+
+ last_id: Optional[str] = None
diff --git a/src/llama_stack_client/types/vector_stores/file_update_params.py b/src/llama_stack_client/types/vector_stores/file_update_params.py
new file mode 100644
index 00000000..fddfc8c6
--- /dev/null
+++ b/src/llama_stack_client/types/vector_stores/file_update_params.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict, Union, Iterable
+from typing_extensions import Required, TypedDict
+
+__all__ = ["FileUpdateParams"]
+
+
+class FileUpdateParams(TypedDict, total=False):
+ vector_store_id: Required[str]
+
+ attributes: Required[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
+ """The updated key-value attributes to store with the file."""
diff --git a/tests/api_resources/agents/test_session.py b/tests/api_resources/agents/test_session.py
index 2c80df58..b49ab492 100644
--- a/tests/api_resources/agents/test_session.py
+++ b/tests/api_resources/agents/test_session.py
@@ -11,6 +11,7 @@
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
from llama_stack_client.types.agents import (
Session,
+ SessionListResponse,
SessionCreateResponse,
)
@@ -119,6 +120,53 @@ def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
agent_id="agent_id",
)
+ @parametrize
+ def test_method_list(self, client: LlamaStackClient) -> None:
+ session = client.agents.session.list(
+ agent_id="agent_id",
+ )
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
+ session = client.agents.session.list(
+ agent_id="agent_id",
+ limit=0,
+ start_index=0,
+ )
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ @parametrize
+ def test_raw_response_list(self, client: LlamaStackClient) -> None:
+ response = client.agents.session.with_raw_response.list(
+ agent_id="agent_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ session = response.parse()
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ @parametrize
+ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
+ with client.agents.session.with_streaming_response.list(
+ agent_id="agent_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ session = response.parse()
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_list(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"):
+ client.agents.session.with_raw_response.list(
+ agent_id="",
+ )
+
@parametrize
def test_method_delete(self, client: LlamaStackClient) -> None:
session = client.agents.session.delete(
@@ -272,6 +320,53 @@ async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -
agent_id="agent_id",
)
+ @parametrize
+ async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
+ session = await async_client.agents.session.list(
+ agent_id="agent_id",
+ )
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
+ session = await async_client.agents.session.list(
+ agent_id="agent_id",
+ limit=0,
+ start_index=0,
+ )
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ @parametrize
+ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.agents.session.with_raw_response.list(
+ agent_id="agent_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ session = await response.parse()
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.agents.session.with_streaming_response.list(
+ agent_id="agent_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ session = await response.parse()
+ assert_matches_type(SessionListResponse, session, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_list(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"):
+ await async_client.agents.session.with_raw_response.list(
+ agent_id="",
+ )
+
@parametrize
async def test_method_delete(self, async_client: AsyncLlamaStackClient) -> None:
session = await async_client.agents.session.delete(
diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py
index c4aa5349..18b34012 100644
--- a/tests/api_resources/test_agents.py
+++ b/tests/api_resources/test_agents.py
@@ -9,7 +9,11 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types import AgentCreateResponse
+from llama_stack_client.types import (
+ AgentListResponse,
+ AgentCreateResponse,
+ AgentRetrieveResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -106,6 +110,77 @@ def test_streaming_response_create(self, client: LlamaStackClient) -> None:
assert cast(Any, response.is_closed) is True
+ @parametrize
+ def test_method_retrieve(self, client: LlamaStackClient) -> None:
+ agent = client.agents.retrieve(
+ "agent_id",
+ )
+ assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve(self, client: LlamaStackClient) -> None:
+ response = client.agents.with_raw_response.retrieve(
+ "agent_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = response.parse()
+ assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None:
+ with client.agents.with_streaming_response.retrieve(
+ "agent_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = response.parse()
+ assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"):
+ client.agents.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ def test_method_list(self, client: LlamaStackClient) -> None:
+ agent = client.agents.list()
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
+ agent = client.agents.list(
+ limit=0,
+ start_index=0,
+ )
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ @parametrize
+ def test_raw_response_list(self, client: LlamaStackClient) -> None:
+ response = client.agents.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = response.parse()
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ @parametrize
+ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
+ with client.agents.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = response.parse()
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
@parametrize
def test_method_delete(self, client: LlamaStackClient) -> None:
agent = client.agents.delete(
@@ -239,6 +314,77 @@ async def test_streaming_response_create(self, async_client: AsyncLlamaStackClie
assert cast(Any, response.is_closed) is True
+ @parametrize
+ async def test_method_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ agent = await async_client.agents.retrieve(
+ "agent_id",
+ )
+ assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.agents.with_raw_response.retrieve(
+ "agent_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = await response.parse()
+ assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.agents.with_streaming_response.retrieve(
+ "agent_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = await response.parse()
+ assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"):
+ await async_client.agents.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
+ agent = await async_client.agents.list()
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
+ agent = await async_client.agents.list(
+ limit=0,
+ start_index=0,
+ )
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ @parametrize
+ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.agents.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ agent = await response.parse()
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.agents.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ agent = await response.parse()
+ assert_matches_type(AgentListResponse, agent, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
@parametrize
async def test_method_delete(self, async_client: AsyncLlamaStackClient) -> None:
agent = await async_client.agents.delete(
diff --git a/tests/api_resources/test_datasets.py b/tests/api_resources/test_datasets.py
index 9cd17f45..eee1de8c 100644
--- a/tests/api_resources/test_datasets.py
+++ b/tests/api_resources/test_datasets.py
@@ -85,6 +85,48 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert cast(Any, response.is_closed) is True
+ @parametrize
+ def test_method_appendrows(self, client: LlamaStackClient) -> None:
+ dataset = client.datasets.appendrows(
+ dataset_id="dataset_id",
+ rows=[{"foo": True}],
+ )
+ assert dataset is None
+
+ @parametrize
+ def test_raw_response_appendrows(self, client: LlamaStackClient) -> None:
+ response = client.datasets.with_raw_response.appendrows(
+ dataset_id="dataset_id",
+ rows=[{"foo": True}],
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ dataset = response.parse()
+ assert dataset is None
+
+ @parametrize
+ def test_streaming_response_appendrows(self, client: LlamaStackClient) -> None:
+ with client.datasets.with_streaming_response.appendrows(
+ dataset_id="dataset_id",
+ rows=[{"foo": True}],
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ dataset = response.parse()
+ assert dataset is None
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_appendrows(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
+ client.datasets.with_raw_response.appendrows(
+ dataset_id="",
+ rows=[{"foo": True}],
+ )
+
@parametrize
def test_method_iterrows(self, client: LlamaStackClient) -> None:
dataset = client.datasets.iterrows(
@@ -295,6 +337,48 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert cast(Any, response.is_closed) is True
+ @parametrize
+ async def test_method_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
+ dataset = await async_client.datasets.appendrows(
+ dataset_id="dataset_id",
+ rows=[{"foo": True}],
+ )
+ assert dataset is None
+
+ @parametrize
+ async def test_raw_response_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.datasets.with_raw_response.appendrows(
+ dataset_id="dataset_id",
+ rows=[{"foo": True}],
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ dataset = await response.parse()
+ assert dataset is None
+
+ @parametrize
+ async def test_streaming_response_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.datasets.with_streaming_response.appendrows(
+ dataset_id="dataset_id",
+ rows=[{"foo": True}],
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ dataset = await response.parse()
+ assert dataset is None
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_appendrows(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `dataset_id` but received ''"):
+ await async_client.datasets.with_raw_response.appendrows(
+ dataset_id="",
+ rows=[{"foo": True}],
+ )
+
@parametrize
async def test_method_iterrows(self, async_client: AsyncLlamaStackClient) -> None:
dataset = await async_client.datasets.iterrows(
diff --git a/tests/api_resources/vector_stores/test_files.py b/tests/api_resources/vector_stores/test_files.py
index f9728a36..235f80e3 100644
--- a/tests/api_resources/vector_stores/test_files.py
+++ b/tests/api_resources/vector_stores/test_files.py
@@ -9,7 +9,12 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types.vector_stores import VectorStoreFile
+from llama_stack_client.types.vector_stores import (
+ VectorStoreFile,
+ FileListResponse,
+ FileDeleteResponse,
+ FileContentResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -69,6 +74,253 @@ def test_path_params_create(self, client: LlamaStackClient) -> None:
file_id="file_id",
)
+ @parametrize
+ def test_method_retrieve(self, client: LlamaStackClient) -> None:
+ file = client.vector_stores.files.retrieve(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve(self, client: LlamaStackClient) -> None:
+ response = client.vector_stores.files.with_raw_response.retrieve(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve(self, client: LlamaStackClient) -> None:
+ with client.vector_stores.files.with_streaming_response.retrieve(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ client.vector_stores.files.with_raw_response.retrieve(
+ file_id="file_id",
+ vector_store_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ client.vector_stores.files.with_raw_response.retrieve(
+ file_id="",
+ vector_store_id="vector_store_id",
+ )
+
+ @parametrize
+ def test_method_update(self, client: LlamaStackClient) -> None:
+ file = client.vector_stores.files.update(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ )
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ def test_raw_response_update(self, client: LlamaStackClient) -> None:
+ response = client.vector_stores.files.with_raw_response.update(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ def test_streaming_response_update(self, client: LlamaStackClient) -> None:
+ with client.vector_stores.files.with_streaming_response.update(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_update(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ client.vector_stores.files.with_raw_response.update(
+ file_id="file_id",
+ vector_store_id="",
+ attributes={"foo": True},
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ client.vector_stores.files.with_raw_response.update(
+ file_id="",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ )
+
+ @parametrize
+ def test_method_list(self, client: LlamaStackClient) -> None:
+ file = client.vector_stores.files.list(
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
+ file = client.vector_stores.files.list(
+ vector_store_id="vector_store_id",
+ after="after",
+ before="before",
+ filter="completed",
+ limit=0,
+ order="order",
+ )
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ @parametrize
+ def test_raw_response_list(self, client: LlamaStackClient) -> None:
+ response = client.vector_stores.files.with_raw_response.list(
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = response.parse()
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ @parametrize
+ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
+ with client.vector_stores.files.with_streaming_response.list(
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = response.parse()
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_list(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ client.vector_stores.files.with_raw_response.list(
+ vector_store_id="",
+ )
+
+ @parametrize
+ def test_method_delete(self, client: LlamaStackClient) -> None:
+ file = client.vector_stores.files.delete(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(FileDeleteResponse, file, path=["response"])
+
+ @parametrize
+ def test_raw_response_delete(self, client: LlamaStackClient) -> None:
+ response = client.vector_stores.files.with_raw_response.delete(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = response.parse()
+ assert_matches_type(FileDeleteResponse, file, path=["response"])
+
+ @parametrize
+ def test_streaming_response_delete(self, client: LlamaStackClient) -> None:
+ with client.vector_stores.files.with_streaming_response.delete(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = response.parse()
+ assert_matches_type(FileDeleteResponse, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_delete(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ client.vector_stores.files.with_raw_response.delete(
+ file_id="file_id",
+ vector_store_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ client.vector_stores.files.with_raw_response.delete(
+ file_id="",
+ vector_store_id="vector_store_id",
+ )
+
+ @parametrize
+ def test_method_content(self, client: LlamaStackClient) -> None:
+ file = client.vector_stores.files.content(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(FileContentResponse, file, path=["response"])
+
+ @parametrize
+ def test_raw_response_content(self, client: LlamaStackClient) -> None:
+ response = client.vector_stores.files.with_raw_response.content(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = response.parse()
+ assert_matches_type(FileContentResponse, file, path=["response"])
+
+ @parametrize
+ def test_streaming_response_content(self, client: LlamaStackClient) -> None:
+ with client.vector_stores.files.with_streaming_response.content(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = response.parse()
+ assert_matches_type(FileContentResponse, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_content(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ client.vector_stores.files.with_raw_response.content(
+ file_id="file_id",
+ vector_store_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ client.vector_stores.files.with_raw_response.content(
+ file_id="",
+ vector_store_id="vector_store_id",
+ )
+
class TestAsyncFiles:
parametrize = pytest.mark.parametrize(
@@ -126,3 +378,250 @@ async def test_path_params_create(self, async_client: AsyncLlamaStackClient) ->
vector_store_id="",
file_id="file_id",
)
+
+ @parametrize
+ async def test_method_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ file = await async_client.vector_stores.files.retrieve(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.vector_stores.files.with_raw_response.retrieve(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = await response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.vector_stores.files.with_streaming_response.retrieve(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = await response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.retrieve(
+ file_id="file_id",
+ vector_store_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.retrieve(
+ file_id="",
+ vector_store_id="vector_store_id",
+ )
+
+ @parametrize
+ async def test_method_update(self, async_client: AsyncLlamaStackClient) -> None:
+ file = await async_client.vector_stores.files.update(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ )
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ async def test_raw_response_update(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.vector_stores.files.with_raw_response.update(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = await response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_update(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.vector_stores.files.with_streaming_response.update(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = await response.parse()
+ assert_matches_type(VectorStoreFile, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_update(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.update(
+ file_id="file_id",
+ vector_store_id="",
+ attributes={"foo": True},
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.update(
+ file_id="",
+ vector_store_id="vector_store_id",
+ attributes={"foo": True},
+ )
+
+ @parametrize
+ async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
+ file = await async_client.vector_stores.files.list(
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
+ file = await async_client.vector_stores.files.list(
+ vector_store_id="vector_store_id",
+ after="after",
+ before="before",
+ filter="completed",
+ limit=0,
+ order="order",
+ )
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ @parametrize
+ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.vector_stores.files.with_raw_response.list(
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = await response.parse()
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.vector_stores.files.with_streaming_response.list(
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = await response.parse()
+ assert_matches_type(FileListResponse, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_list(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.list(
+ vector_store_id="",
+ )
+
+ @parametrize
+ async def test_method_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ file = await async_client.vector_stores.files.delete(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(FileDeleteResponse, file, path=["response"])
+
+ @parametrize
+ async def test_raw_response_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.vector_stores.files.with_raw_response.delete(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = await response.parse()
+ assert_matches_type(FileDeleteResponse, file, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.vector_stores.files.with_streaming_response.delete(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = await response.parse()
+ assert_matches_type(FileDeleteResponse, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.delete(
+ file_id="file_id",
+ vector_store_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.delete(
+ file_id="",
+ vector_store_id="vector_store_id",
+ )
+
+ @parametrize
+ async def test_method_content(self, async_client: AsyncLlamaStackClient) -> None:
+ file = await async_client.vector_stores.files.content(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+ assert_matches_type(FileContentResponse, file, path=["response"])
+
+ @parametrize
+ async def test_raw_response_content(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.vector_stores.files.with_raw_response.content(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ file = await response.parse()
+ assert_matches_type(FileContentResponse, file, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_content(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.vector_stores.files.with_streaming_response.content(
+ file_id="file_id",
+ vector_store_id="vector_store_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ file = await response.parse()
+ assert_matches_type(FileContentResponse, file, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_content(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `vector_store_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.content(
+ file_id="file_id",
+ vector_store_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"):
+ await async_client.vector_stores.files.with_raw_response.content(
+ file_id="",
+ vector_store_id="vector_store_id",
+ )