diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 23b4f02c..ed9acd29 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.2.19-alpha.1"
+ ".": "0.2.23-alpha.1"
}
diff --git a/.stats.yml b/.stats.yml
index f937dc2e..fa9edfc7 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 107
+configured_endpoints: 111
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-f252873ea1e1f38fd207331ef2621c511154d5be3f4076e59cc15754fc58eee4.yml
openapi_spec_hash: 10cbb4337a06a9fdd7d08612dd6044c3
-config_hash: 40b8d777e1eb8b6ab05759b663edd2fb
+config_hash: 0358112cc0f3d880b4d55debdbe1cfa3
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 68e09576..0011c19f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,45 @@
# Changelog
+## 0.2.23-alpha.1 (2025-09-26)
+
+Full Changelog: [v0.2.19-alpha.1...v0.2.23-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.2.19-alpha.1...v0.2.23-alpha.1)
+
+### Features
+
+* **api:** manual updates ([e9873f9](https://github.com/llamastack/llama-stack-client-python/commit/e9873f92a90262e76e67b50a054c3632dddab572))
+* improve future compat with pydantic v3 ([5c7b05b](https://github.com/llamastack/llama-stack-client-python/commit/5c7b05b5bcb922d82b57838f31a734588f31855a))
+* **types:** replace List[str] with SequenceNotStr in params ([2c0cdd1](https://github.com/llamastack/llama-stack-client-python/commit/2c0cdd14a84245001b2f46d220950b7828e4675d))
+
+
+### Bug Fixes
+
+* avoid newer type syntax ([c94275d](https://github.com/llamastack/llama-stack-client-python/commit/c94275d8aea13deafb0821e535774f48f0063a37))
+* **client:** fix circular dependencies and offset pagination ([fc50cbe](https://github.com/llamastack/llama-stack-client-python/commit/fc50cbe63b0483b1ac786e09134ae8917d0a40b7))
+* **project:** avoid duplicate dev dep sections ([ccc9dd2](https://github.com/llamastack/llama-stack-client-python/commit/ccc9dd2d704424b0b1f6a3f1d0da09385bfda5cf))
+* **tool:** revert the ToolDefParam stuff because we reverted the bad commit ([e2d543d](https://github.com/llamastack/llama-stack-client-python/commit/e2d543dfe2cdab2e1d74e3fa2bd2334af040b56d))
+
+
+### Chores
+
+* **deps:** move deprecated `dev-dependencies` in `pyproject.toml` to dev group ([a2f4544](https://github.com/llamastack/llama-stack-client-python/commit/a2f45441edf67842c2db37765ee773394cd2dda3))
+* do not install brew dependencies in ./scripts/bootstrap by default ([6f269ff](https://github.com/llamastack/llama-stack-client-python/commit/6f269ff5dec08256d4e2e144b85cd67c8f860bb2))
+* **internal:** add Sequence related utils ([54fd160](https://github.com/llamastack/llama-stack-client-python/commit/54fd1607f0dc4506a329b8a8f3c747cc6a413789))
+* **internal:** codegen related update ([1cf74fe](https://github.com/llamastack/llama-stack-client-python/commit/1cf74fec41180b9357ad91cf6f67b950c98db150))
+* **internal:** minor formatting change ([1956d9a](https://github.com/llamastack/llama-stack-client-python/commit/1956d9aaf2aec0ab8bee590dc8df3d049689acd9))
+* **internal:** move mypy configurations to `pyproject.toml` file ([a27daa1](https://github.com/llamastack/llama-stack-client-python/commit/a27daa1627b674eb431175629f9be54e9fc8a39e))
+* **internal:** run tests in an isolated environment ([c101839](https://github.com/llamastack/llama-stack-client-python/commit/c1018390ca2754d8d6152bc3a350e95ce1138ac9))
+* **internal:** update pydantic dependency ([b48f1f4](https://github.com/llamastack/llama-stack-client-python/commit/b48f1f4d730751de94029733e9466edd08589464))
+* **internal:** update pyright exclude list ([402a316](https://github.com/llamastack/llama-stack-client-python/commit/402a3160399d676cdcd214ea8a776fd2ce7c2db3))
+* **types:** change optional parameter type from NotGiven to Omit ([c241e7e](https://github.com/llamastack/llama-stack-client-python/commit/c241e7e5f4986b304bf9003eaf3224c7b141a500))
+
+
+### Build System
+
+* Bump version to 0.2.19 ([034b3a9](https://github.com/llamastack/llama-stack-client-python/commit/034b3a9debdb85be0cbe42cb8711e37629d3376c))
+* Bump version to 0.2.20 ([b178953](https://github.com/llamastack/llama-stack-client-python/commit/b178953c51e1bfe5075fe942eacab90e0caad180))
+* Bump version to 0.2.21 ([bb25f4a](https://github.com/llamastack/llama-stack-client-python/commit/bb25f4aab73df2f336a3f3ff184476367a1b66b4))
+* Bump version to 0.2.22 ([ba825eb](https://github.com/llamastack/llama-stack-client-python/commit/ba825eb25975fa7ae8589365871e59d1b02168d8))
+
## 0.2.19-alpha.1 (2025-08-26)
Full Changelog: [v0.2.18-alpha.3...v0.2.19-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.2.18-alpha.3...v0.2.19-alpha.1)
diff --git a/api.md b/api.md
index a4396cd7..22c2120f 100644
--- a/api.md
+++ b/api.md
@@ -81,7 +81,12 @@ Methods:
Types:
```python
-from llama_stack_client.types import ResponseObject, ResponseObjectStream, ResponseListResponse
+from llama_stack_client.types import (
+ ResponseObject,
+ ResponseObjectStream,
+ ResponseListResponse,
+ ResponseDeleteResponse,
+)
```
Methods:
@@ -89,6 +94,7 @@ Methods:
- client.responses.create(\*\*params) -> ResponseObject
- client.responses.retrieve(response_id) -> ResponseObject
- client.responses.list(\*\*params) -> SyncOpenAICursorPage[ResponseListResponse]
+- client.responses.delete(response_id) -> ResponseDeleteResponse
## InputItems
@@ -242,6 +248,7 @@ from llama_stack_client.types import (
EmbeddingsResponse,
TokenLogProbs,
InferenceBatchChatCompletionResponse,
+ InferenceRerankResponse,
)
```
@@ -252,6 +259,7 @@ Methods:
- client.inference.chat_completion(\*\*params) -> ChatCompletionResponse
- client.inference.completion(\*\*params) -> CompletionResponse
- client.inference.embeddings(\*\*params) -> EmbeddingsResponse
+- client.inference.rerank(\*\*params) -> InferenceRerankResponse
# Embeddings
@@ -389,10 +397,22 @@ from llama_stack_client.types import ListModelsResponse, Model, ModelListRespons
Methods:
-- client.models.retrieve(model_id) -> Model
-- client.models.list() -> ModelListResponse
-- client.models.register(\*\*params) -> Model
-- client.models.unregister(model_id) -> None
+- client.models.retrieve(model_id) -> Model
+- client.models.list() -> ModelListResponse
+- client.models.register(\*\*params) -> Model
+- client.models.unregister(model_id) -> None
+
+## OpenAI
+
+Types:
+
+```python
+from llama_stack_client.types.models import OpenAIListResponse
+```
+
+Methods:
+
+- client.models.openai.list() -> OpenAIListResponse
# PostTraining
@@ -487,6 +507,7 @@ Methods:
- client.shields.retrieve(identifier) -> Shield
- client.shields.list() -> ShieldListResponse
+- client.shields.delete(identifier) -> None
- client.shields.register(\*\*params) -> Shield
# SyntheticDataGeneration
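
For quick orientation, a minimal sketch of the newly documented endpoints (`responses.delete`, `models.openai.list`, `shields.delete`). The client class name `LlamaStackClient`, no-argument construction, and the example identifiers are assumptions for illustration, not part of this diff:

```python
from llama_stack_client import LlamaStackClient  # assumed public client class

client = LlamaStackClient()

# delete a stored response by id (new in this release)
client.responses.delete("resp_123")

# list models through the OpenAI-compatible models endpoint (new in this release)
openai_models = client.models.openai.list()

# remove a previously registered shield (new in this release)
client.shields.delete("my-shield")
```
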
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index 1865499c..00000000
--- a/mypy.ini
+++ /dev/null
@@ -1,50 +0,0 @@
-[mypy]
-pretty = True
-show_error_codes = True
-
-# Exclude _files.py because mypy isn't smart enough to apply
-# the correct type narrowing and as this is an internal module
-# it's fine to just use Pyright.
-#
-# We also exclude our `tests` as mypy doesn't always infer
-# types correctly and Pyright will still catch any type errors.
-exclude = ^(src/llama_stack_client/_files\.py|_dev/.*\.py|tests/.*)$
-
-strict_equality = True
-implicit_reexport = True
-check_untyped_defs = True
-no_implicit_optional = True
-
-warn_return_any = True
-warn_unreachable = True
-warn_unused_configs = True
-
-# Turn these options off as it could cause conflicts
-# with the Pyright options.
-warn_unused_ignores = False
-warn_redundant_casts = False
-
-disallow_any_generics = True
-disallow_untyped_defs = True
-disallow_untyped_calls = True
-disallow_subclassing_any = True
-disallow_incomplete_defs = True
-disallow_untyped_decorators = True
-cache_fine_grained = True
-
-# By default, mypy reports an error if you assign a value to the result
-# of a function call that doesn't return anything. We do this in our test
-# cases:
-# ```
-# result = ...
-# assert result is None
-# ```
-# Changing this codegen to make mypy happy would increase complexity
-# and would not be worth it.
-disable_error_code = func-returns-value,overload-cannot-match
-
-# https://github.com/python/mypy/issues/12162
-[mypy.overrides]
-module = "black.files.*"
-ignore_errors = true
-ignore_missing_imports = true
diff --git a/pyproject.toml b/pyproject.toml
index 6ed4719e..148c9a92 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
-version = "0.2.22"
+version = "0.2.23-alpha.1"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "MIT"
@@ -46,45 +46,36 @@ aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"]
[tool.uv]
managed = true
required-version = ">=0.5.0"
+conflicts = [
+ [
+ { group = "pydantic-v1" },
+ { group = "pydantic-v2" },
+ ],
+]
+
+[dependency-groups]
# version pins are in uv.lock
-dev-dependencies = [
+dev = [
"pyright==1.1.399",
"mypy",
"respx",
- "pytest",
+ "pytest>=7.1.1",
"pytest-asyncio",
"ruff",
"time-machine",
"dirty-equals>=0.6.0",
"importlib-metadata>=6.7.0",
"rich>=13.7.1",
- "nest_asyncio==1.6.0",
"pytest-xdist>=3.6.1",
+ "pre-commit",
+ "black",
]
-conflicts = [
- [
- { group = "pydantic-v1" },
- { group = "pydantic-v2" },
- ],
-]
-
-[dependency-groups]
pydantic-v1 = [
"pydantic>=1.9.0,<2",
]
pydantic-v2 = [
"pydantic>=2,<3",
]
-dev = [
- "pytest>=7.1.1",
- "pytest-asyncio",
- "pre-commit",
- "black",
- "ruff",
- "mypy",
- "respx",
- "dirty-equals"
-]
[build-system]
@@ -135,6 +126,59 @@ filterwarnings = [
"error"
]
+[tool.mypy]
+pretty = true
+show_error_codes = true
+
+# Exclude _files.py because mypy isn't smart enough to apply
+# the correct type narrowing and as this is an internal module
+# it's fine to just use Pyright.
+#
+# We also exclude our `tests` as mypy doesn't always infer
+# types correctly and Pyright will still catch any type errors.
+exclude = ['src/llama_stack_client/_files.py', '_dev/.*.py', 'tests/.*']
+
+strict_equality = true
+implicit_reexport = true
+check_untyped_defs = true
+no_implicit_optional = true
+
+warn_return_any = true
+warn_unreachable = true
+warn_unused_configs = true
+
+# Turn these options off as it could cause conflicts
+# with the Pyright options.
+warn_unused_ignores = false
+warn_redundant_casts = false
+
+disallow_any_generics = true
+disallow_untyped_defs = true
+disallow_untyped_calls = true
+disallow_subclassing_any = true
+disallow_incomplete_defs = true
+disallow_untyped_decorators = true
+cache_fine_grained = true
+
+# By default, mypy reports an error if you assign a value to the result
+# of a function call that doesn't return anything. We do this in our test
+# cases:
+# ```
+# result = ...
+# assert result is None
+# ```
+# Changing this codegen to make mypy happy would increase complexity
+# and would not be worth it.
+disable_error_code = "func-returns-value,overload-cannot-match"
+
+# https://github.com/python/mypy/issues/12162
+
+[[tool.mypy.overrides]]
+module = "black.files.*"
+ignore_errors = true
+ignore_missing_imports = true
+
+
[tool.ruff]
line-length = 120
output-format = "grouped"
diff --git a/requirements-dev.lock b/requirements-dev.lock
index 5dc3c64c..003a453c 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -3,29 +3,40 @@
-e .
annotated-types==0.7.0
# via pydantic
-anyio==4.5.2 ; python_full_version < '3.9'
- # via
- # httpx
- # llama-stack-client
-anyio==4.8.0 ; python_full_version >= '3.9'
+anyio==4.8.0
# via
# httpx
# llama-stack-client
+black==25.1.0
certifi==2024.12.14
# via
# httpcore
# httpx
+ # requests
+cfgv==3.4.0
+ # via pre-commit
+charset-normalizer==3.4.3
+ # via requests
+click==8.2.1
+ # via
+ # black
+ # llama-stack-client
colorama==0.4.6 ; sys_platform == 'win32'
- # via pytest
+ # via
+ # click
+ # pytest
+ # tqdm
dirty-equals==0.9.0
+distlib==0.4.0
+ # via virtualenv
distro==1.9.0
# via llama-stack-client
-exceptiongroup==1.2.2 ; python_full_version < '3.11'
- # via
- # anyio
- # pytest
execnet==2.1.1
# via pytest-xdist
+filelock==3.19.1
+ # via virtualenv
+fire==0.7.1
+ # via llama-stack-client
h11==0.16.0
# via httpcore
httpcore==1.0.9
@@ -34,12 +45,14 @@ httpx==0.28.1
# via
# llama-stack-client
# respx
+identify==2.6.14
+ # via pre-commit
idna==3.10
# via
# anyio
# httpx
-importlib-metadata==8.5.0 ; python_full_version < '3.9'
-importlib-metadata==8.6.1 ; python_full_version >= '3.9'
+ # requests
+importlib-metadata==8.6.1
iniconfig==2.0.0
# via pytest
markdown-it-py==3.0.0
@@ -48,40 +61,64 @@ mdurl==0.1.2
# via markdown-it-py
mypy==1.14.1
mypy-extensions==1.0.0
- # via mypy
-nest-asyncio==1.6.0
+ # via
+ # black
+ # mypy
nodeenv==1.9.1
- # via pyright
+ # via
+ # pre-commit
+ # pyright
+numpy==2.3.3
+ # via pandas
packaging==24.2
- # via pytest
+ # via
+ # black
+ # pytest
+pandas==2.3.2
+ # via llama-stack-client
+pathspec==0.12.1
+ # via black
+platformdirs==4.4.0
+ # via
+ # black
+ # virtualenv
pluggy==1.5.0
# via pytest
-pydantic==2.10.3
+pre-commit==4.3.0
+prompt-toolkit==3.0.52
+ # via llama-stack-client
+pyaml==25.7.0
# via llama-stack-client
-pydantic-core==2.27.1
+pydantic==2.11.9
+ # via llama-stack-client
+pydantic-core==2.33.2
# via pydantic
pygments==2.19.1
# via
# pytest
# rich
pyright==1.1.399
-pytest==8.3.5 ; python_full_version < '3.9'
- # via
- # pytest-asyncio
- # pytest-xdist
-pytest==8.4.1 ; python_full_version >= '3.9'
+pytest==8.4.1
# via
# pytest-asyncio
# pytest-xdist
pytest-asyncio==0.24.0
-pytest-xdist==3.6.1 ; python_full_version < '3.9'
-pytest-xdist==3.7.0 ; python_full_version >= '3.9'
+pytest-xdist==3.7.0
python-dateutil==2.9.0.post0
- # via time-machine
-pytz==2024.2 ; python_full_version < '3.9'
- # via dirty-equals
+ # via
+ # pandas
+ # time-machine
+pytz==2024.2
+ # via pandas
+pyyaml==6.0.2
+ # via
+ # pre-commit
+ # pyaml
+requests==2.32.5
+ # via llama-stack-client
respx==0.22.0
rich==13.9.4
+ # via llama-stack-client
ruff==0.9.4
six==1.17.0
# via python-dateutil
@@ -89,23 +126,31 @@ sniffio==1.3.1
# via
# anyio
# llama-stack-client
-time-machine==2.15.0 ; python_full_version < '3.9'
-time-machine==2.16.0 ; python_full_version >= '3.9'
-tomli==2.2.1 ; python_full_version < '3.11'
+termcolor==3.1.0
# via
- # mypy
- # pytest
+ # fire
+ # llama-stack-client
+time-machine==2.16.0
+tqdm==4.67.1
+ # via llama-stack-client
typing-extensions==4.12.2
# via
- # annotated-types
# anyio
# llama-stack-client
# mypy
# pydantic
# pydantic-core
# pyright
- # rich
-zipp==3.20.2 ; python_full_version < '3.9'
- # via importlib-metadata
-zipp==3.21.0 ; python_full_version >= '3.9'
+ # typing-inspection
+typing-inspection==0.4.1
+ # via pydantic
+tzdata==2025.2
+ # via pandas
+urllib3==2.5.0
+ # via requests
+virtualenv==20.34.0
+ # via pre-commit
+wcwidth==0.2.13
+ # via prompt-toolkit
+zipp==3.21.0
# via importlib-metadata
diff --git a/scripts/bootstrap b/scripts/bootstrap
index bd692ad7..4638ec69 100755
--- a/scripts/bootstrap
+++ b/scripts/bootstrap
@@ -4,10 +4,18 @@ set -e
cd "$(dirname "$0")/.."
-if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ]; then
+if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then
brew bundle check >/dev/null 2>&1 || {
- echo "==> Installing Homebrew dependencies…"
- brew bundle
+ echo -n "==> Install Homebrew dependencies? (y/N): "
+ read -r response
+ case "$response" in
+ [yY][eE][sS]|[yY])
+ brew bundle
+ ;;
+ *)
+ ;;
+ esac
+ echo
}
fi
diff --git a/scripts/test b/scripts/test
index 17a36dda..77ecb8f4 100755
--- a/scripts/test
+++ b/scripts/test
@@ -60,7 +60,10 @@ export DEFER_PYDANTIC_BUILD=false
function run_tests() {
echo "==> Running tests with Pydantic v2"
- uv run --all-extras pytest -W ignore::DeprecationWarning "$@"
+ uv run --isolated --all-extras pytest -W ignore::DeprecationWarning "$@"
+
+ echo "==> Running tests with Pydantic v1"
+ uv run --isolated --all-extras --group=pydantic-v1 pytest -W ignore::DeprecationWarning "$@"
}
# If UV_PYTHON is already set in the environment, just run the command once
diff --git a/src/llama_stack_client/__init__.py b/src/llama_stack_client/__init__.py
index f8622f3f..dea90c02 100644
--- a/src/llama_stack_client/__init__.py
+++ b/src/llama_stack_client/__init__.py
@@ -3,7 +3,7 @@
import typing as _t
from . import types
-from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes
+from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes, omit, not_given
from ._utils import file_from_path
from ._client import (
Client,
@@ -58,7 +58,9 @@
"ProxiesTypes",
"NotGiven",
"NOT_GIVEN",
+ "not_given",
"Omit",
+ "omit",
"LlamaStackClientError",
"APIError",
"APIStatusError",
diff --git a/src/llama_stack_client/_base_client.py b/src/llama_stack_client/_base_client.py
index b5e326e9..5320fcdb 100644
--- a/src/llama_stack_client/_base_client.py
+++ b/src/llama_stack_client/_base_client.py
@@ -42,7 +42,6 @@
from ._qs import Querystring
from ._files import to_httpx_files, async_to_httpx_files
from ._types import (
- NOT_GIVEN,
Body,
Omit,
Query,
@@ -57,9 +56,10 @@
RequestOptions,
HttpxRequestFiles,
ModelBuilderProtocol,
+ not_given,
)
from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import PYDANTIC_V2, model_copy, model_dump
+from ._compat import PYDANTIC_V1, model_copy, model_dump
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
APIResponse,
@@ -145,9 +145,9 @@ def __init__(
def __init__(
self,
*,
- url: URL | NotGiven = NOT_GIVEN,
- json: Body | NotGiven = NOT_GIVEN,
- params: Query | NotGiven = NOT_GIVEN,
+ url: URL | NotGiven = not_given,
+ json: Body | NotGiven = not_given,
+ params: Query | NotGiven = not_given,
) -> None:
self.url = url
self.json = json
@@ -232,7 +232,7 @@ def _set_private_attributes(
model: Type[_T],
options: FinalRequestOptions,
) -> None:
- if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None:
+ if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
self.__pydantic_private__ = {}
self._model = model
@@ -320,7 +320,7 @@ def _set_private_attributes(
client: AsyncAPIClient,
options: FinalRequestOptions,
) -> None:
- if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None:
+ if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None:
self.__pydantic_private__ = {}
self._model = model
@@ -595,7 +595,7 @@ def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalReques
# we internally support defining a temporary header to override the
# default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response`
# see _response.py for implementation details
- override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, NOT_GIVEN)
+ override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, not_given)
if is_given(override_cast_to):
options.headers = headers
return cast(Type[ResponseT], override_cast_to)
@@ -825,7 +825,7 @@ def __init__(
version: str,
base_url: str | URL,
max_retries: int = DEFAULT_MAX_RETRIES,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.Client | None = None,
custom_headers: Mapping[str, str] | None = None,
custom_query: Mapping[str, object] | None = None,
@@ -1356,7 +1356,7 @@ def __init__(
base_url: str | URL,
_strict_response_validation: bool,
max_retries: int = DEFAULT_MAX_RETRIES,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.AsyncClient | None = None,
custom_headers: Mapping[str, str] | None = None,
custom_query: Mapping[str, object] | None = None,
@@ -1818,8 +1818,8 @@ def make_request_options(
extra_query: Query | None = None,
extra_body: Body | None = None,
idempotency_key: str | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- post_parser: PostParser | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ post_parser: PostParser | NotGiven = not_given,
) -> RequestOptions:
"""Create a dict of type RequestOptions without keys of NotGiven values."""
options: RequestOptions = {}
diff --git a/src/llama_stack_client/_client.py b/src/llama_stack_client/_client.py
index b12b2fda..3feccb40 100644
--- a/src/llama_stack_client/_client.py
+++ b/src/llama_stack_client/_client.py
@@ -4,7 +4,7 @@
import os
import json
-from typing import TYPE_CHECKING, Any, Union, Mapping
+from typing import TYPE_CHECKING, Any, Mapping
from typing_extensions import Self, override
import httpx
@@ -12,13 +12,13 @@
from . import _exceptions
from ._qs import Querystring
from ._types import (
- NOT_GIVEN,
Omit,
Timeout,
NotGiven,
Transport,
ProxiesTypes,
RequestOptions,
+ not_given,
)
from ._utils import is_given, get_async_library
from ._compat import cached_property
@@ -64,7 +64,6 @@
)
from .resources.files import FilesResource, AsyncFilesResource
from .resources.tools import ToolsResource, AsyncToolsResource
- from .resources.models import ModelsResource, AsyncModelsResource
from .resources.routes import RoutesResource, AsyncRoutesResource
from .resources.safety import SafetyResource, AsyncSafetyResource
from .resources.inspect import InspectResource, AsyncInspectResource
@@ -84,6 +83,7 @@
from .resources.completions import CompletionsResource, AsyncCompletionsResource
from .resources.moderations import ModerationsResource, AsyncModerationsResource
from .resources.agents.agents import AgentsResource, AsyncAgentsResource
+ from .resources.models.models import ModelsResource, AsyncModelsResource
from .resources.scoring_functions import ScoringFunctionsResource, AsyncScoringFunctionsResource
from .resources.responses.responses import ResponsesResource, AsyncResponsesResource
from .resources.synthetic_data_generation import (
@@ -115,7 +115,7 @@ def __init__(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -366,9 +366,9 @@ def copy(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.Client | None = None,
- max_retries: int | NotGiven = NOT_GIVEN,
+ max_retries: int | NotGiven = not_given,
default_headers: Mapping[str, str] | None = None,
set_default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -455,7 +455,7 @@ def __init__(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
@@ -706,9 +706,9 @@ def copy(
*,
api_key: str | None = None,
base_url: str | httpx.URL | None = None,
- timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | Timeout | None | NotGiven = not_given,
http_client: httpx.AsyncClient | None = None,
- max_retries: int | NotGiven = NOT_GIVEN,
+ max_retries: int | NotGiven = not_given,
default_headers: Mapping[str, str] | None = None,
set_default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
diff --git a/src/llama_stack_client/_compat.py b/src/llama_stack_client/_compat.py
index 92d9ee61..bdef67f0 100644
--- a/src/llama_stack_client/_compat.py
+++ b/src/llama_stack_client/_compat.py
@@ -12,14 +12,13 @@
_T = TypeVar("_T")
_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)
-# --------------- Pydantic v2 compatibility ---------------
+# --------------- Pydantic v2, v3 compatibility ---------------
# Pyright incorrectly reports some of our functions as overriding a method when they don't
# pyright: reportIncompatibleMethodOverride=false
-PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+PYDANTIC_V1 = pydantic.VERSION.startswith("1.")
-# v1 re-exports
if TYPE_CHECKING:
def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001
@@ -44,90 +43,92 @@ def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001
...
else:
- if PYDANTIC_V2:
- from pydantic.v1.typing import (
+ # v1 re-exports
+ if PYDANTIC_V1:
+ from pydantic.typing import (
get_args as get_args,
is_union as is_union,
get_origin as get_origin,
is_typeddict as is_typeddict,
is_literal_type as is_literal_type,
)
- from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
+ from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
else:
- from pydantic.typing import (
+ from ._utils import (
get_args as get_args,
is_union as is_union,
get_origin as get_origin,
+ parse_date as parse_date,
is_typeddict as is_typeddict,
+ parse_datetime as parse_datetime,
is_literal_type as is_literal_type,
)
- from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
# refactored config
if TYPE_CHECKING:
from pydantic import ConfigDict as ConfigDict
else:
- if PYDANTIC_V2:
- from pydantic import ConfigDict
- else:
+ if PYDANTIC_V1:
# TODO: provide an error message here?
ConfigDict = None
+ else:
+ from pydantic import ConfigDict as ConfigDict
# renamed methods / properties
def parse_obj(model: type[_ModelT], value: object) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_validate(value)
- else:
+ if PYDANTIC_V1:
return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+ else:
+ return model.model_validate(value)
def field_is_required(field: FieldInfo) -> bool:
- if PYDANTIC_V2:
- return field.is_required()
- return field.required # type: ignore
+ if PYDANTIC_V1:
+ return field.required # type: ignore
+ return field.is_required()
def field_get_default(field: FieldInfo) -> Any:
value = field.get_default()
- if PYDANTIC_V2:
- from pydantic_core import PydanticUndefined
-
- if value == PydanticUndefined:
- return None
+ if PYDANTIC_V1:
return value
+ from pydantic_core import PydanticUndefined
+
+ if value == PydanticUndefined:
+ return None
return value
def field_outer_type(field: FieldInfo) -> Any:
- if PYDANTIC_V2:
- return field.annotation
- return field.outer_type_ # type: ignore
+ if PYDANTIC_V1:
+ return field.outer_type_ # type: ignore
+ return field.annotation
def get_model_config(model: type[pydantic.BaseModel]) -> Any:
- if PYDANTIC_V2:
- return model.model_config
- return model.__config__ # type: ignore
+ if PYDANTIC_V1:
+ return model.__config__ # type: ignore
+ return model.model_config
def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]:
- if PYDANTIC_V2:
- return model.model_fields
- return model.__fields__ # type: ignore
+ if PYDANTIC_V1:
+ return model.__fields__ # type: ignore
+ return model.model_fields
def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_copy(deep=deep)
- return model.copy(deep=deep) # type: ignore
+ if PYDANTIC_V1:
+ return model.copy(deep=deep) # type: ignore
+ return model.model_copy(deep=deep)
def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
- if PYDANTIC_V2:
- return model.model_dump_json(indent=indent)
- return model.json(indent=indent) # type: ignore
+ if PYDANTIC_V1:
+ return model.json(indent=indent) # type: ignore
+ return model.model_dump_json(indent=indent)
def model_dump(
@@ -139,14 +140,14 @@ def model_dump(
warnings: bool = True,
mode: Literal["json", "python"] = "python",
) -> dict[str, Any]:
- if PYDANTIC_V2 or hasattr(model, "model_dump"):
+ if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
return model.model_dump(
mode=mode,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
# warnings are not supported in Pydantic v1
- warnings=warnings if PYDANTIC_V2 else True,
+ warnings=True if PYDANTIC_V1 else warnings,
)
return cast(
"dict[str, Any]",
@@ -159,9 +160,9 @@ def model_dump(
def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
- if PYDANTIC_V2:
- return model.model_validate(data)
- return model.parse_obj(data) # pyright: ignore[reportDeprecated]
+ if PYDANTIC_V1:
+ return model.parse_obj(data) # pyright: ignore[reportDeprecated]
+ return model.model_validate(data)
# generic models
@@ -170,17 +171,16 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT:
class GenericModel(pydantic.BaseModel): ...
else:
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ import pydantic.generics
+
+ class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
+ else:
# there no longer needs to be a distinction in v2 but
# we still have to create our own subclass to avoid
# inconsistent MRO ordering errors
class GenericModel(pydantic.BaseModel): ...
- else:
- import pydantic.generics
-
- class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ...
-
# cached properties
if TYPE_CHECKING:
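
The compat helpers now branch on `PYDANTIC_V1` instead of `PYDANTIC_V2`, so Pydantic v2 (and a future v3) take the default path. A rough sketch of the version-agnostic helpers in use; `User` is a hypothetical model:

```python
import pydantic

from llama_stack_client._compat import PYDANTIC_V1, model_dump, model_parse


class User(pydantic.BaseModel):  # hypothetical model for illustration
    name: str


user = model_parse(User, {"name": "ada"})  # .parse_obj() on v1, .model_validate() otherwise
payload = model_dump(user)                 # .dict() on v1, .model_dump() otherwise
print(PYDANTIC_V1, payload)
```
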
diff --git a/src/llama_stack_client/_models.py b/src/llama_stack_client/_models.py
index b8387ce9..6a3cd1d2 100644
--- a/src/llama_stack_client/_models.py
+++ b/src/llama_stack_client/_models.py
@@ -50,7 +50,7 @@
strip_annotated_type,
)
from ._compat import (
- PYDANTIC_V2,
+ PYDANTIC_V1,
ConfigDict,
GenericModel as BaseGenericModel,
get_args,
@@ -81,11 +81,7 @@ class _ConfigProtocol(Protocol):
class BaseModel(pydantic.BaseModel):
- if PYDANTIC_V2:
- model_config: ClassVar[ConfigDict] = ConfigDict(
- extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
- )
- else:
+ if PYDANTIC_V1:
@property
@override
@@ -95,6 +91,10 @@ def model_fields_set(self) -> set[str]:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
extra: Any = pydantic.Extra.allow # type: ignore
+ else:
+ model_config: ClassVar[ConfigDict] = ConfigDict(
+ extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true"))
+ )
def to_dict(
self,
@@ -215,25 +215,25 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
if key not in model_fields:
parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value
- if PYDANTIC_V2:
- _extra[key] = parsed
- else:
+ if PYDANTIC_V1:
_fields_set.add(key)
fields_values[key] = parsed
+ else:
+ _extra[key] = parsed
object.__setattr__(m, "__dict__", fields_values)
- if PYDANTIC_V2:
- # these properties are copied from Pydantic's `model_construct()` method
- object.__setattr__(m, "__pydantic_private__", None)
- object.__setattr__(m, "__pydantic_extra__", _extra)
- object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
- else:
+ if PYDANTIC_V1:
# init_private_attributes() does not exist in v2
m._init_private_attributes() # type: ignore
# copied from Pydantic v1's `construct()` method
object.__setattr__(m, "__fields_set__", _fields_set)
+ else:
+ # these properties are copied from Pydantic's `model_construct()` method
+ object.__setattr__(m, "__pydantic_private__", None)
+ object.__setattr__(m, "__pydantic_extra__", _extra)
+ object.__setattr__(m, "__pydantic_fields_set__", _fields_set)
return m
@@ -243,7 +243,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride]
# although not in practice
model_construct = construct
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
# we define aliases for some of the new pydantic v2 methods so
# that we can just document these methods without having to specify
# a specific pydantic version as some users may not know which
@@ -256,7 +256,7 @@ def model_dump(
mode: Literal["json", "python"] | str = "python",
include: IncEx | None = None,
exclude: IncEx | None = None,
- by_alias: bool = False,
+ by_alias: bool | None = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
@@ -264,6 +264,7 @@ def model_dump(
warnings: bool | Literal["none", "warn", "error"] = True,
context: dict[str, Any] | None = None,
serialize_as_any: bool = False,
+ fallback: Callable[[Any], Any] | None = None,
) -> dict[str, Any]:
"""Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
@@ -295,16 +296,18 @@ def model_dump(
raise ValueError("context is only supported in Pydantic v2")
if serialize_as_any != False:
raise ValueError("serialize_as_any is only supported in Pydantic v2")
+ if fallback is not None:
+ raise ValueError("fallback is only supported in Pydantic v2")
dumped = super().dict( # pyright: ignore[reportDeprecated]
include=include,
exclude=exclude,
- by_alias=by_alias,
+ by_alias=by_alias if by_alias is not None else False,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
- return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped
+ return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped
@override
def model_dump_json(
@@ -313,13 +316,14 @@ def model_dump_json(
indent: int | None = None,
include: IncEx | None = None,
exclude: IncEx | None = None,
- by_alias: bool = False,
+ by_alias: bool | None = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
round_trip: bool = False,
warnings: bool | Literal["none", "warn", "error"] = True,
context: dict[str, Any] | None = None,
+ fallback: Callable[[Any], Any] | None = None,
serialize_as_any: bool = False,
) -> str:
"""Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -348,11 +352,13 @@ def model_dump_json(
raise ValueError("context is only supported in Pydantic v2")
if serialize_as_any != False:
raise ValueError("serialize_as_any is only supported in Pydantic v2")
+ if fallback is not None:
+ raise ValueError("fallback is only supported in Pydantic v2")
return super().json( # type: ignore[reportDeprecated]
indent=indent,
include=include,
exclude=exclude,
- by_alias=by_alias,
+ by_alias=by_alias if by_alias is not None else False,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
@@ -363,10 +369,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
if value is None:
return field_get_default(field)
- if PYDANTIC_V2:
- type_ = field.annotation
- else:
+ if PYDANTIC_V1:
type_ = cast(type, field.outer_type_) # type: ignore
+ else:
+ type_ = field.annotation # type: ignore
if type_ is None:
raise RuntimeError(f"Unexpected field type is None for {key}")
@@ -375,7 +381,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None:
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
# TODO
return None
@@ -628,30 +634,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any,
for variant in get_args(union):
variant = strip_annotated_type(variant)
if is_basemodel_type(variant):
- if PYDANTIC_V2:
- field = _extract_field_schema_pv2(variant, discriminator_field_name)
- if not field:
+ if PYDANTIC_V1:
+ field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
+ if not field_info:
continue
# Note: if one variant defines an alias then they all should
- discriminator_alias = field.get("serialization_alias")
-
- field_schema = field["schema"]
+ discriminator_alias = field_info.alias
- if field_schema["type"] == "literal":
- for entry in cast("LiteralSchema", field_schema)["expected"]:
+ if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
+ for entry in get_args(annotation):
if isinstance(entry, str):
mapping[entry] = variant
else:
- field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
- if not field_info:
+ field = _extract_field_schema_pv2(variant, discriminator_field_name)
+ if not field:
continue
# Note: if one variant defines an alias then they all should
- discriminator_alias = field_info.alias
+ discriminator_alias = field.get("serialization_alias")
- if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation):
- for entry in get_args(annotation):
+ field_schema = field["schema"]
+
+ if field_schema["type"] == "literal":
+ for entry in cast("LiteralSchema", field_schema)["expected"]:
if isinstance(entry, str):
mapping[entry] = variant
@@ -714,7 +720,7 @@ class GenericModel(BaseGenericModel, BaseModel):
pass
-if PYDANTIC_V2:
+if not PYDANTIC_V1:
from pydantic import TypeAdapter as _TypeAdapter
_CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
@@ -782,12 +788,12 @@ class FinalRequestOptions(pydantic.BaseModel):
json_data: Union[Body, None] = None
extra_json: Union[AnyMapping, None] = None
- if PYDANTIC_V2:
- model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
- else:
+ if PYDANTIC_V1:
class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated]
arbitrary_types_allowed: bool = True
+ else:
+ model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
def get_max_retries(self, max_retries: int) -> int:
if isinstance(self.max_retries, NotGiven):
@@ -820,9 +826,9 @@ def construct( # type: ignore
key: strip_not_given(value)
for key, value in values.items()
}
- if PYDANTIC_V2:
- return super().model_construct(_fields_set, **kwargs)
- return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated]
+ if PYDANTIC_V1:
+ return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated]
+ return super().model_construct(_fields_set, **kwargs)
if not TYPE_CHECKING:
# type checkers incorrectly complain about this assignment
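
Under Pydantic v1 the SDK-provided `model_dump`/`model_dump_json` shims now accept `by_alias=None` (treated as `False`) and reject the v2-only `fallback` argument explicitly. A hedged sketch of the intended behaviour, using a hypothetical model:

```python
from llama_stack_client._models import BaseModel


class Item(BaseModel):  # hypothetical model for illustration
    name: str


item = Item.construct(name="widget")
print(item.model_dump(mode="json"))  # works on both Pydantic majors
# item.model_dump(fallback=str)      # raises ValueError when running on Pydantic v1
```
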
diff --git a/src/llama_stack_client/_qs.py b/src/llama_stack_client/_qs.py
index 274320ca..ada6fd3f 100644
--- a/src/llama_stack_client/_qs.py
+++ b/src/llama_stack_client/_qs.py
@@ -4,7 +4,7 @@
from urllib.parse import parse_qs, urlencode
from typing_extensions import Literal, get_args
-from ._types import NOT_GIVEN, NotGiven, NotGivenOr
+from ._types import NotGiven, not_given
from ._utils import flatten
_T = TypeVar("_T")
@@ -41,8 +41,8 @@ def stringify(
self,
params: Params,
*,
- array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
- nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ array_format: ArrayFormat | NotGiven = not_given,
+ nested_format: NestedFormat | NotGiven = not_given,
) -> str:
return urlencode(
self.stringify_items(
@@ -56,8 +56,8 @@ def stringify_items(
self,
params: Params,
*,
- array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
- nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ array_format: ArrayFormat | NotGiven = not_given,
+ nested_format: NestedFormat | NotGiven = not_given,
) -> list[tuple[str, str]]:
opts = Options(
qs=self,
@@ -143,8 +143,8 @@ def __init__(
self,
qs: Querystring = _qs,
*,
- array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN,
- nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN,
+ array_format: ArrayFormat | NotGiven = not_given,
+ nested_format: NestedFormat | NotGiven = not_given,
) -> None:
self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format
self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format
diff --git a/src/llama_stack_client/_types.py b/src/llama_stack_client/_types.py
index 63631322..32548daa 100644
--- a/src/llama_stack_client/_types.py
+++ b/src/llama_stack_client/_types.py
@@ -13,10 +13,21 @@
Mapping,
TypeVar,
Callable,
+ Iterator,
Optional,
Sequence,
)
-from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable
+from typing_extensions import (
+ Set,
+ Literal,
+ Protocol,
+ TypeAlias,
+ TypedDict,
+ SupportsIndex,
+ overload,
+ override,
+ runtime_checkable,
+)
import httpx
import pydantic
@@ -106,18 +117,21 @@ class RequestOptions(TypedDict, total=False):
# Sentinel class used until PEP 0661 is accepted
class NotGiven:
"""
- A sentinel singleton class used to distinguish omitted keyword arguments
- from those passed in with the value None (which may have different behavior).
+ For parameters with a meaningful None value, we need to distinguish between
+ the user explicitly passing None, and the user not passing the parameter at
+ all.
+
+ User code shouldn't need to use not_given directly.
For example:
```py
- def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ...
+ def create(timeout: Timeout | None | NotGiven = not_given): ...
- get(timeout=1) # 1s timeout
- get(timeout=None) # No timeout
- get() # Default timeout behavior, which may not be statically known at the method definition.
+ create(timeout=1) # 1s timeout
+ create(timeout=None) # No timeout
+ create() # Default timeout behavior
```
"""
@@ -129,13 +143,14 @@ def __repr__(self) -> str:
return "NOT_GIVEN"
-NotGivenOr = Union[_T, NotGiven]
+not_given = NotGiven()
+# for backwards compatibility:
NOT_GIVEN = NotGiven()
class Omit:
- """In certain situations you need to be able to represent a case where a default value has
- to be explicitly removed and `None` is not an appropriate substitute, for example:
+ """
+ To explicitly omit something from being sent in a request, use `omit`.
```py
# as the default `Content-Type` header is `application/json` that will be sent
@@ -145,8 +160,8 @@ class Omit:
# to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983'
client.post(..., headers={"Content-Type": "multipart/form-data"})
- # instead you can remove the default `application/json` header by passing Omit
- client.post(..., headers={"Content-Type": Omit()})
+ # instead you can remove the default `application/json` header by passing omit
+ client.post(..., headers={"Content-Type": omit})
```
"""
@@ -154,6 +169,9 @@ def __bool__(self) -> Literal[False]:
return False
+omit = Omit()
+
+
@runtime_checkable
class ModelBuilderProtocol(Protocol):
@classmethod
@@ -217,3 +235,26 @@ class _GenericAlias(Protocol):
class HttpxSendArgs(TypedDict, total=False):
auth: httpx.Auth
follow_redirects: bool
+
+
+_T_co = TypeVar("_T_co", covariant=True)
+
+
+if TYPE_CHECKING:
+ # This works because str.__contains__ does not accept object (either in typeshed or at runtime)
+ # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285
+ class SequenceNotStr(Protocol[_T_co]):
+ @overload
+ def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...
+ @overload
+ def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...
+ def __contains__(self, value: object, /) -> bool: ...
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[_T_co]: ...
+ def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ...
+ def count(self, value: Any, /) -> int: ...
+ def __reversed__(self) -> Iterator[_T_co]: ...
+else:
+ # just point this to a normal `Sequence` at runtime to avoid having to special case
+ # deserializing our custom sequence type
+ SequenceNotStr = Sequence
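
`SequenceNotStr` is a typing-only protocol that accepts lists and tuples but rejects plain strings (since `str.__contains__` does not accept `object`); at runtime it simply aliases `Sequence`. A small sketch of how a parameter might be annotated with it:

```python
from llama_stack_client._types import SequenceNotStr


def tag_items(tags: SequenceNotStr[str]) -> list[str]:
    # lists and tuples of strings type-check; a bare "abc" string is rejected by type checkers
    return [tag.upper() for tag in tags]


print(tag_items(["a", "b"]))
print(tag_items(("c", "d")))
```
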
diff --git a/src/llama_stack_client/_utils/__init__.py b/src/llama_stack_client/_utils/__init__.py
index d4fda26f..dc64e29a 100644
--- a/src/llama_stack_client/_utils/__init__.py
+++ b/src/llama_stack_client/_utils/__init__.py
@@ -10,7 +10,6 @@
lru_cache as lru_cache,
is_mapping as is_mapping,
is_tuple_t as is_tuple_t,
- parse_date as parse_date,
is_iterable as is_iterable,
is_sequence as is_sequence,
coerce_float as coerce_float,
@@ -23,7 +22,6 @@
coerce_boolean as coerce_boolean,
coerce_integer as coerce_integer,
file_from_path as file_from_path,
- parse_datetime as parse_datetime,
strip_not_given as strip_not_given,
deepcopy_minimal as deepcopy_minimal,
get_async_library as get_async_library,
@@ -32,12 +30,20 @@
maybe_coerce_boolean as maybe_coerce_boolean,
maybe_coerce_integer as maybe_coerce_integer,
)
+from ._compat import (
+ get_args as get_args,
+ is_union as is_union,
+ get_origin as get_origin,
+ is_typeddict as is_typeddict,
+ is_literal_type as is_literal_type,
+)
from ._typing import (
is_list_type as is_list_type,
is_union_type as is_union_type,
extract_type_arg as extract_type_arg,
is_iterable_type as is_iterable_type,
is_required_type as is_required_type,
+ is_sequence_type as is_sequence_type,
is_annotated_type as is_annotated_type,
is_type_alias_type as is_type_alias_type,
strip_annotated_type as strip_annotated_type,
@@ -55,3 +61,4 @@
function_has_argument as function_has_argument,
assert_signatures_in_sync as assert_signatures_in_sync,
)
+from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
diff --git a/src/llama_stack_client/_utils/_compat.py b/src/llama_stack_client/_utils/_compat.py
new file mode 100644
index 00000000..dd703233
--- /dev/null
+++ b/src/llama_stack_client/_utils/_compat.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import sys
+import typing_extensions
+from typing import Any, Type, Union, Literal, Optional
+from datetime import date, datetime
+from typing_extensions import get_args as _get_args, get_origin as _get_origin
+
+from .._types import StrBytesIntFloat
+from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
+
+_LITERAL_TYPES = {Literal, typing_extensions.Literal}
+
+
+def get_args(tp: type[Any]) -> tuple[Any, ...]:
+ return _get_args(tp)
+
+
+def get_origin(tp: type[Any]) -> type[Any] | None:
+ return _get_origin(tp)
+
+
+def is_union(tp: Optional[Type[Any]]) -> bool:
+ if sys.version_info < (3, 10):
+ return tp is Union # type: ignore[comparison-overlap]
+ else:
+ import types
+
+ return tp is Union or tp is types.UnionType
+
+
+def is_typeddict(tp: Type[Any]) -> bool:
+ return typing_extensions.is_typeddict(tp)
+
+
+def is_literal_type(tp: Type[Any]) -> bool:
+ return get_origin(tp) in _LITERAL_TYPES
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+ return _parse_date(value)
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+ return _parse_datetime(value)
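
These helpers replace the re-exports that previously came from `pydantic.typing`. A quick sketch of what they report:

```python
from typing import List, Union, Optional
from typing_extensions import Literal, TypedDict

from llama_stack_client._utils import (
    get_args,
    get_origin,
    is_union,
    is_typeddict,
    is_literal_type,
)

assert is_union(get_origin(Union[int, str]))
assert is_union(get_origin(Optional[int]))
assert is_literal_type(Literal["a", "b"])
assert get_args(List[int]) == (int,)


class Params(TypedDict):
    name: str


assert is_typeddict(Params)
```
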
diff --git a/src/llama_stack_client/_utils/_datetime_parse.py b/src/llama_stack_client/_utils/_datetime_parse.py
new file mode 100644
index 00000000..7cb9d9e6
--- /dev/null
+++ b/src/llama_stack_client/_utils/_datetime_parse.py
@@ -0,0 +1,136 @@
+"""
+This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
+without the Pydantic v1 specific errors.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Dict, Union, Optional
+from datetime import date, datetime, timezone, timedelta
+
+from .._types import StrBytesIntFloat
+
+date_expr = r"(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})"
+time_expr = (
+ r"(?P\d{1,2}):(?P\d{1,2})"
+ r"(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?"
+ r"(?PZ|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+
+
+def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+ if isinstance(value, (int, float)):
+ return value
+ try:
+ return float(value)
+ except ValueError:
+ return None
+ except TypeError:
+ raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None
+
+
+def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
+ if seconds > MAX_NUMBER:
+ return datetime.max
+ elif seconds < -MAX_NUMBER:
+ return datetime.min
+
+ while abs(seconds) > MS_WATERSHED:
+ seconds /= 1000
+ dt = EPOCH + timedelta(seconds=seconds)
+ return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
+ if value == "Z":
+ return timezone.utc
+ elif value is not None:
+ offset_mins = int(value[-2:]) if len(value) > 3 else 0
+ offset = 60 * int(value[1:3]) + offset_mins
+ if value[0] == "-":
+ offset = -offset
+ return timezone(timedelta(minutes=offset))
+ else:
+ return None
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+ """
+ Parse a datetime/int/float/string and return a datetime.datetime.
+
+ This function supports time zone offsets. When the input contains one,
+ the output uses a timezone with a fixed offset from UTC.
+
+ Raise ValueError if the input is well formatted but not a valid datetime.
+ Raise ValueError if the input isn't well formatted.
+ """
+ if isinstance(value, datetime):
+ return value
+
+ number = _get_numeric(value, "datetime")
+ if number is not None:
+ return _from_unix_seconds(number)
+
+ if isinstance(value, bytes):
+ value = value.decode()
+
+ assert not isinstance(value, (float, int))
+
+ match = datetime_re.match(value)
+ if match is None:
+ raise ValueError("invalid datetime format")
+
+ kw = match.groupdict()
+ if kw["microsecond"]:
+ kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+ tzinfo = _parse_timezone(kw.pop("tzinfo"))
+ kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+ kw_["tzinfo"] = tzinfo
+
+ return datetime(**kw_) # type: ignore
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+ """
+ Parse a date/int/float/string and return a datetime.date.
+
+ Raise ValueError if the input is well formatted but not a valid date.
+ Raise ValueError if the input isn't well formatted.
+ """
+ if isinstance(value, date):
+ if isinstance(value, datetime):
+ return value.date()
+ else:
+ return value
+
+ number = _get_numeric(value, "date")
+ if number is not None:
+ return _from_unix_seconds(number).date()
+
+ if isinstance(value, bytes):
+ value = value.decode()
+
+ assert not isinstance(value, (float, int))
+ match = date_re.match(value)
+ if match is None:
+ raise ValueError("invalid date format")
+
+ kw = {k: int(v) for k, v in match.groupdict().items()}
+
+ try:
+ return date(**kw)
+ except ValueError:
+ raise ValueError("invalid date format") from None
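
The vendored parser mirrors Pydantic v1's `datetime_parse` behaviour, so date parsing no longer depends on which Pydantic major is installed. A brief sketch of the accepted inputs:

```python
from llama_stack_client._utils import parse_date, parse_datetime

print(parse_date("2025-09-26"))                      # datetime.date(2025, 9, 26)
print(parse_datetime("2025-09-26T12:30:00Z"))        # timezone-aware datetime in UTC
print(parse_datetime(1700000000))                    # unix seconds are accepted too
print(parse_datetime(b"2025-09-26 12:30:00+05:30"))  # bytes and fixed offsets also work
```
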
diff --git a/src/llama_stack_client/_utils/_transform.py b/src/llama_stack_client/_utils/_transform.py
index b0cc20a7..52075492 100644
--- a/src/llama_stack_client/_utils/_transform.py
+++ b/src/llama_stack_client/_utils/_transform.py
@@ -16,18 +16,20 @@
lru_cache,
is_mapping,
is_iterable,
+ is_sequence,
)
from .._files import is_base64_file_input
+from ._compat import get_origin, is_typeddict
from ._typing import (
is_list_type,
is_union_type,
extract_type_arg,
is_iterable_type,
is_required_type,
+ is_sequence_type,
is_annotated_type,
strip_annotated_type,
)
-from .._compat import get_origin, model_dump, is_typeddict
_T = TypeVar("_T")
@@ -167,6 +169,8 @@ def _transform_recursive(
Defaults to the same value as the `annotation` argument.
"""
+ from .._compat import model_dump
+
if inner_type is None:
inner_type = annotation
@@ -184,6 +188,8 @@ def _transform_recursive(
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
# dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
# intended as an iterable, so we don't transform it.
@@ -262,7 +268,7 @@ def _transform_typeddict(
annotations = get_type_hints(expected_type, include_extras=True)
for key, value in data.items():
if not is_given(value):
- # we don't need to include `NotGiven` values here as they'll
+ # we don't need to include omitted values here as they'll
# be stripped out before the request is sent anyway
continue
@@ -329,6 +335,8 @@ async def _async_transform_recursive(
Defaults to the same value as the `annotation` argument.
"""
+ from .._compat import model_dump
+
if inner_type is None:
inner_type = annotation
@@ -346,6 +354,8 @@ async def _async_transform_recursive(
(is_list_type(stripped_type) and is_list(data))
# Iterable[T]
or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ # Sequence[T]
+ or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
):
# dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
# intended as an iterable, so we don't transform it.
@@ -424,7 +434,7 @@ async def _async_transform_typeddict(
annotations = get_type_hints(expected_type, include_extras=True)
for key, value in data.items():
if not is_given(value):
- # we don't need to include `NotGiven` values here as they'll
+ # we don't need to include omitted values here as they'll
# be stripped out before the request is sent anyway
continue
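
With `is_sequence_type` wired into the transform path, request params annotated as `Sequence[...]` are walked the same way `List[...]` and `Iterable[...]` already were. A hedged sketch using the internal `maybe_transform` helper and a hypothetical params TypedDict:

```python
from typing import Sequence
from typing_extensions import TypedDict

from llama_stack_client._utils import maybe_transform


class ExampleParams(TypedDict, total=False):  # hypothetical params type
    tags: Sequence[str]


# tuples (and other non-str sequences) are now recursively transformed like lists
print(maybe_transform({"tags": ("alpha", "beta")}, ExampleParams))
```
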
diff --git a/src/llama_stack_client/_utils/_typing.py b/src/llama_stack_client/_utils/_typing.py
index 1bac9542..193109f3 100644
--- a/src/llama_stack_client/_utils/_typing.py
+++ b/src/llama_stack_client/_utils/_typing.py
@@ -15,7 +15,7 @@
from ._utils import lru_cache
from .._types import InheritsGeneric
-from .._compat import is_union as _is_union
+from ._compat import is_union as _is_union
def is_annotated_type(typ: type) -> bool:
@@ -26,6 +26,11 @@ def is_list_type(typ: type) -> bool:
return (get_origin(typ) or typ) == list
+def is_sequence_type(typ: type) -> bool:
+ origin = get_origin(typ) or typ
+ return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence
+
+
def is_iterable_type(typ: type) -> bool:
"""If the given type is `typing.Iterable[T]`"""
origin = get_origin(typ) or typ
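
The new predicate matches `Sequence` from `typing`, `typing_extensions`, and `collections.abc`, but not `list`, which keeps its dedicated `is_list_type` path. A quick sketch:

```python
import typing
import collections.abc

from llama_stack_client._utils import is_list_type, is_sequence_type

assert is_sequence_type(typing.Sequence[str])
assert is_sequence_type(collections.abc.Sequence[int])
assert not is_sequence_type(typing.List[str])
assert is_list_type(typing.List[str])
```
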
diff --git a/src/llama_stack_client/_utils/_utils.py b/src/llama_stack_client/_utils/_utils.py
index ea3cf3f2..50d59269 100644
--- a/src/llama_stack_client/_utils/_utils.py
+++ b/src/llama_stack_client/_utils/_utils.py
@@ -21,8 +21,7 @@
import sniffio
-from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
-from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
+from .._types import Omit, NotGiven, FileTypes, HeadersLike
_T = TypeVar("_T")
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
@@ -64,7 +63,7 @@ def _extract_items(
try:
key = path[index]
except IndexError:
- if isinstance(obj, NotGiven):
+ if not is_given(obj):
# no value was provided - we can safely ignore
return []
@@ -127,8 +126,8 @@ def _extract_items(
return []
-def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]:
- return not isinstance(obj, NotGiven)
+def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]:
+ return not isinstance(obj, NotGiven) and not isinstance(obj, Omit)
# Type safe methods for narrowing types with TypeVars.
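
`is_given` now treats both sentinels as absent values, which is what lets `omit` flow through the same code paths as `NOT_GIVEN`. A small sketch:

```python
from llama_stack_client._types import omit, not_given
from llama_stack_client._utils import is_given

assert not is_given(not_given)
assert not is_given(omit)
assert is_given(None)  # an explicit None still counts as a provided value
assert is_given(0)
```
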
diff --git a/src/llama_stack_client/pagination.py b/src/llama_stack_client/pagination.py
index 9122ff46..67106bc5 100644
--- a/src/llama_stack_client/pagination.py
+++ b/src/llama_stack_client/pagination.py
@@ -24,10 +24,13 @@ def _get_page_items(self) -> List[_T]:
@override
def next_page_info(self) -> Optional[PageInfo]:
next_index = self.next_index
- if not next_index:
- return None
+ if next_index is None:
+ return None # type: ignore[unreachable]
+
+ length = len(self._get_page_items())
+ current_count = next_index + length
- return PageInfo(params={"start_index": next_index})
+ return PageInfo(params={"start_index": current_count})
class AsyncDatasetsIterrows(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
@@ -44,10 +47,13 @@ def _get_page_items(self) -> List[_T]:
@override
def next_page_info(self) -> Optional[PageInfo]:
next_index = self.next_index
- if not next_index:
- return None
+ if next_index is None:
+ return None # type: ignore[unreachable]
+
+ length = len(self._get_page_items())
+ current_count = next_index + length
- return PageInfo(params={"start_index": next_index})
+ return PageInfo(params={"start_index": current_count})
class SyncOpenAICursorPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
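
The corrected offset arithmetic advances past the rows already received instead of re-requesting from `next_index`, and an index of `0` is no longer mistaken for the end of the dataset. A worked example, assuming `next_index` is the offset reported alongside the current page:

```python
next_index = 20   # offset reported with the page that was just fetched (assumption)
page_length = 10  # number of rows in that page

# old behaviour: request start_index=20 again (and stop entirely if next_index == 0)
# new behaviour: request the rows after the ones already received
start_index = next_index + page_length
assert start_index == 30
```
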
diff --git a/src/llama_stack_client/resources/agents/agents.py b/src/llama_stack_client/resources/agents/agents.py
index 6a4ffe85..50d65a60 100644
--- a/src/llama_stack_client/resources/agents/agents.py
+++ b/src/llama_stack_client/resources/agents/agents.py
@@ -29,7 +29,7 @@
SessionResourceWithStreamingResponse,
AsyncSessionResourceWithStreamingResponse,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -89,7 +89,7 @@ def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AgentCreateResponse:
"""
Create an agent with the given configuration.
@@ -123,7 +123,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AgentRetrieveResponse:
"""
Describe an agent by its ID.
@@ -150,14 +150,14 @@ def retrieve(
def list(
self,
*,
- limit: int | NotGiven = NOT_GIVEN,
- start_index: int | NotGiven = NOT_GIVEN,
+ limit: int | Omit = omit,
+ start_index: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AgentListResponse:
"""
List all agents.
@@ -202,7 +202,7 @@ def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Delete an agent by its ID and its associated sessions and turns.
@@ -269,7 +269,7 @@ async def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AgentCreateResponse:
"""
Create an agent with the given configuration.
@@ -303,7 +303,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AgentRetrieveResponse:
"""
Describe an agent by its ID.
@@ -330,14 +330,14 @@ async def retrieve(
async def list(
self,
*,
- limit: int | NotGiven = NOT_GIVEN,
- start_index: int | NotGiven = NOT_GIVEN,
+ limit: int | Omit = omit,
+ start_index: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AgentListResponse:
"""
List all agents.
@@ -382,7 +382,7 @@ async def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Delete an agent by its ID and its associated sessions and turns.
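
Optional query parameters now default to the `omit` sentinel rather than `NOT_GIVEN`, so anything you do not pass is dropped from the request entirely, while explicit values are serialized as before. A usage sketch, assuming a Llama Stack distribution is running locally (the base_url is an assumption for illustration):

# Usage sketch; http://localhost:8321 is an assumed local server address.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

# No limit/start_index passed: both default to `omit` and are left out of the query string.
all_agents = client.agents.list()

# Explicit values are still sent as ?limit=10&start_index=0.
first_page = client.agents.list(limit=10, start_index=0)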
diff --git a/src/llama_stack_client/resources/agents/session.py b/src/llama_stack_client/resources/agents/session.py
index 4e1704d5..de5b35de 100644
--- a/src/llama_stack_client/resources/agents/session.py
+++ b/src/llama_stack_client/resources/agents/session.py
@@ -2,11 +2,9 @@
from __future__ import annotations
-from typing import List
-
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -55,7 +53,7 @@ def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionCreateResponse:
"""
Create a new session for an agent.
@@ -87,13 +85,13 @@ def retrieve(
session_id: str,
*,
agent_id: str,
- turn_ids: List[str] | NotGiven = NOT_GIVEN,
+ turn_ids: SequenceNotStr[str] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Session:
"""
Retrieve an agent session by its ID.
@@ -129,14 +127,14 @@ def list(
self,
agent_id: str,
*,
- limit: int | NotGiven = NOT_GIVEN,
- start_index: int | NotGiven = NOT_GIVEN,
+ limit: int | Omit = omit,
+ start_index: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionListResponse:
"""
List all session(s) of a given agent.
@@ -184,7 +182,7 @@ def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Delete an agent session by its ID and its associated turns.
@@ -242,7 +240,7 @@ async def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionCreateResponse:
"""
Create a new session for an agent.
@@ -274,13 +272,13 @@ async def retrieve(
session_id: str,
*,
agent_id: str,
- turn_ids: List[str] | NotGiven = NOT_GIVEN,
+ turn_ids: SequenceNotStr[str] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Session:
"""
Retrieve an agent session by its ID.
@@ -318,14 +316,14 @@ async def list(
self,
agent_id: str,
*,
- limit: int | NotGiven = NOT_GIVEN,
- start_index: int | NotGiven = NOT_GIVEN,
+ limit: int | Omit = omit,
+ start_index: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SessionListResponse:
"""
List all session(s) of a given agent.
@@ -373,7 +371,7 @@ async def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Delete an agent session by its ID and its associated turns.
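
`turn_ids` moves from `List[str]` to `SequenceNotStr[str]`, which accepts any non-string sequence (list, tuple, etc.) while letting type checkers reject a bare `str`, which would otherwise be iterated character by character. A type-level sketch, assuming the private `_types` export shown above:

# Sketch only: SequenceNotStr is imported from the private _types module shown
# in this diff; the annotation is illustrative, not a public guarantee.
from __future__ import annotations

from llama_stack_client._types import SequenceNotStr

turn_ids: SequenceNotStr[str] = ("turn-1", "turn-2")  # tuple: accepted
turn_ids = ["turn-1", "turn-2"]                       # list: accepted
# turn_ids = "turn-1"  # flagged by mypy/pyright, even though str is a Sequence[str]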
diff --git a/src/llama_stack_client/resources/agents/steps.py b/src/llama_stack_client/resources/agents/steps.py
index 78f9a88e..94138edc 100644
--- a/src/llama_stack_client/resources/agents/steps.py
+++ b/src/llama_stack_client/resources/agents/steps.py
@@ -4,7 +4,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Query, Headers, NotGiven, not_given
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -51,7 +51,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> StepRetrieveResponse:
"""
Retrieve an agent step by its ID.
@@ -114,7 +114,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> StepRetrieveResponse:
"""
Retrieve an agent step by its ID.
diff --git a/src/llama_stack_client/resources/agents/turn.py b/src/llama_stack_client/resources/agents/turn.py
index b98b593b..a10da847 100644
--- a/src/llama_stack_client/resources/agents/turn.py
+++ b/src/llama_stack_client/resources/agents/turn.py
@@ -2,12 +2,12 @@
from __future__ import annotations
-from typing import List, Iterable
+from typing import Iterable
from typing_extensions import Literal, overload
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import required_args, maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -54,16 +54,16 @@ def create(
*,
agent_id: str,
messages: Iterable[turn_create_params.Message],
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn:
"""
Create a new turn for an agent.
@@ -100,15 +100,15 @@ def create(
agent_id: str,
messages: Iterable[turn_create_params.Message],
stream: Literal[True],
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[AgentTurnResponseStreamChunk]:
"""
Create a new turn for an agent.
@@ -145,15 +145,15 @@ def create(
agent_id: str,
messages: Iterable[turn_create_params.Message],
stream: bool,
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | Stream[AgentTurnResponseStreamChunk]:
"""
Create a new turn for an agent.
@@ -189,16 +189,16 @@ def create(
*,
agent_id: str,
messages: Iterable[turn_create_params.Message],
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | Stream[AgentTurnResponseStreamChunk]:
if not agent_id:
raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
@@ -237,7 +237,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn:
"""
Retrieve an agent turn by its ID.
@@ -273,13 +273,13 @@ def resume(
agent_id: str,
session_id: str,
tool_responses: Iterable[ToolResponseParam],
- stream: Literal[False] | NotGiven = NOT_GIVEN,
+ stream: Literal[False] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn:
"""Resume an agent turn with executed tool call responses.
@@ -317,7 +317,7 @@ def resume(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[AgentTurnResponseStreamChunk]:
"""Resume an agent turn with executed tool call responses.
@@ -355,7 +355,7 @@ def resume(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | Stream[AgentTurnResponseStreamChunk]:
"""Resume an agent turn with executed tool call responses.
@@ -387,13 +387,13 @@ def resume(
agent_id: str,
session_id: str,
tool_responses: Iterable[ToolResponseParam],
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
+ stream: Literal[False] | Literal[True] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | Stream[AgentTurnResponseStreamChunk]:
if not agent_id:
raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
@@ -448,16 +448,16 @@ async def create(
*,
agent_id: str,
messages: Iterable[turn_create_params.Message],
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn:
"""
Create a new turn for an agent.
@@ -494,15 +494,15 @@ async def create(
agent_id: str,
messages: Iterable[turn_create_params.Message],
stream: Literal[True],
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[AgentTurnResponseStreamChunk]:
"""
Create a new turn for an agent.
@@ -539,15 +539,15 @@ async def create(
agent_id: str,
messages: Iterable[turn_create_params.Message],
stream: bool,
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | AsyncStream[AgentTurnResponseStreamChunk]:
"""
Create a new turn for an agent.
@@ -583,16 +583,16 @@ async def create(
*,
agent_id: str,
messages: Iterable[turn_create_params.Message],
- documents: Iterable[turn_create_params.Document] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- tool_config: turn_create_params.ToolConfig | NotGiven = NOT_GIVEN,
- toolgroups: List[turn_create_params.Toolgroup] | NotGiven = NOT_GIVEN,
+ documents: Iterable[turn_create_params.Document] | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ tool_config: turn_create_params.ToolConfig | Omit = omit,
+ toolgroups: SequenceNotStr[turn_create_params.Toolgroup] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | AsyncStream[AgentTurnResponseStreamChunk]:
if not agent_id:
raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
@@ -631,7 +631,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn:
"""
Retrieve an agent turn by its ID.
@@ -667,13 +667,13 @@ async def resume(
agent_id: str,
session_id: str,
tool_responses: Iterable[ToolResponseParam],
- stream: Literal[False] | NotGiven = NOT_GIVEN,
+ stream: Literal[False] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn:
"""Resume an agent turn with executed tool call responses.
@@ -711,7 +711,7 @@ async def resume(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[AgentTurnResponseStreamChunk]:
"""Resume an agent turn with executed tool call responses.
@@ -749,7 +749,7 @@ async def resume(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | AsyncStream[AgentTurnResponseStreamChunk]:
"""Resume an agent turn with executed tool call responses.
@@ -781,13 +781,13 @@ async def resume(
agent_id: str,
session_id: str,
tool_responses: Iterable[ToolResponseParam],
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
+ stream: Literal[False] | Literal[True] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Turn | AsyncStream[AgentTurnResponseStreamChunk]:
if not agent_id:
raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}")
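
The streaming overloads keep their shape: leaving `stream` at its new `omit` default returns a single `Turn`, while `stream=True` returns a `Stream[AgentTurnResponseStreamChunk]` (or the async variant) to iterate. A usage sketch; the ids, message shape, and local server are assumptions for illustration:

# Usage sketch; agent_id/session_id values and the local base_url are assumed.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

# stream left at its `omit` default: blocks and returns one Turn.
turn = client.agents.turn.create(
    session_id="session-123",
    agent_id="agent-abc",
    messages=[{"role": "user", "content": "Hello"}],
)

# stream=True: returns a lazily consumed stream of turn-response chunks.
for chunk in client.agents.turn.create(
    session_id="session-123",
    agent_id="agent-abc",
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
):
    print(chunk)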
diff --git a/src/llama_stack_client/resources/benchmarks.py b/src/llama_stack_client/resources/benchmarks.py
index 7b92833b..92b8a0c1 100644
--- a/src/llama_stack_client/resources/benchmarks.py
+++ b/src/llama_stack_client/resources/benchmarks.py
@@ -2,12 +2,12 @@
from __future__ import annotations
-from typing import Dict, List, Type, Union, Iterable, cast
+from typing import Dict, Type, Union, Iterable, cast
import httpx
from ..types import benchmark_register_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -54,7 +54,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Benchmark:
"""
Get a benchmark by its ID.
@@ -86,7 +86,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> BenchmarkListResponse:
"""List all benchmarks."""
return self._get(
@@ -106,16 +106,16 @@ def register(
*,
benchmark_id: str,
dataset_id: str,
- scoring_functions: List[str],
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- provider_benchmark_id: str | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
+ scoring_functions: SequenceNotStr[str],
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ provider_benchmark_id: str | Omit = omit,
+ provider_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Register a benchmark.
@@ -191,7 +191,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Benchmark:
"""
Get a benchmark by its ID.
@@ -223,7 +223,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> BenchmarkListResponse:
"""List all benchmarks."""
return await self._get(
@@ -243,16 +243,16 @@ async def register(
*,
benchmark_id: str,
dataset_id: str,
- scoring_functions: List[str],
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- provider_benchmark_id: str | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
+ scoring_functions: SequenceNotStr[str],
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ provider_benchmark_id: str | Omit = omit,
+ provider_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Register a benchmark.
diff --git a/src/llama_stack_client/resources/chat/completions.py b/src/llama_stack_client/resources/chat/completions.py
index ccf2cba9..5445a2d1 100644
--- a/src/llama_stack_client/resources/chat/completions.py
+++ b/src/llama_stack_client/resources/chat/completions.py
@@ -2,12 +2,12 @@
from __future__ import annotations
-from typing import Any, Dict, List, Union, Iterable, cast
+from typing import Any, Dict, Union, Iterable, cast
from typing_extensions import Literal, overload
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import required_args, maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -55,35 +55,33 @@ def create(
*,
messages: Iterable[completion_create_params.Message],
model: str,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse:
"""
Generate an OpenAI-compatible chat completion for the given messages using the
@@ -154,34 +152,32 @@ def create(
messages: Iterable[completion_create_params.Message],
model: str,
stream: Literal[True],
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[ChatCompletionChunk]:
"""
Generate an OpenAI-compatible chat completion for the given messages using the
@@ -252,34 +248,32 @@ def create(
messages: Iterable[completion_create_params.Message],
model: str,
stream: bool,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | Stream[ChatCompletionChunk]:
"""
Generate an OpenAI-compatible chat completion for the given messages using the
@@ -349,35 +343,33 @@ def create(
*,
messages: Iterable[completion_create_params.Message],
model: str,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | Stream[ChatCompletionChunk]:
return self._post(
"/v1/openai/v1/chat/completions",
@@ -430,7 +422,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionRetrieveResponse:
"""
Describe a chat completion by its ID.
@@ -457,16 +449,16 @@ def retrieve(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- model: str | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ limit: int | Omit = omit,
+ model: str | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyncOpenAICursorPage[CompletionListResponse]:
"""
List all chat completions.
@@ -536,35 +528,33 @@ async def create(
*,
messages: Iterable[completion_create_params.Message],
model: str,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse:
"""
Generate an OpenAI-compatible chat completion for the given messages using the
@@ -635,34 +625,32 @@ async def create(
messages: Iterable[completion_create_params.Message],
model: str,
stream: Literal[True],
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[ChatCompletionChunk]:
"""
Generate an OpenAI-compatible chat completion for the given messages using the
@@ -733,34 +721,32 @@ async def create(
messages: Iterable[completion_create_params.Message],
model: str,
stream: bool,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | AsyncStream[ChatCompletionChunk]:
"""
Generate an OpenAI-compatible chat completion for the given messages using the
@@ -830,35 +816,33 @@ async def create(
*,
messages: Iterable[completion_create_params.Message],
model: str,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_completion_tokens: int | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- parallel_tool_calls: bool | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- response_format: completion_create_params.ResponseFormat | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]]
- | NotGiven = NOT_GIVEN,
- tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | NotGiven = NOT_GIVEN,
- top_logprobs: int | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ frequency_penalty: float | Omit = omit,
+ function_call: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ functions: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_completion_tokens: int | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ parallel_tool_calls: bool | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ response_format: completion_create_params.ResponseFormat | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ temperature: float | Omit = omit,
+ tool_choice: Union[str, Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ tools: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]] | Omit = omit,
+ top_logprobs: int | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | AsyncStream[ChatCompletionChunk]:
return await self._post(
"/v1/openai/v1/chat/completions",
@@ -911,7 +895,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionRetrieveResponse:
"""
Describe a chat completion by its ID.
@@ -938,16 +922,16 @@ async def retrieve(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- model: str | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ limit: int | Omit = omit,
+ model: str | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncPaginator[CompletionListResponse, AsyncOpenAICursorPage[CompletionListResponse]]:
"""
List all chat completions.
diff --git a/src/llama_stack_client/resources/completions.py b/src/llama_stack_client/resources/completions.py
index 23554ccb..2c1475de 100644
--- a/src/llama_stack_client/resources/completions.py
+++ b/src/llama_stack_client/resources/completions.py
@@ -2,13 +2,13 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal, overload
import httpx
from ..types import completion_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from .._utils import required_args, maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -50,31 +50,31 @@ def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse:
"""
Generate an OpenAI-compatible completion for the given prompt using the
@@ -133,31 +133,31 @@ def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
stream: Literal[True],
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[CompletionCreateResponse]:
"""
Generate an OpenAI-compatible completion for the given prompt using the
@@ -216,31 +216,31 @@ def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
stream: bool,
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | Stream[CompletionCreateResponse]:
"""
Generate an OpenAI-compatible completion for the given prompt using the
@@ -299,31 +299,31 @@ def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | Stream[CompletionCreateResponse]:
return self._post(
"/v1/openai/v1/completions",
@@ -388,31 +388,31 @@ async def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse:
"""
Generate an OpenAI-compatible completion for the given prompt using the
@@ -471,31 +471,31 @@ async def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
stream: Literal[True],
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[CompletionCreateResponse]:
"""
Generate an OpenAI-compatible completion for the given prompt using the
@@ -554,31 +554,31 @@ async def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
stream: bool,
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | AsyncStream[CompletionCreateResponse]:
"""
Generate an OpenAI-compatible completion for the given prompt using the
@@ -637,31 +637,31 @@ async def create(
self,
*,
model: str,
- prompt: Union[str, List[str], Iterable[int], Iterable[Iterable[int]]],
- best_of: int | NotGiven = NOT_GIVEN,
- echo: bool | NotGiven = NOT_GIVEN,
- frequency_penalty: float | NotGiven = NOT_GIVEN,
- guided_choice: List[str] | NotGiven = NOT_GIVEN,
- logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
- logprobs: bool | NotGiven = NOT_GIVEN,
- max_tokens: int | NotGiven = NOT_GIVEN,
- n: int | NotGiven = NOT_GIVEN,
- presence_penalty: float | NotGiven = NOT_GIVEN,
- prompt_logprobs: int | NotGiven = NOT_GIVEN,
- seed: int | NotGiven = NOT_GIVEN,
- stop: Union[str, List[str]] | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- suffix: str | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- top_p: float | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ prompt: Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]],
+ best_of: int | Omit = omit,
+ echo: bool | Omit = omit,
+ frequency_penalty: float | Omit = omit,
+ guided_choice: SequenceNotStr[str] | Omit = omit,
+ logit_bias: Dict[str, float] | Omit = omit,
+ logprobs: bool | Omit = omit,
+ max_tokens: int | Omit = omit,
+ n: int | Omit = omit,
+ presence_penalty: float | Omit = omit,
+ prompt_logprobs: int | Omit = omit,
+ seed: int | Omit = omit,
+ stop: Union[str, SequenceNotStr[str]] | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ suffix: str | Omit = omit,
+ temperature: float | Omit = omit,
+ top_p: float | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionCreateResponse | AsyncStream[CompletionCreateResponse]:
return await self._post(
"/v1/openai/v1/completions",
diff --git a/src/llama_stack_client/resources/datasets.py b/src/llama_stack_client/resources/datasets.py
index e2f0a149..e3a2af6d 100644
--- a/src/llama_stack_client/resources/datasets.py
+++ b/src/llama_stack_client/resources/datasets.py
@@ -8,7 +8,7 @@
import httpx
from ..types import dataset_iterrows_params, dataset_register_params, dataset_appendrows_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -57,7 +57,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetRetrieveResponse:
"""
Get a dataset by its ID.
@@ -89,7 +89,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetListResponse:
"""List all datasets."""
return self._get(
@@ -114,7 +114,7 @@ def appendrows(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Append rows to a dataset.
@@ -146,14 +146,14 @@ def iterrows(
self,
dataset_id: str,
*,
- limit: int | NotGiven = NOT_GIVEN,
- start_index: int | NotGiven = NOT_GIVEN,
+ limit: int | Omit = omit,
+ start_index: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetIterrowsResponse:
"""Get a paginated list of rows from a dataset.
@@ -205,14 +205,14 @@ def register(
*,
purpose: Literal["post-training/messages", "eval/question-answer", "eval/messages-answer"],
source: dataset_register_params.Source,
- dataset_id: str | NotGiven = NOT_GIVEN,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
+ dataset_id: str | Omit = omit,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetRegisterResponse:
"""Register a new dataset.
@@ -278,7 +278,7 @@ def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a dataset by its ID.
@@ -333,7 +333,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetRetrieveResponse:
"""
Get a dataset by its ID.
@@ -365,7 +365,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetListResponse:
"""List all datasets."""
return await self._get(
@@ -390,7 +390,7 @@ async def appendrows(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Append rows to a dataset.
@@ -422,14 +422,14 @@ async def iterrows(
self,
dataset_id: str,
*,
- limit: int | NotGiven = NOT_GIVEN,
- start_index: int | NotGiven = NOT_GIVEN,
+ limit: int | Omit = omit,
+ start_index: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetIterrowsResponse:
"""Get a paginated list of rows from a dataset.
@@ -481,14 +481,14 @@ async def register(
*,
purpose: Literal["post-training/messages", "eval/question-answer", "eval/messages-answer"],
source: dataset_register_params.Source,
- dataset_id: str | NotGiven = NOT_GIVEN,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
+ dataset_id: str | Omit = omit,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DatasetRegisterResponse:
"""Register a new dataset.
@@ -554,7 +554,7 @@ async def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a dataset by its ID.
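For datasets.py the change is confined to the sentinels: `limit` and `start_index` on iterrows, and `dataset_id` and `metadata` on register, now default to `omit`. A sketch of fetching one page of rows, reusing the `client` from the earlier example; the dataset id is a placeholder, and the field names on DatasetIterrowsResponse are an assumption since the response model is not part of this patch:

    # Placeholder dataset id; limit/start_index left unset are not serialized.
    page = client.datasets.iterrows("my-eval-dataset", limit=100)
    # Assumption: the response exposes its rows as `data`; adjust to the actual
    # DatasetIterrowsResponse fields if they differ.
    for row in page.data:
        print(row)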
diff --git a/src/llama_stack_client/resources/embeddings.py b/src/llama_stack_client/resources/embeddings.py
index 144ebbf2..60c38cb2 100644
--- a/src/llama_stack_client/resources/embeddings.py
+++ b/src/llama_stack_client/resources/embeddings.py
@@ -2,12 +2,12 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
import httpx
from ..types import embedding_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -46,17 +46,17 @@ def with_streaming_response(self) -> EmbeddingsResourceWithStreamingResponse:
def create(
self,
*,
- input: Union[str, List[str]],
+ input: Union[str, SequenceNotStr[str]],
model: str,
- dimensions: int | NotGiven = NOT_GIVEN,
- encoding_format: str | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ dimensions: int | Omit = omit,
+ encoding_format: str | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CreateEmbeddingsResponse:
"""
Generate OpenAI-compatible embeddings for the given input using the specified
@@ -128,17 +128,17 @@ def with_streaming_response(self) -> AsyncEmbeddingsResourceWithStreamingRespons
async def create(
self,
*,
- input: Union[str, List[str]],
+ input: Union[str, SequenceNotStr[str]],
model: str,
- dimensions: int | NotGiven = NOT_GIVEN,
- encoding_format: str | NotGiven = NOT_GIVEN,
- user: str | NotGiven = NOT_GIVEN,
+ dimensions: int | Omit = omit,
+ encoding_format: str | Omit = omit,
+ user: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CreateEmbeddingsResponse:
"""
Generate OpenAI-compatible embeddings for the given input using the specified
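embeddings.create gets the same treatment, and because a plain string stays valid for a single input, the visible change for callers is that multi-input requests may now pass any non-string sequence. A minimal sketch, reusing the `client` from above; the model id is a placeholder:

    # input is Union[str, SequenceNotStr[str]]: a tuple of inputs is accepted.
    embeddings = client.embeddings.create(
        model="all-MiniLM-L6-v2",
        input=("first passage to embed", "second passage to embed"),
    )
    print(embeddings)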
diff --git a/src/llama_stack_client/resources/eval/eval.py b/src/llama_stack_client/resources/eval/eval.py
index 006f1717..87637875 100644
--- a/src/llama_stack_client/resources/eval/eval.py
+++ b/src/llama_stack_client/resources/eval/eval.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
import httpx
@@ -20,7 +20,7 @@
eval_run_eval_alpha_params,
eval_evaluate_rows_alpha_params,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Query, Headers, NotGiven, SequenceNotStr, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -68,13 +68,13 @@ def evaluate_rows(
*,
benchmark_config: BenchmarkConfigParam,
input_rows: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]],
- scoring_functions: List[str],
+ scoring_functions: SequenceNotStr[str],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EvaluateResponse:
"""
Evaluate a list of rows on a benchmark.
@@ -118,13 +118,13 @@ def evaluate_rows_alpha(
*,
benchmark_config: BenchmarkConfigParam,
input_rows: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]],
- scoring_functions: List[str],
+ scoring_functions: SequenceNotStr[str],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EvaluateResponse:
"""
Evaluate a list of rows on a benchmark.
@@ -172,7 +172,7 @@ def run_eval(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Job:
"""
Run an evaluation on a benchmark.
@@ -209,7 +209,7 @@ def run_eval_alpha(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Job:
"""
Run an evaluation on a benchmark.
@@ -269,13 +269,13 @@ async def evaluate_rows(
*,
benchmark_config: BenchmarkConfigParam,
input_rows: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]],
- scoring_functions: List[str],
+ scoring_functions: SequenceNotStr[str],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EvaluateResponse:
"""
Evaluate a list of rows on a benchmark.
@@ -319,13 +319,13 @@ async def evaluate_rows_alpha(
*,
benchmark_config: BenchmarkConfigParam,
input_rows: Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]],
- scoring_functions: List[str],
+ scoring_functions: SequenceNotStr[str],
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EvaluateResponse:
"""
Evaluate a list of rows on a benchmark.
@@ -373,7 +373,7 @@ async def run_eval(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Job:
"""
Run an evaluation on a benchmark.
@@ -412,7 +412,7 @@ async def run_eval_alpha(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Job:
"""
Run an evaluation on a benchmark.
diff --git a/src/llama_stack_client/resources/eval/jobs.py b/src/llama_stack_client/resources/eval/jobs.py
index 16fa337f..21f6aea6 100644
--- a/src/llama_stack_client/resources/eval/jobs.py
+++ b/src/llama_stack_client/resources/eval/jobs.py
@@ -4,7 +4,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._types import Body, Query, Headers, NoneType, NotGiven, not_given
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -50,7 +50,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EvaluateResponse:
"""
Get the result of a job.
@@ -86,7 +86,7 @@ def cancel(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Cancel a job.
@@ -123,7 +123,7 @@ def status(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Job:
"""
Get the status of a job.
@@ -180,7 +180,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EvaluateResponse:
"""
Get the result of a job.
@@ -216,7 +216,7 @@ async def cancel(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Cancel a job.
@@ -253,7 +253,7 @@ async def status(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Job:
"""
Get the status of a job.
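The eval/jobs.py hunks above change nothing but the timeout default, and the same rename appears on every method in this patch: the annotation still includes NotGiven, only the default value becomes the lowercase `not_given` sentinel. Per-request overrides keep working as before; a small sketch, reusing the `client` from the earlier example and the datasets.list signature shown above:

    import httpx

    # A float or an httpx.Timeout is forwarded as the per-request timeout;
    # leaving the argument out keeps the client-level default.
    datasets = client.datasets.list(timeout=httpx.Timeout(30.0, connect=5.0))
    print(datasets)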
diff --git a/src/llama_stack_client/resources/files.py b/src/llama_stack_client/resources/files.py
index 4a74e148..6b395e52 100644
--- a/src/llama_stack_client/resources/files.py
+++ b/src/llama_stack_client/resources/files.py
@@ -8,7 +8,7 @@
import httpx
from ..types import file_list_params, file_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes
+from .._types import Body, Omit, Query, Headers, NotGiven, FileTypes, omit, not_given
from .._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -56,7 +56,7 @@ def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> File:
"""Upload a file that can be used across various endpoints.
@@ -107,7 +107,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> File:
"""
Returns information about a specific file.
@@ -134,16 +134,16 @@ def retrieve(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
- purpose: Literal["assistants", "batch"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ limit: int | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
+ purpose: Literal["assistants", "batch"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyncOpenAICursorPage[File]:
"""
Returns a list of files that belong to the user's organization.
@@ -200,7 +200,7 @@ def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DeleteFileResponse:
"""
Delete a file.
@@ -233,7 +233,7 @@ def content(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> object:
"""
Returns the contents of the specified file.
@@ -288,7 +288,7 @@ async def create(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> File:
"""Upload a file that can be used across various endpoints.
@@ -339,7 +339,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> File:
"""
Returns information about a specific file.
@@ -366,16 +366,16 @@ async def retrieve(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
- purpose: Literal["assistants", "batch"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ limit: int | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
+ purpose: Literal["assistants", "batch"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncPaginator[File, AsyncOpenAICursorPage[File]]:
"""
Returns a list of files that belong to the user's organization.
@@ -432,7 +432,7 @@ async def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> DeleteFileResponse:
"""
Delete a file.
@@ -465,7 +465,7 @@ async def content(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> object:
"""
Returns the contents of the specified file.
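The inference.py patch that follows adds a new rerank method on top of the sentinel migration. A call-site sketch based only on the docstring shown below, assuming the resource is exposed as `client.inference`; the model id and documents are placeholders:

    ranked = client.inference.rerank(
        model="example/reranker",      # placeholder reranking model id
        query="What is the capital of France?",
        items=(
            "Paris is the capital of France.",
            "Berlin is the capital of Germany.",
        ),
        max_num_results=1,             # optional; defaults to the omit sentinel
    )
    print(ranked)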
diff --git a/src/llama_stack_client/resources/inference.py b/src/llama_stack_client/resources/inference.py
index 7aec2dbd..732025cc 100644
--- a/src/llama_stack_client/resources/inference.py
+++ b/src/llama_stack_client/resources/inference.py
@@ -3,19 +3,20 @@
from __future__ import annotations
import typing_extensions
-from typing import List, Union, Iterable
+from typing import Type, Union, Iterable, cast
from typing_extensions import Literal, overload
import httpx
from ..types import (
+ inference_rerank_params,
inference_completion_params,
inference_embeddings_params,
inference_chat_completion_params,
inference_batch_completion_params,
inference_batch_chat_completion_params,
)
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from .._utils import required_args, maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -25,12 +26,14 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
+from .._wrappers import DataWrapper
from .._streaming import Stream, AsyncStream
from .._base_client import make_request_options
from ..types.completion_response import CompletionResponse
from ..types.embeddings_response import EmbeddingsResponse
from ..types.shared_params.message import Message
from ..types.shared.batch_completion import BatchCompletion
+from ..types.inference_rerank_response import InferenceRerankResponse
from ..types.shared_params.response_format import ResponseFormat
from ..types.shared_params.sampling_params import SamplingParams
from ..types.shared.chat_completion_response import ChatCompletionResponse
@@ -67,17 +70,17 @@ def batch_chat_completion(
*,
messages_batch: Iterable[Iterable[Message]],
model_id: str,
- logprobs: inference_batch_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- tool_config: inference_batch_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_batch_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_batch_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ tool_config: inference_batch_chat_completion_params.ToolConfig | Omit = omit,
+ tools: Iterable[inference_batch_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> InferenceBatchChatCompletionResponse:
"""
Generate chat completions for a batch of messages using the specified model.
@@ -130,17 +133,17 @@ def batch_chat_completion(
def batch_completion(
self,
*,
- content_batch: List[InterleavedContent],
+ content_batch: SequenceNotStr[InterleavedContent],
model_id: str,
- logprobs: inference_batch_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
+ logprobs: inference_batch_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> BatchCompletion:
"""
Generate completions for a batch of content using the specified model.
@@ -193,20 +196,20 @@ def chat_completion(
*,
messages: Iterable[Message],
model_id: str,
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ChatCompletionResponse:
"""
Generate a chat completion for the given messages using the specified model.
@@ -265,19 +268,19 @@ def chat_completion(
messages: Iterable[Message],
model_id: str,
stream: Literal[True],
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[ChatCompletionResponseStreamChunk]:
"""
Generate a chat completion for the given messages using the specified model.
@@ -336,19 +339,19 @@ def chat_completion(
messages: Iterable[Message],
model_id: str,
stream: bool,
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ChatCompletionResponse | Stream[ChatCompletionResponseStreamChunk]:
"""
Generate a chat completion for the given messages using the specified model.
@@ -406,20 +409,20 @@ def chat_completion(
*,
messages: Iterable[Message],
model_id: str,
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ChatCompletionResponse | Stream[ChatCompletionResponseStreamChunk]:
if stream:
extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
@@ -457,16 +460,16 @@ def completion(
*,
content: InterleavedContent,
model_id: str,
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionResponse:
"""
Generate a completion for the given content using the specified model.
@@ -505,15 +508,15 @@ def completion(
content: InterleavedContent,
model_id: str,
stream: Literal[True],
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[CompletionResponse]:
"""
Generate a completion for the given content using the specified model.
@@ -552,15 +555,15 @@ def completion(
content: InterleavedContent,
model_id: str,
stream: bool,
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionResponse | Stream[CompletionResponse]:
"""
Generate a completion for the given content using the specified model.
@@ -598,16 +601,16 @@ def completion(
*,
content: InterleavedContent,
model_id: str,
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionResponse | Stream[CompletionResponse]:
if stream:
extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
@@ -638,17 +641,17 @@ def completion(
def embeddings(
self,
*,
- contents: Union[List[str], Iterable[InterleavedContentItem]],
+ contents: Union[SequenceNotStr[str], Iterable[InterleavedContentItem]],
model_id: str,
- output_dimension: int | NotGiven = NOT_GIVEN,
- task_type: Literal["query", "document"] | NotGiven = NOT_GIVEN,
- text_truncation: Literal["none", "start", "end"] | NotGiven = NOT_GIVEN,
+ output_dimension: int | Omit = omit,
+ task_type: Literal["query", "document"] | Omit = omit,
+ text_truncation: Literal["none", "start", "end"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EmbeddingsResponse:
"""
Generate embeddings for content pieces using the specified model.
@@ -696,6 +699,64 @@ def embeddings(
cast_to=EmbeddingsResponse,
)
+ def rerank(
+ self,
+ *,
+ items: SequenceNotStr[inference_rerank_params.Item],
+ model: str,
+ query: inference_rerank_params.Query,
+ max_num_results: int | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> InferenceRerankResponse:
+ """
+ Rerank a list of documents based on their relevance to a query.
+
+ Args:
+ items: List of items to rerank. Each item can be a string, text content part, or image
+ content part. Each input must not exceed the model's max input token length.
+
+ model: The identifier of the reranking model to use.
+
+ query: The search query to rank items against. Can be a string, text content part, or
+ image content part. The input must not exceed the model's max input token
+ length.
+
+ max_num_results: (Optional) Maximum number of results to return. Default: returns all.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ "/v1/inference/rerank",
+ body=maybe_transform(
+ {
+ "items": items,
+ "model": model,
+ "query": query,
+ "max_num_results": max_num_results,
+ },
+ inference_rerank_params.InferenceRerankParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ post_parser=DataWrapper[InferenceRerankResponse]._unwrapper,
+ ),
+ cast_to=cast(Type[InferenceRerankResponse], DataWrapper[InferenceRerankResponse]),
+ )
+
class AsyncInferenceResource(AsyncAPIResource):
@cached_property
@@ -722,17 +783,17 @@ async def batch_chat_completion(
*,
messages_batch: Iterable[Iterable[Message]],
model_id: str,
- logprobs: inference_batch_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- tool_config: inference_batch_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_batch_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_batch_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ tool_config: inference_batch_chat_completion_params.ToolConfig | Omit = omit,
+ tools: Iterable[inference_batch_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> InferenceBatchChatCompletionResponse:
"""
Generate chat completions for a batch of messages using the specified model.
@@ -785,17 +846,17 @@ async def batch_chat_completion(
async def batch_completion(
self,
*,
- content_batch: List[InterleavedContent],
+ content_batch: SequenceNotStr[InterleavedContent],
model_id: str,
- logprobs: inference_batch_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
+ logprobs: inference_batch_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> BatchCompletion:
"""
Generate completions for a batch of content using the specified model.
@@ -848,20 +909,20 @@ async def chat_completion(
*,
messages: Iterable[Message],
model_id: str,
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ChatCompletionResponse:
"""
Generate a chat completion for the given messages using the specified model.
@@ -920,19 +981,19 @@ async def chat_completion(
messages: Iterable[Message],
model_id: str,
stream: Literal[True],
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[ChatCompletionResponseStreamChunk]:
"""
Generate a chat completion for the given messages using the specified model.
@@ -991,19 +1052,19 @@ async def chat_completion(
messages: Iterable[Message],
model_id: str,
stream: bool,
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ChatCompletionResponse | AsyncStream[ChatCompletionResponseStreamChunk]:
"""
Generate a chat completion for the given messages using the specified model.
@@ -1061,20 +1122,20 @@ async def chat_completion(
*,
messages: Iterable[Message],
model_id: str,
- logprobs: inference_chat_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- tool_choice: Literal["auto", "required", "none"] | NotGiven = NOT_GIVEN,
- tool_config: inference_chat_completion_params.ToolConfig | NotGiven = NOT_GIVEN,
- tool_prompt_format: Literal["json", "function_tag", "python_list"] | NotGiven = NOT_GIVEN,
- tools: Iterable[inference_chat_completion_params.Tool] | NotGiven = NOT_GIVEN,
+ logprobs: inference_chat_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ tool_choice: Literal["auto", "required", "none"] | Omit = omit,
+ tool_config: inference_chat_completion_params.ToolConfig | Omit = omit,
+ tool_prompt_format: Literal["json", "function_tag", "python_list"] | Omit = omit,
+ tools: Iterable[inference_chat_completion_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ChatCompletionResponse | AsyncStream[ChatCompletionResponseStreamChunk]:
if stream:
extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
@@ -1112,16 +1173,16 @@ async def completion(
*,
content: InterleavedContent,
model_id: str,
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionResponse:
"""
Generate a completion for the given content using the specified model.
@@ -1160,15 +1221,15 @@ async def completion(
content: InterleavedContent,
model_id: str,
stream: Literal[True],
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[CompletionResponse]:
"""
Generate a completion for the given content using the specified model.
@@ -1207,15 +1268,15 @@ async def completion(
content: InterleavedContent,
model_id: str,
stream: bool,
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionResponse | AsyncStream[CompletionResponse]:
"""
Generate a completion for the given content using the specified model.
@@ -1253,16 +1314,16 @@ async def completion(
*,
content: InterleavedContent,
model_id: str,
- logprobs: inference_completion_params.Logprobs | NotGiven = NOT_GIVEN,
- response_format: ResponseFormat | NotGiven = NOT_GIVEN,
- sampling_params: SamplingParams | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
+ logprobs: inference_completion_params.Logprobs | Omit = omit,
+ response_format: ResponseFormat | Omit = omit,
+ sampling_params: SamplingParams | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CompletionResponse | AsyncStream[CompletionResponse]:
if stream:
extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
@@ -1293,17 +1354,17 @@ async def completion(
async def embeddings(
self,
*,
- contents: Union[List[str], Iterable[InterleavedContentItem]],
+ contents: Union[SequenceNotStr[str], Iterable[InterleavedContentItem]],
model_id: str,
- output_dimension: int | NotGiven = NOT_GIVEN,
- task_type: Literal["query", "document"] | NotGiven = NOT_GIVEN,
- text_truncation: Literal["none", "start", "end"] | NotGiven = NOT_GIVEN,
+ output_dimension: int | Omit = omit,
+ task_type: Literal["query", "document"] | Omit = omit,
+ text_truncation: Literal["none", "start", "end"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> EmbeddingsResponse:
"""
Generate embeddings for content pieces using the specified model.
@@ -1351,6 +1412,64 @@ async def embeddings(
cast_to=EmbeddingsResponse,
)
+ async def rerank(
+ self,
+ *,
+ items: SequenceNotStr[inference_rerank_params.Item],
+ model: str,
+ query: inference_rerank_params.Query,
+ max_num_results: int | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> InferenceRerankResponse:
+ """
+ Rerank a list of documents based on their relevance to a query.
+
+ Args:
+ items: List of items to rerank. Each item can be a string, text content part, or image
+ content part. Each input must not exceed the model's max input token length.
+
+ model: The identifier of the reranking model to use.
+
+ query: The search query to rank items against. Can be a string, text content part, or
+ image content part. The input must not exceed the model's max input token
+ length.
+
+ max_num_results: (Optional) Maximum number of results to return. Default: returns all.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ "/v1/inference/rerank",
+ body=await async_maybe_transform(
+ {
+ "items": items,
+ "model": model,
+ "query": query,
+ "max_num_results": max_num_results,
+ },
+ inference_rerank_params.InferenceRerankParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ post_parser=DataWrapper[InferenceRerankResponse]._unwrapper,
+ ),
+ cast_to=cast(Type[InferenceRerankResponse], DataWrapper[InferenceRerankResponse]),
+ )
+
class InferenceResourceWithRawResponse:
def __init__(self, inference: InferenceResource) -> None:
@@ -1364,19 +1483,22 @@ def __init__(self, inference: InferenceResource) -> None:
)
self.chat_completion = ( # pyright: ignore[reportDeprecated]
to_raw_response_wrapper(
- inference.chat_completion # pyright: ignore[reportDeprecated],
+ inference.chat_completion, # pyright: ignore[reportDeprecated],
)
)
self.completion = ( # pyright: ignore[reportDeprecated]
to_raw_response_wrapper(
- inference.completion # pyright: ignore[reportDeprecated],
+ inference.completion, # pyright: ignore[reportDeprecated],
)
)
self.embeddings = ( # pyright: ignore[reportDeprecated]
to_raw_response_wrapper(
- inference.embeddings # pyright: ignore[reportDeprecated],
+ inference.embeddings, # pyright: ignore[reportDeprecated],
)
)
+ self.rerank = to_raw_response_wrapper(
+ inference.rerank,
+ )
class AsyncInferenceResourceWithRawResponse:
@@ -1391,19 +1513,22 @@ def __init__(self, inference: AsyncInferenceResource) -> None:
)
self.chat_completion = ( # pyright: ignore[reportDeprecated]
async_to_raw_response_wrapper(
- inference.chat_completion # pyright: ignore[reportDeprecated],
+ inference.chat_completion, # pyright: ignore[reportDeprecated],
)
)
self.completion = ( # pyright: ignore[reportDeprecated]
async_to_raw_response_wrapper(
- inference.completion # pyright: ignore[reportDeprecated],
+ inference.completion, # pyright: ignore[reportDeprecated],
)
)
self.embeddings = ( # pyright: ignore[reportDeprecated]
async_to_raw_response_wrapper(
- inference.embeddings # pyright: ignore[reportDeprecated],
+ inference.embeddings, # pyright: ignore[reportDeprecated],
)
)
+ self.rerank = async_to_raw_response_wrapper(
+ inference.rerank,
+ )
class InferenceResourceWithStreamingResponse:
@@ -1418,19 +1543,22 @@ def __init__(self, inference: InferenceResource) -> None:
)
self.chat_completion = ( # pyright: ignore[reportDeprecated]
to_streamed_response_wrapper(
- inference.chat_completion # pyright: ignore[reportDeprecated],
+ inference.chat_completion, # pyright: ignore[reportDeprecated],
)
)
self.completion = ( # pyright: ignore[reportDeprecated]
to_streamed_response_wrapper(
- inference.completion # pyright: ignore[reportDeprecated],
+ inference.completion, # pyright: ignore[reportDeprecated],
)
)
self.embeddings = ( # pyright: ignore[reportDeprecated]
to_streamed_response_wrapper(
- inference.embeddings # pyright: ignore[reportDeprecated],
+ inference.embeddings, # pyright: ignore[reportDeprecated],
)
)
+ self.rerank = to_streamed_response_wrapper(
+ inference.rerank,
+ )
class AsyncInferenceResourceWithStreamingResponse:
@@ -1445,16 +1573,19 @@ def __init__(self, inference: AsyncInferenceResource) -> None:
)
self.chat_completion = ( # pyright: ignore[reportDeprecated]
async_to_streamed_response_wrapper(
- inference.chat_completion # pyright: ignore[reportDeprecated],
+ inference.chat_completion, # pyright: ignore[reportDeprecated],
)
)
self.completion = ( # pyright: ignore[reportDeprecated]
async_to_streamed_response_wrapper(
- inference.completion # pyright: ignore[reportDeprecated],
+ inference.completion, # pyright: ignore[reportDeprecated],
)
)
self.embeddings = ( # pyright: ignore[reportDeprecated]
async_to_streamed_response_wrapper(
- inference.embeddings # pyright: ignore[reportDeprecated],
+ inference.embeddings, # pyright: ignore[reportDeprecated],
)
)
+ self.rerank = async_to_streamed_response_wrapper(
+ inference.rerank,
+ )
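
A minimal usage sketch for the `rerank` endpoint added above, assuming a running Llama Stack server at a placeholder URL and a hypothetical reranking model id; the parameter names follow the signatures in this file, and the async resource mirrors the call with `await`.

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # items accepts strings or content parts; max_num_results defaults to omit.
    response = client.inference.rerank(
        model="example-reranker",  # hypothetical model identifier
        query="How do I reset my password?",
        items=[
            "Reset your password from the account settings page.",
            "Our office is closed on public holidays.",
        ],
        max_num_results=1,
    )
    # The DataWrapper post_parser unwraps the envelope, so this is already the
    # InferenceRerankResponse payload.
    print(response)
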
diff --git a/src/llama_stack_client/resources/inspect.py b/src/llama_stack_client/resources/inspect.py
index bd67ae96..cca2f501 100644
--- a/src/llama_stack_client/resources/inspect.py
+++ b/src/llama_stack_client/resources/inspect.py
@@ -4,7 +4,7 @@
import httpx
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, not_given
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -48,7 +48,7 @@ def health(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> HealthInfo:
"""Get the current health status of the service."""
return self._get(
@@ -67,7 +67,7 @@ def version(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VersionInfo:
"""Get the version of the service."""
return self._get(
@@ -107,7 +107,7 @@ async def health(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> HealthInfo:
"""Get the current health status of the service."""
return await self._get(
@@ -126,7 +126,7 @@ async def version(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VersionInfo:
"""Get the version of the service."""
return await self._get(
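
The inspect endpoints show the caller-facing effect of the sentinel change running through this diff: optional keyword arguments now default to `not_given`/`omit`, so they are simply left out unless an override is wanted. A small sketch, with a placeholder base URL:

    import httpx

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # No optional arguments: extra_* and timeout stay at their defaults.
    health = client.inspect.health()

    # Per-request timeout override, as documented on the timeout parameter.
    version = client.inspect.version(timeout=httpx.Timeout(5.0))
    print(health, version)
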
diff --git a/src/llama_stack_client/resources/models/__init__.py b/src/llama_stack_client/resources/models/__init__.py
new file mode 100644
index 00000000..fc06a000
--- /dev/null
+++ b/src/llama_stack_client/resources/models/__init__.py
@@ -0,0 +1,33 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .models import (
+ ModelsResource,
+ AsyncModelsResource,
+ ModelsResourceWithRawResponse,
+ AsyncModelsResourceWithRawResponse,
+ ModelsResourceWithStreamingResponse,
+ AsyncModelsResourceWithStreamingResponse,
+)
+from .openai import (
+ OpenAIResource,
+ AsyncOpenAIResource,
+ OpenAIResourceWithRawResponse,
+ AsyncOpenAIResourceWithRawResponse,
+ OpenAIResourceWithStreamingResponse,
+ AsyncOpenAIResourceWithStreamingResponse,
+)
+
+__all__ = [
+ "OpenAIResource",
+ "AsyncOpenAIResource",
+ "OpenAIResourceWithRawResponse",
+ "AsyncOpenAIResourceWithRawResponse",
+ "OpenAIResourceWithStreamingResponse",
+ "AsyncOpenAIResourceWithStreamingResponse",
+ "ModelsResource",
+ "AsyncModelsResource",
+ "ModelsResourceWithRawResponse",
+ "AsyncModelsResourceWithRawResponse",
+ "ModelsResourceWithStreamingResponse",
+ "AsyncModelsResourceWithStreamingResponse",
+]
diff --git a/src/llama_stack_client/resources/models.py b/src/llama_stack_client/resources/models/models.py
similarity index 85%
rename from src/llama_stack_client/resources/models.py
rename to src/llama_stack_client/resources/models/models.py
index 4efb632a..f044c50d 100644
--- a/src/llama_stack_client/resources/models.py
+++ b/src/llama_stack_client/resources/models/models.py
@@ -7,26 +7,38 @@
import httpx
-from ..types import model_register_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
+from .openai import (
+ OpenAIResource,
+ AsyncOpenAIResource,
+ OpenAIResourceWithRawResponse,
+ AsyncOpenAIResourceWithRawResponse,
+ OpenAIResourceWithStreamingResponse,
+ AsyncOpenAIResourceWithStreamingResponse,
+)
+from ...types import model_register_params
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
+from ..._utils import maybe_transform, async_maybe_transform
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from .._wrappers import DataWrapper
-from ..types.model import Model
-from .._base_client import make_request_options
-from ..types.model_list_response import ModelListResponse
+from ..._wrappers import DataWrapper
+from ...types.model import Model
+from ..._base_client import make_request_options
+from ...types.model_list_response import ModelListResponse
__all__ = ["ModelsResource", "AsyncModelsResource"]
class ModelsResource(SyncAPIResource):
+ @cached_property
+ def openai(self) -> OpenAIResource:
+ return OpenAIResource(self._client)
+
@cached_property
def with_raw_response(self) -> ModelsResourceWithRawResponse:
"""
@@ -55,7 +67,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Model:
"""
Get a model by its identifier.
@@ -87,7 +99,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ModelListResponse:
"""List all models."""
return self._get(
@@ -106,16 +118,16 @@ def register(
self,
*,
model_id: str,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- model_type: Literal["llm", "embedding"] | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_model_id: str | NotGiven = NOT_GIVEN,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ model_type: Literal["llm", "embedding"] | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_model_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Model:
"""
Register a model.
@@ -166,7 +178,7 @@ def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a model.
@@ -193,6 +205,10 @@ def unregister(
class AsyncModelsResource(AsyncAPIResource):
+ @cached_property
+ def openai(self) -> AsyncOpenAIResource:
+ return AsyncOpenAIResource(self._client)
+
@cached_property
def with_raw_response(self) -> AsyncModelsResourceWithRawResponse:
"""
@@ -221,7 +237,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Model:
"""
Get a model by its identifier.
@@ -253,7 +269,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ModelListResponse:
"""List all models."""
return await self._get(
@@ -272,16 +288,16 @@ async def register(
self,
*,
model_id: str,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- model_type: Literal["llm", "embedding"] | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_model_id: str | NotGiven = NOT_GIVEN,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ model_type: Literal["llm", "embedding"] | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_model_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Model:
"""
Register a model.
@@ -332,7 +348,7 @@ async def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a model.
@@ -375,6 +391,10 @@ def __init__(self, models: ModelsResource) -> None:
models.unregister,
)
+ @cached_property
+ def openai(self) -> OpenAIResourceWithRawResponse:
+ return OpenAIResourceWithRawResponse(self._models.openai)
+
class AsyncModelsResourceWithRawResponse:
def __init__(self, models: AsyncModelsResource) -> None:
@@ -393,6 +413,10 @@ def __init__(self, models: AsyncModelsResource) -> None:
models.unregister,
)
+ @cached_property
+ def openai(self) -> AsyncOpenAIResourceWithRawResponse:
+ return AsyncOpenAIResourceWithRawResponse(self._models.openai)
+
class ModelsResourceWithStreamingResponse:
def __init__(self, models: ModelsResource) -> None:
@@ -411,6 +435,10 @@ def __init__(self, models: ModelsResource) -> None:
models.unregister,
)
+ @cached_property
+ def openai(self) -> OpenAIResourceWithStreamingResponse:
+ return OpenAIResourceWithStreamingResponse(self._models.openai)
+
class AsyncModelsResourceWithStreamingResponse:
def __init__(self, models: AsyncModelsResource) -> None:
@@ -428,3 +456,7 @@ def __init__(self, models: AsyncModelsResource) -> None:
self.unregister = async_to_streamed_response_wrapper(
models.unregister,
)
+
+ @cached_property
+ def openai(self) -> AsyncOpenAIResourceWithStreamingResponse:
+ return AsyncOpenAIResourceWithStreamingResponse(self._models.openai)
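
Beyond the move to models/models.py, `register` keeps its shape with the new `Omit` defaults: only model_id is required. A sketch with placeholder identifiers:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # metadata and provider_model_id default to omit and are simply left out.
    model = client.models.register(
        model_id="example-embedding-model",  # hypothetical identifier
        model_type="embedding",
        provider_id="example-provider",      # hypothetical provider id
    )
    print(model)
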
diff --git a/src/llama_stack_client/resources/models/openai.py b/src/llama_stack_client/resources/models/openai.py
new file mode 100644
index 00000000..e4b2fbd8
--- /dev/null
+++ b/src/llama_stack_client/resources/models/openai.py
@@ -0,0 +1,146 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Type, cast
+
+import httpx
+
+from ..._types import Body, Query, Headers, NotGiven, not_given
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
+ to_raw_response_wrapper,
+ to_streamed_response_wrapper,
+ async_to_raw_response_wrapper,
+ async_to_streamed_response_wrapper,
+)
+from ..._wrappers import DataWrapper
+from ..._base_client import make_request_options
+from ...types.models.openai_list_response import OpenAIListResponse
+
+__all__ = ["OpenAIResource", "AsyncOpenAIResource"]
+
+
+class OpenAIResource(SyncAPIResource):
+ @cached_property
+ def with_raw_response(self) -> OpenAIResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers
+ """
+ return OpenAIResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response
+ """
+ return OpenAIResourceWithStreamingResponse(self)
+
+ def list(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> OpenAIListResponse:
+ """List models using the OpenAI API."""
+ return self._get(
+ "/v1/openai/v1/models",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ post_parser=DataWrapper[OpenAIListResponse]._unwrapper,
+ ),
+ cast_to=cast(Type[OpenAIListResponse], DataWrapper[OpenAIListResponse]),
+ )
+
+
+class AsyncOpenAIResource(AsyncAPIResource):
+ @cached_property
+ def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#accessing-raw-response-data-eg-headers
+ """
+ return AsyncOpenAIResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> AsyncOpenAIResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/llamastack/llama-stack-client-python#with_streaming_response
+ """
+ return AsyncOpenAIResourceWithStreamingResponse(self)
+
+ async def list(
+ self,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> OpenAIListResponse:
+ """List models using the OpenAI API."""
+ return await self._get(
+ "/v1/openai/v1/models",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ post_parser=DataWrapper[OpenAIListResponse]._unwrapper,
+ ),
+ cast_to=cast(Type[OpenAIListResponse], DataWrapper[OpenAIListResponse]),
+ )
+
+
+class OpenAIResourceWithRawResponse:
+ def __init__(self, openai: OpenAIResource) -> None:
+ self._openai = openai
+
+ self.list = to_raw_response_wrapper(
+ openai.list,
+ )
+
+
+class AsyncOpenAIResourceWithRawResponse:
+ def __init__(self, openai: AsyncOpenAIResource) -> None:
+ self._openai = openai
+
+ self.list = async_to_raw_response_wrapper(
+ openai.list,
+ )
+
+
+class OpenAIResourceWithStreamingResponse:
+ def __init__(self, openai: OpenAIResource) -> None:
+ self._openai = openai
+
+ self.list = to_streamed_response_wrapper(
+ openai.list,
+ )
+
+
+class AsyncOpenAIResourceWithStreamingResponse:
+ def __init__(self, openai: AsyncOpenAIResource) -> None:
+ self._openai = openai
+
+ self.list = async_to_streamed_response_wrapper(
+ openai.list,
+ )
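
A usage sketch for the new OpenAI-compatible listing, assuming the standard client entry point; the route and the DataWrapper unwrapping come from the resource above, and the raw-response access from the wrapper classes added in models.py:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # GET /v1/openai/v1/models; the post_parser strips the {"data": ...} envelope,
    # so this is already the OpenAIListResponse payload.
    openai_models = client.models.openai.list()
    print(openai_models)

    # The nested wrappers also expose raw and streamed variants.
    raw = client.models.with_raw_response.openai.list()
    print(raw.headers)
    openai_models_again = raw.parse()
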
diff --git a/src/llama_stack_client/resources/moderations.py b/src/llama_stack_client/resources/moderations.py
index 165f3ce3..a016b5b0 100644
--- a/src/llama_stack_client/resources/moderations.py
+++ b/src/llama_stack_client/resources/moderations.py
@@ -2,12 +2,12 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
import httpx
from ..types import moderation_create_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, SequenceNotStr, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -46,14 +46,14 @@ def with_streaming_response(self) -> ModerationsResourceWithStreamingResponse:
def create(
self,
*,
- input: Union[str, List[str]],
+ input: Union[str, SequenceNotStr[str]],
model: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CreateResponse:
"""
Classifies if text and/or image inputs are potentially harmful.
@@ -111,14 +111,14 @@ def with_streaming_response(self) -> AsyncModerationsResourceWithStreamingRespon
async def create(
self,
*,
- input: Union[str, List[str]],
+ input: Union[str, SequenceNotStr[str]],
model: str,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> CreateResponse:
"""
Classifies if text and/or image inputs are potentially harmful.
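
The `input` parameter moves from List[str] to SequenceNotStr[str], which still accepts any sequence of strings while preventing a bare str from type-checking as a sequence of characters. A sketch with a placeholder model and text:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # A single string remains valid via the Union...
    single = client.moderations.create(
        input="some user supplied text",
        model="example-safety-model",  # hypothetical model identifier
    )

    # ...and so is any non-str sequence of strings, such as a list or tuple.
    batch = client.moderations.create(
        input=["first message", "second message"],
        model="example-safety-model",
    )
    print(single, batch)
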
diff --git a/src/llama_stack_client/resources/post_training/job.py b/src/llama_stack_client/resources/post_training/job.py
index 2252b19e..ab00e054 100644
--- a/src/llama_stack_client/resources/post_training/job.py
+++ b/src/llama_stack_client/resources/post_training/job.py
@@ -6,7 +6,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._types import Body, Query, Headers, NoneType, NotGiven, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -54,7 +54,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> List[Data]:
"""Get all training jobs."""
return self._get(
@@ -78,7 +78,7 @@ def artifacts(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobArtifactsResponse:
"""
Get the artifacts of a training job.
@@ -115,7 +115,7 @@ def cancel(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Cancel a training job.
@@ -150,7 +150,7 @@ def status(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobStatusResponse:
"""
Get the status of a training job.
@@ -207,7 +207,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> List[Data]:
"""Get all training jobs."""
return await self._get(
@@ -231,7 +231,7 @@ async def artifacts(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobArtifactsResponse:
"""
Get the artifacts of a training job.
@@ -268,7 +268,7 @@ async def cancel(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Cancel a training job.
@@ -303,7 +303,7 @@ async def status(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> JobStatusResponse:
"""
Get the status of a training job.
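
The job endpoints only swap their sentinels, so calls are unchanged; `list` takes no required arguments, while artifacts, cancel and status additionally take a job identifier not visible in these hunks. A small sketch:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # List all training jobs; optional extra_* and timeout keep their defaults.
    jobs = client.post_training.job.list()
    print(jobs)
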
diff --git a/src/llama_stack_client/resources/post_training/post_training.py b/src/llama_stack_client/resources/post_training/post_training.py
index ff1fab45..760d9cb2 100644
--- a/src/llama_stack_client/resources/post_training/post_training.py
+++ b/src/llama_stack_client/resources/post_training/post_training.py
@@ -18,7 +18,7 @@
post_training_preference_optimize_params,
post_training_supervised_fine_tune_params,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -73,7 +73,7 @@ def preference_optimize(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> PostTrainingJob:
"""
Run preference optimization of a model.
@@ -125,15 +125,15 @@ def supervised_fine_tune(
job_uuid: str,
logger_config: Dict[str, Union[bool, float, str, Iterable[object], object, None]],
training_config: post_training_supervised_fine_tune_params.TrainingConfig,
- algorithm_config: AlgorithmConfigParam | NotGiven = NOT_GIVEN,
- checkpoint_dir: str | NotGiven = NOT_GIVEN,
- model: str | NotGiven = NOT_GIVEN,
+ algorithm_config: AlgorithmConfigParam | Omit = omit,
+ checkpoint_dir: str | Omit = omit,
+ model: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> PostTrainingJob:
"""
Run supervised fine-tuning of a model.
@@ -220,7 +220,7 @@ async def preference_optimize(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> PostTrainingJob:
"""
Run preference optimization of a model.
@@ -272,15 +272,15 @@ async def supervised_fine_tune(
job_uuid: str,
logger_config: Dict[str, Union[bool, float, str, Iterable[object], object, None]],
training_config: post_training_supervised_fine_tune_params.TrainingConfig,
- algorithm_config: AlgorithmConfigParam | NotGiven = NOT_GIVEN,
- checkpoint_dir: str | NotGiven = NOT_GIVEN,
- model: str | NotGiven = NOT_GIVEN,
+ algorithm_config: AlgorithmConfigParam | Omit = omit,
+ checkpoint_dir: str | Omit = omit,
+ model: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> PostTrainingJob:
"""
Run supervised fine-tuning of a model.
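
For `supervised_fine_tune`, job_uuid, logger_config and training_config stay required while algorithm_config, checkpoint_dir and model now default to `omit`. A rough sketch only: the training_config body is a placeholder whose real fields are defined by post_training_supervised_fine_tune_params.TrainingConfig, and the ids below are hypothetical.

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    job = client.post_training.supervised_fine_tune(
        job_uuid="job-1234",         # hypothetical job id
        logger_config={},
        training_config={},          # placeholder; see TrainingConfig for real fields
        model="example-base-model",  # optional now that it defaults to omit
    )
    print(job)
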
diff --git a/src/llama_stack_client/resources/providers.py b/src/llama_stack_client/resources/providers.py
index a50f7d67..5a52e070 100644
--- a/src/llama_stack_client/resources/providers.py
+++ b/src/llama_stack_client/resources/providers.py
@@ -6,7 +6,7 @@
import httpx
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, not_given
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -52,7 +52,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ProviderInfo:
"""
Get detailed information about a specific provider.
@@ -84,7 +84,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ProviderListResponse:
"""List all available providers."""
return self._get(
@@ -129,7 +129,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ProviderInfo:
"""
Get detailed information about a specific provider.
@@ -161,7 +161,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ProviderListResponse:
"""List all available providers."""
return await self._get(
diff --git a/src/llama_stack_client/resources/responses/input_items.py b/src/llama_stack_client/resources/responses/input_items.py
index 08139af7..da06debd 100644
--- a/src/llama_stack_client/resources/responses/input_items.py
+++ b/src/llama_stack_client/resources/responses/input_items.py
@@ -2,12 +2,11 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Literal
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -48,17 +47,17 @@ def list(
self,
response_id: str,
*,
- after: str | NotGiven = NOT_GIVEN,
- before: str | NotGiven = NOT_GIVEN,
- include: List[str] | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ before: str | Omit = omit,
+ include: SequenceNotStr[str] | Omit = omit,
+ limit: int | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> InputItemListResponse:
"""
List input items for a given OpenAI response.
@@ -131,17 +130,17 @@ async def list(
self,
response_id: str,
*,
- after: str | NotGiven = NOT_GIVEN,
- before: str | NotGiven = NOT_GIVEN,
- include: List[str] | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ before: str | Omit = omit,
+ include: SequenceNotStr[str] | Omit = omit,
+ limit: int | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> InputItemListResponse:
"""
List input items for a given OpenAI response.
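
For input-item listing only the response id is required; the cursor-style after/before/include/limit/order arguments default to `omit`. A sketch with a hypothetical response id:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    items = client.responses.input_items.list(
        "resp_abc123",  # hypothetical response id (positional)
        limit=10,
        order="desc",
    )
    print(items)
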
diff --git a/src/llama_stack_client/resources/responses/responses.py b/src/llama_stack_client/resources/responses/responses.py
index b73be85f..7f21f3ea 100644
--- a/src/llama_stack_client/resources/responses/responses.py
+++ b/src/llama_stack_client/resources/responses/responses.py
@@ -2,13 +2,13 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
from typing_extensions import Literal, overload
import httpx
from ...types import response_list_params, response_create_params
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import required_args, maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -32,6 +32,7 @@
from ...types.response_object import ResponseObject
from ...types.response_list_response import ResponseListResponse
from ...types.response_object_stream import ResponseObjectStream
+from ...types.response_delete_response import ResponseDeleteResponse
__all__ = ["ResponsesResource", "AsyncResponsesResource"]
@@ -66,21 +67,21 @@ def create(
*,
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject:
"""
Create a new OpenAI response.
@@ -115,20 +116,20 @@ def create(
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
stream: Literal[True],
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Stream[ResponseObjectStream]:
"""
Create a new OpenAI response.
@@ -163,20 +164,20 @@ def create(
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
stream: bool,
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject | Stream[ResponseObjectStream]:
"""
Create a new OpenAI response.
@@ -210,21 +211,21 @@ def create(
*,
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject | Stream[ResponseObjectStream]:
return self._post(
"/v1/openai/v1/responses",
@@ -263,7 +264,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject:
"""
Retrieve an OpenAI response by its ID.
@@ -290,16 +291,16 @@ def retrieve(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- model: str | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ limit: int | Omit = omit,
+ model: str | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyncOpenAICursorPage[ResponseListResponse]:
"""
List all OpenAI responses.
@@ -342,6 +343,39 @@ def list(
model=ResponseListResponse,
)
+ def delete(
+ self,
+ response_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ResponseDeleteResponse:
+ """
+ Delete an OpenAI response by its ID.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not response_id:
+ raise ValueError(f"Expected a non-empty value for `response_id` but received {response_id!r}")
+ return self._delete(
+ f"/v1/openai/v1/responses/{response_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=ResponseDeleteResponse,
+ )
+
class AsyncResponsesResource(AsyncAPIResource):
@cached_property
@@ -373,21 +407,21 @@ async def create(
*,
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- stream: Literal[False] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ stream: Literal[False] | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject:
"""
Create a new OpenAI response.
@@ -422,20 +456,20 @@ async def create(
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
stream: Literal[True],
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncStream[ResponseObjectStream]:
"""
Create a new OpenAI response.
@@ -470,20 +504,20 @@ async def create(
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
stream: bool,
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject | AsyncStream[ResponseObjectStream]:
"""
Create a new OpenAI response.
@@ -517,21 +551,21 @@ async def create(
*,
input: Union[str, Iterable[response_create_params.InputUnionMember1]],
model: str,
- include: List[str] | NotGiven = NOT_GIVEN,
- instructions: str | NotGiven = NOT_GIVEN,
- max_infer_iters: int | NotGiven = NOT_GIVEN,
- previous_response_id: str | NotGiven = NOT_GIVEN,
- store: bool | NotGiven = NOT_GIVEN,
- stream: Literal[False] | Literal[True] | NotGiven = NOT_GIVEN,
- temperature: float | NotGiven = NOT_GIVEN,
- text: response_create_params.Text | NotGiven = NOT_GIVEN,
- tools: Iterable[response_create_params.Tool] | NotGiven = NOT_GIVEN,
+ include: SequenceNotStr[str] | Omit = omit,
+ instructions: str | Omit = omit,
+ max_infer_iters: int | Omit = omit,
+ previous_response_id: str | Omit = omit,
+ store: bool | Omit = omit,
+ stream: Literal[False] | Literal[True] | Omit = omit,
+ temperature: float | Omit = omit,
+ text: response_create_params.Text | Omit = omit,
+ tools: Iterable[response_create_params.Tool] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject | AsyncStream[ResponseObjectStream]:
return await self._post(
"/v1/openai/v1/responses",
@@ -570,7 +604,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ResponseObject:
"""
Retrieve an OpenAI response by its ID.
@@ -597,16 +631,16 @@ async def retrieve(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- model: str | NotGiven = NOT_GIVEN,
- order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ limit: int | Omit = omit,
+ model: str | Omit = omit,
+ order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncPaginator[ResponseListResponse, AsyncOpenAICursorPage[ResponseListResponse]]:
"""
List all OpenAI responses.
@@ -649,6 +683,39 @@ def list(
model=ResponseListResponse,
)
+ async def delete(
+ self,
+ response_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> ResponseDeleteResponse:
+ """
+ Delete an OpenAI response by its ID.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not response_id:
+ raise ValueError(f"Expected a non-empty value for `response_id` but received {response_id!r}")
+ return await self._delete(
+ f"/v1/openai/v1/responses/{response_id}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=ResponseDeleteResponse,
+ )
+
class ResponsesResourceWithRawResponse:
def __init__(self, responses: ResponsesResource) -> None:
@@ -663,6 +730,9 @@ def __init__(self, responses: ResponsesResource) -> None:
self.list = to_raw_response_wrapper(
responses.list,
)
+ self.delete = to_raw_response_wrapper(
+ responses.delete,
+ )
@cached_property
def input_items(self) -> InputItemsResourceWithRawResponse:
@@ -682,6 +752,9 @@ def __init__(self, responses: AsyncResponsesResource) -> None:
self.list = async_to_raw_response_wrapper(
responses.list,
)
+ self.delete = async_to_raw_response_wrapper(
+ responses.delete,
+ )
@cached_property
def input_items(self) -> AsyncInputItemsResourceWithRawResponse:
@@ -701,6 +774,9 @@ def __init__(self, responses: ResponsesResource) -> None:
self.list = to_streamed_response_wrapper(
responses.list,
)
+ self.delete = to_streamed_response_wrapper(
+ responses.delete,
+ )
@cached_property
def input_items(self) -> InputItemsResourceWithStreamingResponse:
@@ -720,6 +796,9 @@ def __init__(self, responses: AsyncResponsesResource) -> None:
self.list = async_to_streamed_response_wrapper(
responses.list,
)
+ self.delete = async_to_streamed_response_wrapper(
+ responses.delete,
+ )
@cached_property
def input_items(self) -> AsyncInputItemsResourceWithStreamingResponse:
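
The responses resource above now exposes a delete endpoint (DELETE /v1/openai/v1/responses/{response_id}, returning ResponseDeleteResponse), and the create overloads move from List[str]/NOT_GIVEN to SequenceNotStr[str] and the omit sentinel. An illustrative sketch of calling the new surface, assuming a locally running server; the base_url, model id, and include value are placeholders, not taken from the patch:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # `include` is SequenceNotStr[str]: any non-string sequence (list, tuple) is accepted,
    # while a bare str is rejected by the type checker. Keyword arguments left unset default
    # to `omit` and are simply not sent in the request body.
    response = client.responses.create(
        model="my-model",                 # placeholder model id
        input="Summarize the release notes.",
        include=["message.output_text"],  # placeholder include value
    )

    deleted = client.responses.delete(response.id)  # DELETE /v1/openai/v1/responses/{response_id}

Because optional parameters now default to omit rather than NOT_GIVEN, omitted fields never appear in the serialized payload; the same convention is applied to every other resource touched by this patch.
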
diff --git a/src/llama_stack_client/resources/routes.py b/src/llama_stack_client/resources/routes.py
index 7d544c0e..9a1e73e6 100644
--- a/src/llama_stack_client/resources/routes.py
+++ b/src/llama_stack_client/resources/routes.py
@@ -6,7 +6,7 @@
import httpx
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, not_given
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import (
@@ -50,7 +50,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> RouteListResponse:
"""List all available API routes with their methods and implementing providers."""
return self._get(
@@ -94,7 +94,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> RouteListResponse:
"""List all available API routes with their methods and implementing providers."""
return await self._get(
diff --git a/src/llama_stack_client/resources/safety.py b/src/llama_stack_client/resources/safety.py
index 813a1f67..e886dc08 100644
--- a/src/llama_stack_client/resources/safety.py
+++ b/src/llama_stack_client/resources/safety.py
@@ -7,7 +7,7 @@
import httpx
from ..types import safety_run_shield_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -55,7 +55,7 @@ def run_shield(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> RunShieldResponse:
"""
Run a shield.
@@ -123,7 +123,7 @@ async def run_shield(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> RunShieldResponse:
"""
Run a shield.
diff --git a/src/llama_stack_client/resources/scoring.py b/src/llama_stack_client/resources/scoring.py
index 3e64f8eb..dddf8002 100644
--- a/src/llama_stack_client/resources/scoring.py
+++ b/src/llama_stack_client/resources/scoring.py
@@ -7,7 +7,7 @@
import httpx
from ..types import scoring_score_params, scoring_score_batch_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Query, Headers, NotGiven, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -55,7 +55,7 @@ def score(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringScoreResponse:
"""
Score a list of rows.
@@ -99,7 +99,7 @@ def score_batch(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringScoreBatchResponse:
"""
Score a batch of rows.
@@ -166,7 +166,7 @@ async def score(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringScoreResponse:
"""
Score a list of rows.
@@ -210,7 +210,7 @@ async def score_batch(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringScoreBatchResponse:
"""
Score a batch of rows.
diff --git a/src/llama_stack_client/resources/scoring_functions.py b/src/llama_stack_client/resources/scoring_functions.py
index 1546fa3e..ea3d8a6d 100644
--- a/src/llama_stack_client/resources/scoring_functions.py
+++ b/src/llama_stack_client/resources/scoring_functions.py
@@ -7,7 +7,7 @@
import httpx
from ..types import scoring_function_register_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -55,7 +55,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringFn:
"""
Get a scoring function by its ID.
@@ -87,7 +87,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringFunctionListResponse:
"""List all scoring functions."""
return self._get(
@@ -108,15 +108,15 @@ def register(
description: str,
return_type: scoring_function_register_params.ReturnType,
scoring_fn_id: str,
- params: ScoringFnParamsParam | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_scoring_fn_id: str | NotGiven = NOT_GIVEN,
+ params: ScoringFnParamsParam | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_scoring_fn_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Register a scoring function.
@@ -191,7 +191,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringFn:
"""
Get a scoring function by its ID.
@@ -223,7 +223,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ScoringFunctionListResponse:
"""List all scoring functions."""
return await self._get(
@@ -244,15 +244,15 @@ async def register(
description: str,
return_type: scoring_function_register_params.ReturnType,
scoring_fn_id: str,
- params: ScoringFnParamsParam | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_scoring_fn_id: str | NotGiven = NOT_GIVEN,
+ params: ScoringFnParamsParam | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_scoring_fn_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Register a scoring function.
diff --git a/src/llama_stack_client/resources/shields.py b/src/llama_stack_client/resources/shields.py
index cf0c7678..a7893939 100644
--- a/src/llama_stack_client/resources/shields.py
+++ b/src/llama_stack_client/resources/shields.py
@@ -7,7 +7,7 @@
import httpx
from ..types import shield_register_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -54,7 +54,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Shield:
"""
Get a shield by its identifier.
@@ -86,7 +86,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ShieldListResponse:
"""List all shields."""
return self._get(
@@ -101,19 +101,53 @@ def list(
cast_to=cast(Type[ShieldListResponse], DataWrapper[ShieldListResponse]),
)
+ def delete(
+ self,
+ identifier: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> None:
+ """
+ Unregister a shield.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not identifier:
+ raise ValueError(f"Expected a non-empty value for `identifier` but received {identifier!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
+ return self._delete(
+ f"/v1/shields/{identifier}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=NoneType,
+ )
+
def register(
self,
*,
shield_id: str,
- params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_shield_id: str | NotGiven = NOT_GIVEN,
+ params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_shield_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Shield:
"""
Register a shield.
@@ -182,7 +216,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Shield:
"""
Get a shield by its identifier.
@@ -214,7 +248,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ShieldListResponse:
"""List all shields."""
return await self._get(
@@ -229,19 +263,53 @@ async def list(
cast_to=cast(Type[ShieldListResponse], DataWrapper[ShieldListResponse]),
)
+ async def delete(
+ self,
+ identifier: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> None:
+ """
+ Unregister a shield.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not identifier:
+ raise ValueError(f"Expected a non-empty value for `identifier` but received {identifier!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
+ return await self._delete(
+ f"/v1/shields/{identifier}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=NoneType,
+ )
+
async def register(
self,
*,
shield_id: str,
- params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_shield_id: str | NotGiven = NOT_GIVEN,
+ params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_shield_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Shield:
"""
Register a shield.
@@ -291,6 +359,9 @@ def __init__(self, shields: ShieldsResource) -> None:
self.list = to_raw_response_wrapper(
shields.list,
)
+ self.delete = to_raw_response_wrapper(
+ shields.delete,
+ )
self.register = to_raw_response_wrapper(
shields.register,
)
@@ -306,6 +377,9 @@ def __init__(self, shields: AsyncShieldsResource) -> None:
self.list = async_to_raw_response_wrapper(
shields.list,
)
+ self.delete = async_to_raw_response_wrapper(
+ shields.delete,
+ )
self.register = async_to_raw_response_wrapper(
shields.register,
)
@@ -321,6 +395,9 @@ def __init__(self, shields: ShieldsResource) -> None:
self.list = to_streamed_response_wrapper(
shields.list,
)
+ self.delete = to_streamed_response_wrapper(
+ shields.delete,
+ )
self.register = to_streamed_response_wrapper(
shields.register,
)
@@ -336,6 +413,9 @@ def __init__(self, shields: AsyncShieldsResource) -> None:
self.list = async_to_streamed_response_wrapper(
shields.list,
)
+ self.delete = async_to_streamed_response_wrapper(
+ shields.delete,
+ )
self.register = async_to_streamed_response_wrapper(
shields.register,
)
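
The shields resource gains the same pattern: a delete method that unregisters a shield via DELETE /v1/shields/{identifier}. The server replies with an empty body, so the method sets Accept: */* and casts to NoneType, i.e. it returns None. A minimal sketch, assuming a local server; the shield id is a placeholder:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # Register, then unregister; provider_id, provider_shield_id and params default to `omit`.
    client.shields.register(shield_id="content-filter")  # placeholder shield id
    client.shields.delete("content-filter")              # returns None on success

    # Passing an empty identifier raises ValueError client-side, before any request is sent.
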
diff --git a/src/llama_stack_client/resources/synthetic_data_generation.py b/src/llama_stack_client/resources/synthetic_data_generation.py
index 0843eafe..55eddcad 100644
--- a/src/llama_stack_client/resources/synthetic_data_generation.py
+++ b/src/llama_stack_client/resources/synthetic_data_generation.py
@@ -8,7 +8,7 @@
import httpx
from ..types import synthetic_data_generation_generate_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -50,13 +50,13 @@ def generate(
*,
dialogs: Iterable[Message],
filtering_function: Literal["none", "random", "top_k", "top_p", "top_k_top_p", "sigmoid"],
- model: str | NotGiven = NOT_GIVEN,
+ model: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyntheticDataGenerationResponse:
"""
Generate synthetic data based on input dialogs and apply filtering.
@@ -119,13 +119,13 @@ async def generate(
*,
dialogs: Iterable[Message],
filtering_function: Literal["none", "random", "top_k", "top_p", "top_k_top_p", "sigmoid"],
- model: str | NotGiven = NOT_GIVEN,
+ model: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyntheticDataGenerationResponse:
"""
Generate synthetic data based on input dialogs and apply filtering.
diff --git a/src/llama_stack_client/resources/telemetry.py b/src/llama_stack_client/resources/telemetry.py
index b8b47c49..daafbb50 100644
--- a/src/llama_stack_client/resources/telemetry.py
+++ b/src/llama_stack_client/resources/telemetry.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import List, Type, Iterable, cast
+from typing import Type, Iterable, cast
from typing_extensions import Literal
import httpx
@@ -15,7 +15,7 @@
telemetry_query_metrics_params,
telemetry_save_spans_to_dataset_params,
)
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -69,7 +69,7 @@ def get_span(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryGetSpanResponse:
"""
Get a span by its ID.
@@ -99,14 +99,14 @@ def get_span_tree(
self,
span_id: str,
*,
- attributes_to_return: List[str] | NotGiven = NOT_GIVEN,
- max_depth: int | NotGiven = NOT_GIVEN,
+ attributes_to_return: SequenceNotStr[str] | Omit = omit,
+ max_depth: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryGetSpanTreeResponse:
"""
Get a span tree by its ID.
@@ -154,7 +154,7 @@ def get_trace(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Trace:
"""
Get a trace by its ID.
@@ -188,7 +188,7 @@ def log_event(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Log an event.
@@ -228,15 +228,15 @@ def query_metrics(
*,
query_type: Literal["range", "instant"],
start_time: int,
- end_time: int | NotGiven = NOT_GIVEN,
- granularity: str | NotGiven = NOT_GIVEN,
- label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | NotGiven = NOT_GIVEN,
+ end_time: int | Omit = omit,
+ granularity: str | Omit = omit,
+ label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryQueryMetricsResponse:
"""
Query metrics.
@@ -288,14 +288,14 @@ def query_spans(
self,
*,
attribute_filters: Iterable[QueryConditionParam],
- attributes_to_return: List[str],
- max_depth: int | NotGiven = NOT_GIVEN,
+ attributes_to_return: SequenceNotStr[str],
+ max_depth: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryQuerySpansResponse:
"""
Query spans.
@@ -338,16 +338,16 @@ def query_spans(
def query_traces(
self,
*,
- attribute_filters: Iterable[QueryConditionParam] | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- offset: int | NotGiven = NOT_GIVEN,
- order_by: List[str] | NotGiven = NOT_GIVEN,
+ attribute_filters: Iterable[QueryConditionParam] | Omit = omit,
+ limit: int | Omit = omit,
+ offset: int | Omit = omit,
+ order_by: SequenceNotStr[str] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryQueryTracesResponse:
"""
Query traces.
@@ -394,15 +394,15 @@ def save_spans_to_dataset(
self,
*,
attribute_filters: Iterable[QueryConditionParam],
- attributes_to_save: List[str],
+ attributes_to_save: SequenceNotStr[str],
dataset_id: str,
- max_depth: int | NotGiven = NOT_GIVEN,
+ max_depth: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Save spans to a dataset.
@@ -473,7 +473,7 @@ async def get_span(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryGetSpanResponse:
"""
Get a span by its ID.
@@ -503,14 +503,14 @@ async def get_span_tree(
self,
span_id: str,
*,
- attributes_to_return: List[str] | NotGiven = NOT_GIVEN,
- max_depth: int | NotGiven = NOT_GIVEN,
+ attributes_to_return: SequenceNotStr[str] | Omit = omit,
+ max_depth: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryGetSpanTreeResponse:
"""
Get a span tree by its ID.
@@ -558,7 +558,7 @@ async def get_trace(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Trace:
"""
Get a trace by its ID.
@@ -592,7 +592,7 @@ async def log_event(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Log an event.
@@ -632,15 +632,15 @@ async def query_metrics(
*,
query_type: Literal["range", "instant"],
start_time: int,
- end_time: int | NotGiven = NOT_GIVEN,
- granularity: str | NotGiven = NOT_GIVEN,
- label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | NotGiven = NOT_GIVEN,
+ end_time: int | Omit = omit,
+ granularity: str | Omit = omit,
+ label_matchers: Iterable[telemetry_query_metrics_params.LabelMatcher] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryQueryMetricsResponse:
"""
Query metrics.
@@ -692,14 +692,14 @@ async def query_spans(
self,
*,
attribute_filters: Iterable[QueryConditionParam],
- attributes_to_return: List[str],
- max_depth: int | NotGiven = NOT_GIVEN,
+ attributes_to_return: SequenceNotStr[str],
+ max_depth: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryQuerySpansResponse:
"""
Query spans.
@@ -742,16 +742,16 @@ async def query_spans(
async def query_traces(
self,
*,
- attribute_filters: Iterable[QueryConditionParam] | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- offset: int | NotGiven = NOT_GIVEN,
- order_by: List[str] | NotGiven = NOT_GIVEN,
+ attribute_filters: Iterable[QueryConditionParam] | Omit = omit,
+ limit: int | Omit = omit,
+ offset: int | Omit = omit,
+ order_by: SequenceNotStr[str] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> TelemetryQueryTracesResponse:
"""
Query traces.
@@ -798,15 +798,15 @@ async def save_spans_to_dataset(
self,
*,
attribute_filters: Iterable[QueryConditionParam],
- attributes_to_save: List[str],
+ attributes_to_save: SequenceNotStr[str],
dataset_id: str,
- max_depth: int | NotGiven = NOT_GIVEN,
+ max_depth: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Save spans to a dataset.
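
In the telemetry resource the string-list parameters (attributes_to_return, attributes_to_save, order_by) move from List[str] to SequenceNotStr[str], so any non-string sequence is accepted while a plain str is rejected at type-check time. A minimal sketch of the relaxed typing, assuming a local server; the attribute name is a placeholder:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    # A tuple now satisfies SequenceNotStr[str]; limit, offset and attribute_filters default to `omit`.
    traces = client.telemetry.query_traces(
        limit=10,
        order_by=("start_time",),  # placeholder attribute name
    )
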
diff --git a/src/llama_stack_client/resources/tool_runtime/rag_tool.py b/src/llama_stack_client/resources/tool_runtime/rag_tool.py
index 3ff25968..5b3bd4d3 100644
--- a/src/llama_stack_client/resources/tool_runtime/rag_tool.py
+++ b/src/llama_stack_client/resources/tool_runtime/rag_tool.py
@@ -2,11 +2,11 @@
from __future__ import annotations
-from typing import List, Iterable
+from typing import Iterable
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -57,7 +57,7 @@ def insert(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Index documents so they can be used by the RAG system.
@@ -98,14 +98,14 @@ def query(
self,
*,
content: InterleavedContent,
- vector_db_ids: List[str],
- query_config: QueryConfig | NotGiven = NOT_GIVEN,
+ vector_db_ids: SequenceNotStr[str],
+ query_config: QueryConfig | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> QueryResult:
"""
Query the RAG system for context; typically invoked by the agent.
@@ -173,7 +173,7 @@ async def insert(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Index documents so they can be used by the RAG system.
@@ -214,14 +214,14 @@ async def query(
self,
*,
content: InterleavedContent,
- vector_db_ids: List[str],
- query_config: QueryConfig | NotGiven = NOT_GIVEN,
+ vector_db_ids: SequenceNotStr[str],
+ query_config: QueryConfig | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> QueryResult:
"""
Query the RAG system for context; typically invoked by the agent.
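
rag_tool.query follows the same convention: vector_db_ids is now SequenceNotStr[str] and query_config defaults to omit. A hedged sketch, assuming a local server and a previously registered vector database; the database id and query text are placeholders:

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server URL

    result = client.tool_runtime.rag_tool.query(
        content="What changed in this release?",  # a plain string is accepted as InterleavedContent
        vector_db_ids=("docs-db",),               # placeholder id; any non-str sequence works
    )
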
diff --git a/src/llama_stack_client/resources/tool_runtime/tool_runtime.py b/src/llama_stack_client/resources/tool_runtime/tool_runtime.py
index ecb17c38..db90cb22 100644
--- a/src/llama_stack_client/resources/tool_runtime/tool_runtime.py
+++ b/src/llama_stack_client/resources/tool_runtime/tool_runtime.py
@@ -7,7 +7,7 @@
import httpx
from ...types import tool_runtime_list_tools_params, tool_runtime_invoke_tool_params
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from .rag_tool import (
RagToolResource,
@@ -67,7 +67,7 @@ def invoke_tool(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolInvocationResult:
"""
Run a tool with the given arguments.
@@ -103,14 +103,14 @@ def invoke_tool(
def list_tools(
self,
*,
- mcp_endpoint: tool_runtime_list_tools_params.McpEndpoint | NotGiven = NOT_GIVEN,
- tool_group_id: str | NotGiven = NOT_GIVEN,
+ mcp_endpoint: tool_runtime_list_tools_params.McpEndpoint | Omit = omit,
+ tool_group_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolRuntimeListToolsResponse:
"""
List all tools in the runtime.
@@ -182,7 +182,7 @@ async def invoke_tool(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolInvocationResult:
"""
Run a tool with the given arguments.
@@ -218,14 +218,14 @@ async def invoke_tool(
async def list_tools(
self,
*,
- mcp_endpoint: tool_runtime_list_tools_params.McpEndpoint | NotGiven = NOT_GIVEN,
- tool_group_id: str | NotGiven = NOT_GIVEN,
+ mcp_endpoint: tool_runtime_list_tools_params.McpEndpoint | Omit = omit,
+ tool_group_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolRuntimeListToolsResponse:
"""
List all tools in the runtime.
diff --git a/src/llama_stack_client/resources/toolgroups.py b/src/llama_stack_client/resources/toolgroups.py
index 3f0ba200..31551d20 100644
--- a/src/llama_stack_client/resources/toolgroups.py
+++ b/src/llama_stack_client/resources/toolgroups.py
@@ -7,7 +7,7 @@
import httpx
from ..types import toolgroup_register_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -53,7 +53,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolgroupListResponse:
"""List tool groups with optional provider."""
return self._get(
@@ -77,7 +77,7 @@ def get(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolGroup:
"""
Get a tool group by its ID.
@@ -106,14 +106,14 @@ def register(
*,
provider_id: str,
toolgroup_id: str,
- args: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- mcp_endpoint: toolgroup_register_params.McpEndpoint | NotGiven = NOT_GIVEN,
+ args: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ mcp_endpoint: toolgroup_register_params.McpEndpoint | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Register a tool group.
@@ -162,7 +162,7 @@ def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a tool group.
@@ -216,7 +216,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolgroupListResponse:
"""List tool groups with optional provider."""
return await self._get(
@@ -240,7 +240,7 @@ async def get(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolGroup:
"""
Get a tool group by its ID.
@@ -269,14 +269,14 @@ async def register(
*,
provider_id: str,
toolgroup_id: str,
- args: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- mcp_endpoint: toolgroup_register_params.McpEndpoint | NotGiven = NOT_GIVEN,
+ args: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ mcp_endpoint: toolgroup_register_params.McpEndpoint | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Register a tool group.
@@ -325,7 +325,7 @@ async def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a tool group.
diff --git a/src/llama_stack_client/resources/tools.py b/src/llama_stack_client/resources/tools.py
index 7954f776..6d405bed 100644
--- a/src/llama_stack_client/resources/tools.py
+++ b/src/llama_stack_client/resources/tools.py
@@ -7,7 +7,7 @@
import httpx
from ..types import tool_list_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -48,13 +48,13 @@ def with_streaming_response(self) -> ToolsResourceWithStreamingResponse:
def list(
self,
*,
- toolgroup_id: str | NotGiven = NOT_GIVEN,
+ toolgroup_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolListResponse:
"""
List tools with optional tool group.
@@ -92,7 +92,7 @@ def get(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Tool:
"""
Get a tool by its name.
@@ -140,13 +140,13 @@ def with_streaming_response(self) -> AsyncToolsResourceWithStreamingResponse:
async def list(
self,
*,
- toolgroup_id: str | NotGiven = NOT_GIVEN,
+ toolgroup_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> ToolListResponse:
"""
List tools with optional tool group.
@@ -184,7 +184,7 @@ async def get(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> Tool:
"""
Get a tool by its name.
diff --git a/src/llama_stack_client/resources/vector_dbs.py b/src/llama_stack_client/resources/vector_dbs.py
index ab62fa6a..3524bdf2 100644
--- a/src/llama_stack_client/resources/vector_dbs.py
+++ b/src/llama_stack_client/resources/vector_dbs.py
@@ -7,7 +7,7 @@
import httpx
from ..types import vector_db_register_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -55,7 +55,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorDBRetrieveResponse:
"""
Get a vector database by its identifier.
@@ -87,7 +87,7 @@ def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorDBListResponse:
"""List all vector databases."""
return self._get(
@@ -107,16 +107,16 @@ def register(
*,
embedding_model: str,
vector_db_id: str,
- embedding_dimension: int | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_vector_db_id: str | NotGiven = NOT_GIVEN,
- vector_db_name: str | NotGiven = NOT_GIVEN,
+ embedding_dimension: int | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_vector_db_id: str | Omit = omit,
+ vector_db_name: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorDBRegisterResponse:
"""
Register a vector database.
@@ -170,7 +170,7 @@ def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a vector database.
@@ -225,7 +225,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorDBRetrieveResponse:
"""
Get a vector database by its identifier.
@@ -257,7 +257,7 @@ async def list(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorDBListResponse:
"""List all vector databases."""
return await self._get(
@@ -277,16 +277,16 @@ async def register(
*,
embedding_model: str,
vector_db_id: str,
- embedding_dimension: int | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
- provider_vector_db_id: str | NotGiven = NOT_GIVEN,
- vector_db_name: str | NotGiven = NOT_GIVEN,
+ embedding_dimension: int | Omit = omit,
+ provider_id: str | Omit = omit,
+ provider_vector_db_id: str | Omit = omit,
+ vector_db_name: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorDBRegisterResponse:
"""
Register a vector database.
@@ -340,7 +340,7 @@ async def unregister(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""
Unregister a vector database.
diff --git a/src/llama_stack_client/resources/vector_io.py b/src/llama_stack_client/resources/vector_io.py
index 3e361435..f9647342 100644
--- a/src/llama_stack_client/resources/vector_io.py
+++ b/src/llama_stack_client/resources/vector_io.py
@@ -7,7 +7,7 @@
import httpx
from ..types import vector_io_query_params, vector_io_insert_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from .._utils import maybe_transform, async_maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
@@ -49,13 +49,13 @@ def insert(
*,
chunks: Iterable[vector_io_insert_params.Chunk],
vector_db_id: str,
- ttl_seconds: int | NotGiven = NOT_GIVEN,
+ ttl_seconds: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""Insert chunks into a vector database.
@@ -102,13 +102,13 @@ def query(
*,
query: InterleavedContent,
vector_db_id: str,
- params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
+ params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> QueryChunksResponse:
"""
Query chunks from a vector database.
@@ -170,13 +170,13 @@ async def insert(
*,
chunks: Iterable[vector_io_insert_params.Chunk],
vector_db_id: str,
- ttl_seconds: int | NotGiven = NOT_GIVEN,
+ ttl_seconds: int | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> None:
"""Insert chunks into a vector database.
@@ -223,13 +223,13 @@ async def query(
*,
query: InterleavedContent,
vector_db_id: str,
- params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
+ params: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> QueryChunksResponse:
"""
Query chunks from a vector database.
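
The same pattern applies to vector IO: `ttl_seconds` and `params` now default to `omit`. A sketch reusing the client from the earlier example; the chunk dictionary shape and the response fields are assumptions, not taken from this diff:

    # ttl_seconds stays at `omit`, so no TTL field is sent with the insert request.
    client.vector_io.insert(
        vector_db_id="my-vector-db",  # hypothetical database id
        chunks=[{"content": "Llama Stack exposes a vector IO API.", "metadata": {"source": "docs"}}],
    )

    # `query` accepts plain text because InterleavedContent includes str; `params` is omitted.
    response = client.vector_io.query(vector_db_id="my-vector-db", query="vector IO API")
    for chunk, score in zip(response.chunks, response.scores):  # assumed QueryChunksResponse fields
        print(score, chunk)
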
diff --git a/src/llama_stack_client/resources/vector_stores/files.py b/src/llama_stack_client/resources/vector_stores/files.py
index 8589ebc6..39f16a66 100644
--- a/src/llama_stack_client/resources/vector_stores/files.py
+++ b/src/llama_stack_client/resources/vector_stores/files.py
@@ -7,7 +7,7 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -52,14 +52,14 @@ def create(
vector_store_id: str,
*,
file_id: str,
- attributes: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- chunking_strategy: file_create_params.ChunkingStrategy | NotGiven = NOT_GIVEN,
+ attributes: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ chunking_strategy: file_create_params.ChunkingStrategy | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreFile:
"""
Attach a file to a vector store.
@@ -107,7 +107,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreFile:
"""
Retrieves a vector store file.
@@ -144,7 +144,7 @@ def update(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreFile:
"""
Updates a vector store file.
@@ -177,17 +177,17 @@ def list(
self,
vector_store_id: str,
*,
- after: str | NotGiven = NOT_GIVEN,
- before: str | NotGiven = NOT_GIVEN,
- filter: Literal["completed", "in_progress", "cancelled", "failed"] | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: str | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ before: str | Omit = omit,
+ filter: Literal["completed", "in_progress", "cancelled", "failed"] | Omit = omit,
+ limit: int | Omit = omit,
+ order: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyncOpenAICursorPage[VectorStoreFile]:
"""
List files in a vector store.
@@ -249,7 +249,7 @@ def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> FileDeleteResponse:
"""
Delete a vector store file.
@@ -285,7 +285,7 @@ def content(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> FileContentResponse:
"""
Retrieves the contents of a vector store file.
@@ -337,14 +337,14 @@ async def create(
vector_store_id: str,
*,
file_id: str,
- attributes: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- chunking_strategy: file_create_params.ChunkingStrategy | NotGiven = NOT_GIVEN,
+ attributes: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ chunking_strategy: file_create_params.ChunkingStrategy | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreFile:
"""
Attach a file to a vector store.
@@ -392,7 +392,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreFile:
"""
Retrieves a vector store file.
@@ -429,7 +429,7 @@ async def update(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreFile:
"""
Updates a vector store file.
@@ -462,17 +462,17 @@ def list(
self,
vector_store_id: str,
*,
- after: str | NotGiven = NOT_GIVEN,
- before: str | NotGiven = NOT_GIVEN,
- filter: Literal["completed", "in_progress", "cancelled", "failed"] | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: str | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ before: str | Omit = omit,
+ filter: Literal["completed", "in_progress", "cancelled", "failed"] | Omit = omit,
+ limit: int | Omit = omit,
+ order: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncPaginator[VectorStoreFile, AsyncOpenAICursorPage[VectorStoreFile]]:
"""
List files in a vector store.
@@ -534,7 +534,7 @@ async def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> FileDeleteResponse:
"""
Delete a vector store file.
@@ -570,7 +570,7 @@ async def content(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> FileContentResponse:
"""
Retrieves the contents of a vector store file.
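
The file-listing parameters (`after`, `before`, `filter`, `limit`, `order`) all default to `omit`, and the sync method returns a `SyncOpenAICursorPage`, which in this SDK family can usually be iterated directly to auto-paginate. A sketch with a hypothetical vector store id:

    # Only the arguments actually passed are sent; everything else stays omitted.
    page = client.vector_stores.files.list("vs_123", limit=20, filter="completed")

    # Iterating the cursor page yields files and fetches later pages on demand
    # (standard cursor-page behaviour; not spelled out in this diff).
    for vs_file in page:
        print(vs_file.id)
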
diff --git a/src/llama_stack_client/resources/vector_stores/vector_stores.py b/src/llama_stack_client/resources/vector_stores/vector_stores.py
index bdc38e19..f3ab01f2 100644
--- a/src/llama_stack_client/resources/vector_stores/vector_stores.py
+++ b/src/llama_stack_client/resources/vector_stores/vector_stores.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
import httpx
@@ -20,7 +20,7 @@
vector_store_search_params,
vector_store_update_params,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -66,20 +66,20 @@ def with_streaming_response(self) -> VectorStoresResourceWithStreamingResponse:
def create(
self,
*,
- chunking_strategy: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- embedding_dimension: int | NotGiven = NOT_GIVEN,
- embedding_model: str | NotGiven = NOT_GIVEN,
- expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- file_ids: List[str] | NotGiven = NOT_GIVEN,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- name: str | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
+ chunking_strategy: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ embedding_dimension: int | Omit = omit,
+ embedding_model: str | Omit = omit,
+ expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ file_ids: SequenceNotStr[str] | Omit = omit,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ name: str | Omit = omit,
+ provider_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStore:
"""
Creates a vector store.
@@ -141,7 +141,7 @@ def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStore:
"""
Retrieves a vector store.
@@ -169,15 +169,15 @@ def update(
self,
vector_store_id: str,
*,
- expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- name: str | NotGiven = NOT_GIVEN,
+ expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ name: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStore:
"""
Updates a vector store.
@@ -218,16 +218,16 @@ def update(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- before: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: str | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ before: str | Omit = omit,
+ limit: int | Omit = omit,
+ order: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> SyncOpenAICursorPage[VectorStore]:
"""Returns a list of vector stores.
@@ -284,7 +284,7 @@ def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreDeleteResponse:
"""
Delete a vector store.
@@ -312,18 +312,18 @@ def search(
self,
vector_store_id: str,
*,
- query: Union[str, List[str]],
- filters: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- max_num_results: int | NotGiven = NOT_GIVEN,
- ranking_options: vector_store_search_params.RankingOptions | NotGiven = NOT_GIVEN,
- rewrite_query: bool | NotGiven = NOT_GIVEN,
- search_mode: str | NotGiven = NOT_GIVEN,
+ query: Union[str, SequenceNotStr[str]],
+ filters: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ max_num_results: int | Omit = omit,
+ ranking_options: vector_store_search_params.RankingOptions | Omit = omit,
+ rewrite_query: bool | Omit = omit,
+ search_mode: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreSearchResponse:
"""Search for chunks in a vector store.
@@ -400,20 +400,20 @@ def with_streaming_response(self) -> AsyncVectorStoresResourceWithStreamingRespo
async def create(
self,
*,
- chunking_strategy: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- embedding_dimension: int | NotGiven = NOT_GIVEN,
- embedding_model: str | NotGiven = NOT_GIVEN,
- expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- file_ids: List[str] | NotGiven = NOT_GIVEN,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- name: str | NotGiven = NOT_GIVEN,
- provider_id: str | NotGiven = NOT_GIVEN,
+ chunking_strategy: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ embedding_dimension: int | Omit = omit,
+ embedding_model: str | Omit = omit,
+ expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ file_ids: SequenceNotStr[str] | Omit = omit,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ name: str | Omit = omit,
+ provider_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStore:
"""
Creates a vector store.
@@ -475,7 +475,7 @@ async def retrieve(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStore:
"""
Retrieves a vector store.
@@ -503,15 +503,15 @@ async def update(
self,
vector_store_id: str,
*,
- expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- name: str | NotGiven = NOT_GIVEN,
+ expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ name: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStore:
"""
Updates a vector store.
@@ -552,16 +552,16 @@ async def update(
def list(
self,
*,
- after: str | NotGiven = NOT_GIVEN,
- before: str | NotGiven = NOT_GIVEN,
- limit: int | NotGiven = NOT_GIVEN,
- order: str | NotGiven = NOT_GIVEN,
+ after: str | Omit = omit,
+ before: str | Omit = omit,
+ limit: int | Omit = omit,
+ order: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncPaginator[VectorStore, AsyncOpenAICursorPage[VectorStore]]:
"""Returns a list of vector stores.
@@ -618,7 +618,7 @@ async def delete(
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreDeleteResponse:
"""
Delete a vector store.
@@ -646,18 +646,18 @@ async def search(
self,
vector_store_id: str,
*,
- query: Union[str, List[str]],
- filters: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | NotGiven = NOT_GIVEN,
- max_num_results: int | NotGiven = NOT_GIVEN,
- ranking_options: vector_store_search_params.RankingOptions | NotGiven = NOT_GIVEN,
- rewrite_query: bool | NotGiven = NOT_GIVEN,
- search_mode: str | NotGiven = NOT_GIVEN,
+ query: Union[str, SequenceNotStr[str]],
+ filters: Dict[str, Union[bool, float, str, Iterable[object], object, None]] | Omit = omit,
+ max_num_results: int | Omit = omit,
+ ranking_options: vector_store_search_params.RankingOptions | Omit = omit,
+ rewrite_query: bool | Omit = omit,
+ search_mode: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> VectorStoreSearchResponse:
"""Search for chunks in a vector store.
diff --git a/src/llama_stack_client/types/__init__.py b/src/llama_stack_client/types/__init__.py
index 1d8ac792..56b7f887 100644
--- a/src/llama_stack_client/types/__init__.py
+++ b/src/llama_stack_client/types/__init__.py
@@ -103,6 +103,7 @@
from .dataset_iterrows_params import DatasetIterrowsParams as DatasetIterrowsParams
from .dataset_register_params import DatasetRegisterParams as DatasetRegisterParams
from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams
+from .inference_rerank_params import InferenceRerankParams as InferenceRerankParams
from .list_providers_response import ListProvidersResponse as ListProvidersResponse
from .scoring_fn_params_param import ScoringFnParamsParam as ScoringFnParamsParam
from .toolgroup_list_response import ToolgroupListResponse as ToolgroupListResponse
@@ -112,6 +113,7 @@
from .list_benchmarks_response import ListBenchmarksResponse as ListBenchmarksResponse
from .list_vector_dbs_response import ListVectorDBsResponse as ListVectorDBsResponse
from .moderation_create_params import ModerationCreateParams as ModerationCreateParams
+from .response_delete_response import ResponseDeleteResponse as ResponseDeleteResponse
from .safety_run_shield_params import SafetyRunShieldParams as SafetyRunShieldParams
from .vector_store_list_params import VectorStoreListParams as VectorStoreListParams
from .benchmark_register_params import BenchmarkRegisterParams as BenchmarkRegisterParams
@@ -120,6 +122,7 @@
from .dataset_register_response import DatasetRegisterResponse as DatasetRegisterResponse
from .dataset_retrieve_response import DatasetRetrieveResponse as DatasetRetrieveResponse
from .eval_evaluate_rows_params import EvalEvaluateRowsParams as EvalEvaluateRowsParams
+from .inference_rerank_response import InferenceRerankResponse as InferenceRerankResponse
from .list_tool_groups_response import ListToolGroupsResponse as ListToolGroupsResponse
from .toolgroup_register_params import ToolgroupRegisterParams as ToolgroupRegisterParams
from .vector_db_register_params import VectorDBRegisterParams as VectorDBRegisterParams
diff --git a/src/llama_stack_client/types/agents/session_retrieve_params.py b/src/llama_stack_client/types/agents/session_retrieve_params.py
index 30337586..aeff0ff7 100644
--- a/src/llama_stack_client/types/agents/session_retrieve_params.py
+++ b/src/llama_stack_client/types/agents/session_retrieve_params.py
@@ -2,14 +2,15 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Required, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = ["SessionRetrieveParams"]
class SessionRetrieveParams(TypedDict, total=False):
agent_id: Required[str]
- turn_ids: List[str]
+ turn_ids: SequenceNotStr[str]
"""(Optional) List of turn IDs to filter the session by."""
diff --git a/src/llama_stack_client/types/agents/turn_create_params.py b/src/llama_stack_client/types/agents/turn_create_params.py
index fbb8de8e..8c8e4999 100644
--- a/src/llama_stack_client/types/agents/turn_create_params.py
+++ b/src/llama_stack_client/types/agents/turn_create_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..shared_params.user_message import UserMessage
from ..shared_params.tool_response_message import ToolResponseMessage
from ..shared_params.interleaved_content_item import InterleavedContentItem
@@ -42,7 +43,7 @@ class TurnCreateParamsBase(TypedDict, total=False):
override the agent's tool_config.
"""
- toolgroups: List[Toolgroup]
+ toolgroups: SequenceNotStr[Toolgroup]
"""
(Optional) List of toolgroups to create the turn with, will be used in addition
to the agent's config toolgroups for the request.
diff --git a/src/llama_stack_client/types/algorithm_config_param.py b/src/llama_stack_client/types/algorithm_config_param.py
index f2856526..6940953e 100644
--- a/src/llama_stack_client/types/algorithm_config_param.py
+++ b/src/llama_stack_client/types/algorithm_config_param.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["AlgorithmConfigParam", "LoraFinetuningConfig", "QatFinetuningConfig"]
@@ -18,7 +20,7 @@ class LoraFinetuningConfig(TypedDict, total=False):
apply_lora_to_output: Required[bool]
"""Whether to apply LoRA to output projection layers"""
- lora_attn_modules: Required[List[str]]
+ lora_attn_modules: Required[SequenceNotStr[str]]
"""List of attention module names to apply LoRA to"""
rank: Required[int]
diff --git a/src/llama_stack_client/types/benchmark_register_params.py b/src/llama_stack_client/types/benchmark_register_params.py
index 0fa9d508..322e2da8 100644
--- a/src/llama_stack_client/types/benchmark_register_params.py
+++ b/src/llama_stack_client/types/benchmark_register_params.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["BenchmarkRegisterParams"]
@@ -15,7 +17,7 @@ class BenchmarkRegisterParams(TypedDict, total=False):
dataset_id: Required[str]
"""The ID of the dataset to use for the benchmark."""
- scoring_functions: Required[List[str]]
+ scoring_functions: Required[SequenceNotStr[str]]
"""The scoring functions to use for the benchmark."""
metadata: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
diff --git a/src/llama_stack_client/types/chat/completion_create_params.py b/src/llama_stack_client/types/chat/completion_create_params.py
index 263c1c78..1b930dfe 100644
--- a/src/llama_stack_client/types/chat/completion_create_params.py
+++ b/src/llama_stack_client/types/chat/completion_create_params.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = [
"CompletionCreateParamsBase",
"Message",
@@ -82,7 +84,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
seed: int
"""(Optional) The seed to use."""
- stop: Union[str, List[str]]
+ stop: Union[str, SequenceNotStr[str]]
"""(Optional) The stop tokens to use."""
stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
diff --git a/src/llama_stack_client/types/completion_create_params.py b/src/llama_stack_client/types/completion_create_params.py
index 50900e25..f8ca049f 100644
--- a/src/llama_stack_client/types/completion_create_params.py
+++ b/src/llama_stack_client/types/completion_create_params.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal, Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["CompletionCreateParamsBase", "CompletionCreateParamsNonStreaming", "CompletionCreateParamsStreaming"]
@@ -16,7 +18,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
endpoint.
"""
- prompt: Required[Union[str, List[str], Iterable[int], Iterable[Iterable[int]]]]
+ prompt: Required[Union[str, SequenceNotStr[str], Iterable[int], Iterable[Iterable[int]]]]
"""The prompt to generate a completion for."""
best_of: int
@@ -28,7 +30,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
frequency_penalty: float
"""(Optional) The penalty for repeated tokens."""
- guided_choice: List[str]
+ guided_choice: SequenceNotStr[str]
logit_bias: Dict[str, float]
"""(Optional) The logit bias to use."""
@@ -50,7 +52,7 @@ class CompletionCreateParamsBase(TypedDict, total=False):
seed: int
"""(Optional) The seed to use."""
- stop: Union[str, List[str]]
+ stop: Union[str, SequenceNotStr[str]]
"""(Optional) The stop tokens to use."""
stream_options: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
diff --git a/src/llama_stack_client/types/embedding_create_params.py b/src/llama_stack_client/types/embedding_create_params.py
index c87096eb..2710ba55 100644
--- a/src/llama_stack_client/types/embedding_create_params.py
+++ b/src/llama_stack_client/types/embedding_create_params.py
@@ -2,14 +2,16 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["EmbeddingCreateParams"]
class EmbeddingCreateParams(TypedDict, total=False):
- input: Required[Union[str, List[str]]]
+ input: Required[Union[str, SequenceNotStr[str]]]
"""Input text to embed, encoded as a string or array of strings.
To embed multiple inputs in a single request, pass an array of strings.
diff --git a/src/llama_stack_client/types/eval_evaluate_rows_alpha_params.py b/src/llama_stack_client/types/eval_evaluate_rows_alpha_params.py
index eb9443b0..e4953252 100644
--- a/src/llama_stack_client/types/eval_evaluate_rows_alpha_params.py
+++ b/src/llama_stack_client/types/eval_evaluate_rows_alpha_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
from .benchmark_config_param import BenchmarkConfigParam
__all__ = ["EvalEvaluateRowsAlphaParams"]
@@ -17,5 +18,5 @@ class EvalEvaluateRowsAlphaParams(TypedDict, total=False):
input_rows: Required[Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]]
"""The rows to evaluate."""
- scoring_functions: Required[List[str]]
+ scoring_functions: Required[SequenceNotStr[str]]
"""The scoring functions to use for the evaluation."""
diff --git a/src/llama_stack_client/types/eval_evaluate_rows_params.py b/src/llama_stack_client/types/eval_evaluate_rows_params.py
index 37e7b978..128f363d 100644
--- a/src/llama_stack_client/types/eval_evaluate_rows_params.py
+++ b/src/llama_stack_client/types/eval_evaluate_rows_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
from .benchmark_config_param import BenchmarkConfigParam
__all__ = ["EvalEvaluateRowsParams"]
@@ -17,5 +18,5 @@ class EvalEvaluateRowsParams(TypedDict, total=False):
input_rows: Required[Iterable[Dict[str, Union[bool, float, str, Iterable[object], object, None]]]]
"""The rows to evaluate."""
- scoring_functions: Required[List[str]]
+ scoring_functions: Required[SequenceNotStr[str]]
"""The scoring functions to use for the evaluation."""
diff --git a/src/llama_stack_client/types/inference_batch_completion_params.py b/src/llama_stack_client/types/inference_batch_completion_params.py
index d3db8e13..b225b883 100644
--- a/src/llama_stack_client/types/inference_batch_completion_params.py
+++ b/src/llama_stack_client/types/inference_batch_completion_params.py
@@ -2,9 +2,9 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
from .shared_params.response_format import ResponseFormat
from .shared_params.sampling_params import SamplingParams
from .shared_params.interleaved_content import InterleavedContent
@@ -13,7 +13,7 @@
class InferenceBatchCompletionParams(TypedDict, total=False):
- content_batch: Required[List[InterleavedContent]]
+ content_batch: Required[SequenceNotStr[InterleavedContent]]
"""The content to generate completions for."""
model_id: Required[str]
diff --git a/src/llama_stack_client/types/inference_embeddings_params.py b/src/llama_stack_client/types/inference_embeddings_params.py
index 7bf5339f..a1be545b 100644
--- a/src/llama_stack_client/types/inference_embeddings_params.py
+++ b/src/llama_stack_client/types/inference_embeddings_params.py
@@ -2,16 +2,17 @@
from __future__ import annotations
-from typing import List, Union, Iterable
+from typing import Union, Iterable
from typing_extensions import Literal, Required, TypedDict
+from .._types import SequenceNotStr
from .shared_params.interleaved_content_item import InterleavedContentItem
__all__ = ["InferenceEmbeddingsParams"]
class InferenceEmbeddingsParams(TypedDict, total=False):
- contents: Required[Union[List[str], Iterable[InterleavedContentItem]]]
+ contents: Required[Union[SequenceNotStr[str], Iterable[InterleavedContentItem]]]
"""List of contents to generate embeddings for.
Each content can be a string or an InterleavedContentItem (and hence can be
diff --git a/src/llama_stack_client/types/inference_rerank_params.py b/src/llama_stack_client/types/inference_rerank_params.py
new file mode 100644
index 00000000..8f8c4d64
--- /dev/null
+++ b/src/llama_stack_client/types/inference_rerank_params.py
@@ -0,0 +1,106 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Union
+from typing_extensions import Literal, Required, TypeAlias, TypedDict
+
+from .._types import SequenceNotStr
+
+__all__ = [
+ "InferenceRerankParams",
+ "Item",
+ "ItemOpenAIChatCompletionContentPartTextParam",
+ "ItemOpenAIChatCompletionContentPartImageParam",
+ "ItemOpenAIChatCompletionContentPartImageParamImageURL",
+ "Query",
+ "QueryOpenAIChatCompletionContentPartTextParam",
+ "QueryOpenAIChatCompletionContentPartImageParam",
+ "QueryOpenAIChatCompletionContentPartImageParamImageURL",
+]
+
+
+class InferenceRerankParams(TypedDict, total=False):
+ items: Required[SequenceNotStr[Item]]
+ """List of items to rerank.
+
+ Each item can be a string, text content part, or image content part. Each input
+ must not exceed the model's max input token length.
+ """
+
+ model: Required[str]
+ """The identifier of the reranking model to use."""
+
+ query: Required[Query]
+ """The search query to rank items against.
+
+ Can be a string, text content part, or image content part. The input must not
+ exceed the model's max input token length.
+ """
+
+ max_num_results: int
+ """(Optional) Maximum number of results to return. Default: returns all."""
+
+
+class ItemOpenAIChatCompletionContentPartTextParam(TypedDict, total=False):
+ text: Required[str]
+ """The text content of the message"""
+
+ type: Required[Literal["text"]]
+ """Must be "text" to identify this as text content"""
+
+
+class ItemOpenAIChatCompletionContentPartImageParamImageURL(TypedDict, total=False):
+ url: Required[str]
+ """URL of the image to include in the message"""
+
+ detail: str
+ """(Optional) Level of detail for image processing.
+
+ Can be "low", "high", or "auto"
+ """
+
+
+class ItemOpenAIChatCompletionContentPartImageParam(TypedDict, total=False):
+ image_url: Required[ItemOpenAIChatCompletionContentPartImageParamImageURL]
+ """Image URL specification and processing details"""
+
+ type: Required[Literal["image_url"]]
+ """Must be "image_url" to identify this as image content"""
+
+
+Item: TypeAlias = Union[
+ str, ItemOpenAIChatCompletionContentPartTextParam, ItemOpenAIChatCompletionContentPartImageParam
+]
+
+
+class QueryOpenAIChatCompletionContentPartTextParam(TypedDict, total=False):
+ text: Required[str]
+ """The text content of the message"""
+
+ type: Required[Literal["text"]]
+ """Must be "text" to identify this as text content"""
+
+
+class QueryOpenAIChatCompletionContentPartImageParamImageURL(TypedDict, total=False):
+ url: Required[str]
+ """URL of the image to include in the message"""
+
+ detail: str
+ """(Optional) Level of detail for image processing.
+
+ Can be "low", "high", or "auto"
+ """
+
+
+class QueryOpenAIChatCompletionContentPartImageParam(TypedDict, total=False):
+ image_url: Required[QueryOpenAIChatCompletionContentPartImageParamImageURL]
+ """Image URL specification and processing details"""
+
+ type: Required[Literal["image_url"]]
+ """Must be "image_url" to identify this as image content"""
+
+
+Query: TypeAlias = Union[
+ str, QueryOpenAIChatCompletionContentPartTextParam, QueryOpenAIChatCompletionContentPartImageParam
+]
diff --git a/src/llama_stack_client/types/inference_rerank_response.py b/src/llama_stack_client/types/inference_rerank_response.py
new file mode 100644
index 00000000..e74fc7e6
--- /dev/null
+++ b/src/llama_stack_client/types/inference_rerank_response.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List
+from typing_extensions import TypeAlias
+
+from .._models import BaseModel
+
+__all__ = ["InferenceRerankResponse", "InferenceRerankResponseItem"]
+
+
+class InferenceRerankResponseItem(BaseModel):
+ index: int
+ """The original index of the document in the input list"""
+
+ relevance_score: float
+ """The relevance score from the model output.
+
+ Values are inverted when applicable so that higher scores indicate greater
+ relevance.
+ """
+
+
+InferenceRerankResponse: TypeAlias = List[InferenceRerankResponseItem]
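
These two new files describe a rerank request and its result. The resource method itself is not part of this hunk, so the `client.inference.rerank(...)` call below is an assumption inferred from the type names, and the model id is hypothetical:

    # Items may mix plain strings with text and image content parts, matching InferenceRerankParams.
    items = [
        "Llama Stack now exposes a rerank API.",
        {"type": "text", "text": "Vector stores hold document chunks."},
        {"type": "image_url", "image_url": {"url": "https://example.com/figure.png"}},
    ]

    ranked = client.inference.rerank(  # assumed method name; not shown in this diff
        model="example/reranker-v1",
        query="rerank API",
        items=items,
        max_num_results=2,  # optional; the default returns all items
    )
    for item in ranked:  # InferenceRerankResponse is a list of index/relevance_score items
        print(item.index, item.relevance_score)
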
diff --git a/src/llama_stack_client/types/models/__init__.py b/src/llama_stack_client/types/models/__init__.py
new file mode 100644
index 00000000..6b0c3091
--- /dev/null
+++ b/src/llama_stack_client/types/models/__init__.py
@@ -0,0 +1,5 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from .openai_list_response import OpenAIListResponse as OpenAIListResponse
diff --git a/src/llama_stack_client/types/models/openai_list_response.py b/src/llama_stack_client/types/models/openai_list_response.py
new file mode 100644
index 00000000..f14845d5
--- /dev/null
+++ b/src/llama_stack_client/types/models/openai_list_response.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List
+from typing_extensions import Literal, TypeAlias
+
+from ..._models import BaseModel
+
+__all__ = ["OpenAIListResponse", "OpenAIListResponseItem"]
+
+
+class OpenAIListResponseItem(BaseModel):
+ id: str
+
+ created: int
+
+ object: Literal["model"]
+
+ owned_by: str
+
+
+OpenAIListResponse: TypeAlias = List[OpenAIListResponseItem]
diff --git a/src/llama_stack_client/types/moderation_create_params.py b/src/llama_stack_client/types/moderation_create_params.py
index 61f7bc1b..99b4228e 100644
--- a/src/llama_stack_client/types/moderation_create_params.py
+++ b/src/llama_stack_client/types/moderation_create_params.py
@@ -2,14 +2,16 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["ModerationCreateParams"]
class ModerationCreateParams(TypedDict, total=False):
- input: Required[Union[str, List[str]]]
+ input: Required[Union[str, SequenceNotStr[str]]]
"""Input (or inputs) to classify.
Can be a single string, an array of strings, or an array of multi-modal input
diff --git a/src/llama_stack_client/types/response_create_params.py b/src/llama_stack_client/types/response_create_params.py
index ac434963..ecd8da4e 100644
--- a/src/llama_stack_client/types/response_create_params.py
+++ b/src/llama_stack_client/types/response_create_params.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from .._types import SequenceNotStr
+
__all__ = [
"ResponseCreateParamsBase",
"InputUnionMember1",
@@ -47,7 +49,7 @@ class ResponseCreateParamsBase(TypedDict, total=False):
model: Required[str]
"""The underlying LLM used for completions."""
- include: List[str]
+ include: SequenceNotStr[str]
"""(Optional) Additional fields to include in the response."""
instructions: str
@@ -103,7 +105,7 @@ class InputUnionMember1OpenAIResponseOutputMessageFileSearchToolCall(TypedDict,
id: Required[str]
"""Unique identifier for this tool call"""
- queries: Required[List[str]]
+ queries: Required[SequenceNotStr[str]]
"""List of search queries executed"""
status: Required[str]
@@ -329,7 +331,7 @@ class ToolOpenAIResponseInputToolFileSearch(TypedDict, total=False):
type: Required[Literal["file_search"]]
"""Tool type identifier, always "file_search" """
- vector_store_ids: Required[List[str]]
+ vector_store_ids: Required[SequenceNotStr[str]]
"""List of vector store identifiers to search within"""
filters: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
@@ -360,10 +362,10 @@ class ToolOpenAIResponseInputToolFunction(TypedDict, total=False):
class ToolOpenAIResponseInputToolMcpRequireApprovalApprovalFilter(TypedDict, total=False):
- always: List[str]
+ always: SequenceNotStr[str]
"""(Optional) List of tool names that always require approval"""
- never: List[str]
+ never: SequenceNotStr[str]
"""(Optional) List of tool names that never require approval"""
@@ -373,12 +375,12 @@ class ToolOpenAIResponseInputToolMcpRequireApprovalApprovalFilter(TypedDict, tot
class ToolOpenAIResponseInputToolMcpAllowedToolsAllowedToolsFilter(TypedDict, total=False):
- tool_names: List[str]
+ tool_names: SequenceNotStr[str]
"""(Optional) List of specific tool names that are allowed"""
ToolOpenAIResponseInputToolMcpAllowedTools: TypeAlias = Union[
- List[str], ToolOpenAIResponseInputToolMcpAllowedToolsAllowedToolsFilter
+ SequenceNotStr[str], ToolOpenAIResponseInputToolMcpAllowedToolsAllowedToolsFilter
]
diff --git a/src/llama_stack_client/types/response_delete_response.py b/src/llama_stack_client/types/response_delete_response.py
new file mode 100644
index 00000000..e772fe35
--- /dev/null
+++ b/src/llama_stack_client/types/response_delete_response.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["ResponseDeleteResponse"]
+
+
+class ResponseDeleteResponse(BaseModel):
+ id: str
+ """Unique identifier of the deleted response"""
+
+ deleted: bool
+ """Deletion confirmation flag, always True"""
+
+ object: Literal["response"]
+ """Object type identifier, always "response" """
diff --git a/src/llama_stack_client/types/responses/input_item_list_params.py b/src/llama_stack_client/types/responses/input_item_list_params.py
index a78bd215..ff0e8f0a 100644
--- a/src/llama_stack_client/types/responses/input_item_list_params.py
+++ b/src/llama_stack_client/types/responses/input_item_list_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Literal, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = ["InputItemListParams"]
@@ -15,7 +16,7 @@ class InputItemListParams(TypedDict, total=False):
before: str
"""An item ID to list items before, used for pagination."""
- include: List[str]
+ include: SequenceNotStr[str]
"""Additional fields to include in the response."""
limit: int
diff --git a/src/llama_stack_client/types/scoring_fn_params_param.py b/src/llama_stack_client/types/scoring_fn_params_param.py
index 9753ddeb..46264833 100644
--- a/src/llama_stack_client/types/scoring_fn_params_param.py
+++ b/src/llama_stack_client/types/scoring_fn_params_param.py
@@ -5,6 +5,8 @@
from typing import List, Union
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["ScoringFnParamsParam", "LlmAsJudgeScoringFnParams", "RegexParserScoringFnParams", "BasicScoringFnParams"]
@@ -17,7 +19,7 @@ class LlmAsJudgeScoringFnParams(TypedDict, total=False):
judge_model: Required[str]
"""Identifier of the LLM model to use as a judge for scoring"""
- judge_score_regexes: Required[List[str]]
+ judge_score_regexes: Required[SequenceNotStr[str]]
"""Regexes to extract the answer from generated response"""
type: Required[Literal["llm_as_judge"]]
@@ -33,7 +35,7 @@ class RegexParserScoringFnParams(TypedDict, total=False):
]
"""Aggregation functions to apply to the scores of each row"""
- parsing_regexes: Required[List[str]]
+ parsing_regexes: Required[SequenceNotStr[str]]
"""Regex to extract the answer from generated response"""
type: Required[Literal["regex_parser"]]
diff --git a/src/llama_stack_client/types/shared_params/agent_config.py b/src/llama_stack_client/types/shared_params/agent_config.py
index 5cebec3f..c1206bd5 100644
--- a/src/llama_stack_client/types/shared_params/agent_config.py
+++ b/src/llama_stack_client/types/shared_params/agent_config.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
from ..tool_def_param import ToolDefParam
from .response_format import ResponseFormat
from .sampling_params import SamplingParams
@@ -62,14 +63,14 @@ class AgentConfig(TypedDict, total=False):
enable_session_persistence: bool
"""Optional flag indicating whether session data has to be persisted"""
- input_shields: List[str]
+ input_shields: SequenceNotStr[str]
max_infer_iters: int
name: str
"""Optional name for the agent, used in telemetry and identification"""
- output_shields: List[str]
+ output_shields: SequenceNotStr[str]
response_format: ResponseFormat
"""Optional response format configuration"""
@@ -90,4 +91,4 @@ class AgentConfig(TypedDict, total=False):
tool_prompt_format: Literal["json", "function_tag", "python_list"]
"""Prompt format for calling custom / zero shot tools."""
- toolgroups: List[Toolgroup]
+ toolgroups: SequenceNotStr[Toolgroup]
diff --git a/src/llama_stack_client/types/shared_params/sampling_params.py b/src/llama_stack_client/types/shared_params/sampling_params.py
index 55f05e8b..e5eebddd 100644
--- a/src/llama_stack_client/types/shared_params/sampling_params.py
+++ b/src/llama_stack_client/types/shared_params/sampling_params.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import List, Union
+from typing import Union
from typing_extensions import Literal, Required, TypeAlias, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = [
"SamplingParams",
"Strategy",
@@ -59,7 +61,7 @@ class SamplingParams(TypedDict, total=False):
far, increasing the model's likelihood to talk about new topics.
"""
- stop: List[str]
+ stop: SequenceNotStr[str]
"""Up to 4 sequences where the API will stop generating further tokens.
The returned text will not contain the stop sequence.
diff --git a/src/llama_stack_client/types/shared_params/tool_call.py b/src/llama_stack_client/types/shared_params/tool_call.py
index 801716e9..55d53099 100644
--- a/src/llama_stack_client/types/shared_params/tool_call.py
+++ b/src/llama_stack_client/types/shared_params/tool_call.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union
+from typing import Dict, Union
from typing_extensions import Literal, Required, TypedDict
+from ..._types import SequenceNotStr
+
__all__ = ["ToolCall"]
@@ -18,7 +20,7 @@ class ToolCall(TypedDict, total=False):
str,
float,
bool,
- List[Union[str, float, bool, None]],
+ SequenceNotStr[Union[str, float, bool, None]],
Dict[str, Union[str, float, bool, None]],
None,
],
diff --git a/src/llama_stack_client/types/telemetry_get_span_tree_params.py b/src/llama_stack_client/types/telemetry_get_span_tree_params.py
index 7d309d3e..92dc7e1d 100644
--- a/src/llama_stack_client/types/telemetry_get_span_tree_params.py
+++ b/src/llama_stack_client/types/telemetry_get_span_tree_params.py
@@ -2,14 +2,15 @@
from __future__ import annotations
-from typing import List
from typing_extensions import TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["TelemetryGetSpanTreeParams"]
class TelemetryGetSpanTreeParams(TypedDict, total=False):
- attributes_to_return: List[str]
+ attributes_to_return: SequenceNotStr[str]
"""The attributes to return in the tree."""
max_depth: int
diff --git a/src/llama_stack_client/types/telemetry_query_spans_params.py b/src/llama_stack_client/types/telemetry_query_spans_params.py
index 6429c08f..452439e3 100644
--- a/src/llama_stack_client/types/telemetry_query_spans_params.py
+++ b/src/llama_stack_client/types/telemetry_query_spans_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Iterable
+from typing import Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
from .query_condition_param import QueryConditionParam
__all__ = ["TelemetryQuerySpansParams"]
@@ -14,7 +15,7 @@ class TelemetryQuerySpansParams(TypedDict, total=False):
attribute_filters: Required[Iterable[QueryConditionParam]]
"""The attribute filters to apply to the spans."""
- attributes_to_return: Required[List[str]]
+ attributes_to_return: Required[SequenceNotStr[str]]
"""The attributes to return in the spans."""
max_depth: int
diff --git a/src/llama_stack_client/types/telemetry_query_traces_params.py b/src/llama_stack_client/types/telemetry_query_traces_params.py
index 7c82ef14..2a6eb334 100644
--- a/src/llama_stack_client/types/telemetry_query_traces_params.py
+++ b/src/llama_stack_client/types/telemetry_query_traces_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Iterable
+from typing import Iterable
from typing_extensions import TypedDict
+from .._types import SequenceNotStr
from .query_condition_param import QueryConditionParam
__all__ = ["TelemetryQueryTracesParams"]
@@ -20,5 +21,5 @@ class TelemetryQueryTracesParams(TypedDict, total=False):
offset: int
"""The offset of the traces to return."""
- order_by: List[str]
+ order_by: SequenceNotStr[str]
"""The order by of the traces to return."""
diff --git a/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py b/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py
index bb96f8e3..f0bdebbd 100644
--- a/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py
+++ b/src/llama_stack_client/types/telemetry_save_spans_to_dataset_params.py
@@ -2,9 +2,10 @@
from __future__ import annotations
-from typing import List, Iterable
+from typing import Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
from .query_condition_param import QueryConditionParam
__all__ = ["TelemetrySaveSpansToDatasetParams"]
@@ -14,7 +15,7 @@ class TelemetrySaveSpansToDatasetParams(TypedDict, total=False):
attribute_filters: Required[Iterable[QueryConditionParam]]
"""The attribute filters to apply to the spans."""
- attributes_to_save: Required[List[str]]
+ attributes_to_save: Required[SequenceNotStr[str]]
"""The attributes to save to the dataset."""
dataset_id: Required[str]
diff --git a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py
index a28faf2b..f8aa463b 100644
--- a/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py
+++ b/src/llama_stack_client/types/tool_runtime/rag_tool_query_params.py
@@ -2,9 +2,9 @@
from __future__ import annotations
-from typing import List
from typing_extensions import Required, TypedDict
+from ..._types import SequenceNotStr
from ..shared_params.query_config import QueryConfig
from ..shared_params.interleaved_content import InterleavedContent
@@ -15,7 +15,7 @@ class RagToolQueryParams(TypedDict, total=False):
content: Required[InterleavedContent]
"""The query content to search for in the indexed documents"""
- vector_db_ids: Required[List[str]]
+ vector_db_ids: Required[SequenceNotStr[str]]
"""List of vector database IDs to search within"""
query_config: QueryConfig
diff --git a/src/llama_stack_client/types/vector_store_create_params.py b/src/llama_stack_client/types/vector_store_create_params.py
index dc6ea47e..ccdb15c4 100644
--- a/src/llama_stack_client/types/vector_store_create_params.py
+++ b/src/llama_stack_client/types/vector_store_create_params.py
@@ -2,9 +2,11 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["VectorStoreCreateParams"]
@@ -24,7 +26,7 @@ class VectorStoreCreateParams(TypedDict, total=False):
expires_after: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
"""The expiration policy for a vector store."""
- file_ids: List[str]
+ file_ids: SequenceNotStr[str]
"""A list of File IDs that the vector store should use.
Useful for tools like `file_search` that can access files.
diff --git a/src/llama_stack_client/types/vector_store_search_params.py b/src/llama_stack_client/types/vector_store_search_params.py
index 5a429b79..96d4be63 100644
--- a/src/llama_stack_client/types/vector_store_search_params.py
+++ b/src/llama_stack_client/types/vector_store_search_params.py
@@ -2,14 +2,16 @@
from __future__ import annotations
-from typing import Dict, List, Union, Iterable
+from typing import Dict, Union, Iterable
from typing_extensions import Required, TypedDict
+from .._types import SequenceNotStr
+
__all__ = ["VectorStoreSearchParams", "RankingOptions"]
class VectorStoreSearchParams(TypedDict, total=False):
- query: Required[Union[str, List[str]]]
+ query: Required[Union[str, SequenceNotStr[str]]]
"""The query string or array for performing the search."""
filters: Dict[str, Union[bool, float, str, Iterable[object], object, None]]
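# Hedged illustration, not part of the patch: the SequenceNotStr[str] annotations above are
# intended to accept any real sequence of strings (list, tuple, ...) while letting static
# type checkers reject a bare str, which would otherwise satisfy Sequence[str] one character
# at a time. Runtime behavior is unchanged; TypedDict params are plain dicts, and the field
# values below are made-up examples.
from llama_stack_client.types.vector_store_create_params import VectorStoreCreateParams

as_list: VectorStoreCreateParams = {"file_ids": ["file_1", "file_2"]}   # accepted
as_tuple: VectorStoreCreateParams = {"file_ids": ("file_1", "file_2")}  # also accepted
# {"file_ids": "file_1"} would now be flagged by mypy/pyright, since str is excluded.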
diff --git a/tests/api_resources/models/__init__.py b/tests/api_resources/models/__init__.py
new file mode 100644
index 00000000..fd8019a9
--- /dev/null
+++ b/tests/api_resources/models/__init__.py
@@ -0,0 +1 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/models/test_openai.py b/tests/api_resources/models/test_openai.py
new file mode 100644
index 00000000..ea64cce2
--- /dev/null
+++ b/tests/api_resources/models/test_openai.py
@@ -0,0 +1,74 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+import os
+from typing import Any, cast
+
+import pytest
+
+from tests.utils import assert_matches_type
+from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
+from llama_stack_client.types.models import OpenAIListResponse
+
+base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
+
+
+class TestOpenAI:
+ parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+
+ @parametrize
+ def test_method_list(self, client: LlamaStackClient) -> None:
+ openai = client.models.openai.list()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
+
+ @parametrize
+ def test_raw_response_list(self, client: LlamaStackClient) -> None:
+ response = client.models.openai.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ openai = response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
+
+ @parametrize
+ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
+ with client.models.openai.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ openai = response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+
+class TestAsyncOpenAI:
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
+
+ @parametrize
+ async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
+ openai = await async_client.models.openai.list()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
+
+ @parametrize
+ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.models.openai.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ openai = await response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.models.openai.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ openai = await response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
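# Hedged usage sketch for the new models.openai sub-resource exercised by the tests above.
# The base_url is an assumed local Llama Stack server; nothing here is part of the patch.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

openai_models = client.models.openai.list()  # parses as OpenAIListResponse in the tests
print(openai_models)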
diff --git a/tests/api_resources/test_inference.py b/tests/api_resources/test_inference.py
index d5ef46d1..474ff7cf 100644
--- a/tests/api_resources/test_inference.py
+++ b/tests/api_resources/test_inference.py
@@ -12,6 +12,7 @@
from llama_stack_client.types import (
CompletionResponse,
EmbeddingsResponse,
+ InferenceRerankResponse,
InferenceBatchChatCompletionResponse,
)
from llama_stack_client.types.shared import BatchCompletion, ChatCompletionResponse
@@ -557,6 +558,53 @@ def test_streaming_response_embeddings(self, client: LlamaStackClient) -> None:
assert cast(Any, response.is_closed) is True
+ @parametrize
+ def test_method_rerank(self, client: LlamaStackClient) -> None:
+ inference = client.inference.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ )
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ @parametrize
+ def test_method_rerank_with_all_params(self, client: LlamaStackClient) -> None:
+ inference = client.inference.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ max_num_results=0,
+ )
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ @parametrize
+ def test_raw_response_rerank(self, client: LlamaStackClient) -> None:
+ response = client.inference.with_raw_response.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference = response.parse()
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ @parametrize
+ def test_streaming_response_rerank(self, client: LlamaStackClient) -> None:
+ with client.inference.with_streaming_response.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference = response.parse()
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
class TestAsyncInference:
parametrize = pytest.mark.parametrize(
@@ -1095,3 +1143,50 @@ async def test_streaming_response_embeddings(self, async_client: AsyncLlamaStack
assert_matches_type(EmbeddingsResponse, inference, path=["response"])
assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_method_rerank(self, async_client: AsyncLlamaStackClient) -> None:
+ inference = await async_client.inference.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ )
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ @parametrize
+ async def test_method_rerank_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
+ inference = await async_client.inference.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ max_num_results=0,
+ )
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ @parametrize
+ async def test_raw_response_rerank(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.inference.with_raw_response.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference = await response.parse()
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_rerank(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.inference.with_streaming_response.rerank(
+ items=["string"],
+ model="model",
+ query="string",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference = await response.parse()
+ assert_matches_type(InferenceRerankResponse, inference, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
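# Hedged usage sketch for the new inference.rerank endpoint covered by the tests above.
# The model id, query, and documents are placeholders; a reachable Llama Stack server at the
# assumed base_url is required for this to run.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

results = client.inference.rerank(
    model="example-reranker",  # placeholder model identifier
    query="What is the capital of France?",
    items=[
        "Paris is the capital of France.",
        "Berlin is the capital of Germany.",
    ],
    max_num_results=1,  # optional, mirrors test_method_rerank_with_all_params
)
# `results` is asserted to match InferenceRerankResponse in the tests.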
diff --git a/tests/api_resources/test_responses.py b/tests/api_resources/test_responses.py
index 44366d61..ad2ab3be 100644
--- a/tests/api_resources/test_responses.py
+++ b/tests/api_resources/test_responses.py
@@ -9,7 +9,11 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types import ResponseObject, ResponseListResponse
+from llama_stack_client.types import (
+ ResponseObject,
+ ResponseListResponse,
+ ResponseDeleteResponse,
+)
from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -221,6 +225,44 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert cast(Any, http_response.is_closed) is True
+ @parametrize
+ def test_method_delete(self, client: LlamaStackClient) -> None:
+ response = client.responses.delete(
+ "response_id",
+ )
+ assert_matches_type(ResponseDeleteResponse, response, path=["response"])
+
+ @parametrize
+ def test_raw_response_delete(self, client: LlamaStackClient) -> None:
+ http_response = client.responses.with_raw_response.delete(
+ "response_id",
+ )
+
+ assert http_response.is_closed is True
+ assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
+ response = http_response.parse()
+ assert_matches_type(ResponseDeleteResponse, response, path=["response"])
+
+ @parametrize
+ def test_streaming_response_delete(self, client: LlamaStackClient) -> None:
+ with client.responses.with_streaming_response.delete(
+ "response_id",
+ ) as http_response:
+ assert not http_response.is_closed
+ assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ response = http_response.parse()
+ assert_matches_type(ResponseDeleteResponse, response, path=["response"])
+
+ assert cast(Any, http_response.is_closed) is True
+
+ @parametrize
+ def test_path_params_delete(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
+ client.responses.with_raw_response.delete(
+ "",
+ )
+
class TestAsyncResponses:
parametrize = pytest.mark.parametrize(
@@ -429,3 +471,41 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert_matches_type(AsyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
assert cast(Any, http_response.is_closed) is True
+
+ @parametrize
+ async def test_method_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.responses.delete(
+ "response_id",
+ )
+ assert_matches_type(ResponseDeleteResponse, response, path=["response"])
+
+ @parametrize
+ async def test_raw_response_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ http_response = await async_client.responses.with_raw_response.delete(
+ "response_id",
+ )
+
+ assert http_response.is_closed is True
+ assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
+ response = await http_response.parse()
+ assert_matches_type(ResponseDeleteResponse, response, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.responses.with_streaming_response.delete(
+ "response_id",
+ ) as http_response:
+ assert not http_response.is_closed
+ assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ response = await http_response.parse()
+ assert_matches_type(ResponseDeleteResponse, response, path=["response"])
+
+ assert cast(Any, http_response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `response_id` but received ''"):
+ await async_client.responses.with_raw_response.delete(
+ "",
+ )
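# Hedged sketch of the new responses.delete method added above; the response id is a
# placeholder and the base_url assumes a local Llama Stack server.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")

deleted = client.responses.delete("resp_abc123")  # returns a ResponseDeleteResponse
# Passing an empty id is rejected client-side, matching test_path_params_delete:
#   ValueError: Expected a non-empty value for `response_id` but received ''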
diff --git a/tests/api_resources/test_shields.py b/tests/api_resources/test_shields.py
index 037a66d3..8eaab0f9 100644
--- a/tests/api_resources/test_shields.py
+++ b/tests/api_resources/test_shields.py
@@ -80,6 +80,44 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert cast(Any, response.is_closed) is True
+ @parametrize
+ def test_method_delete(self, client: LlamaStackClient) -> None:
+ shield = client.shields.delete(
+ "identifier",
+ )
+ assert shield is None
+
+ @parametrize
+ def test_raw_response_delete(self, client: LlamaStackClient) -> None:
+ response = client.shields.with_raw_response.delete(
+ "identifier",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ shield = response.parse()
+ assert shield is None
+
+ @parametrize
+ def test_streaming_response_delete(self, client: LlamaStackClient) -> None:
+ with client.shields.with_streaming_response.delete(
+ "identifier",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ shield = response.parse()
+ assert shield is None
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_delete(self, client: LlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `identifier` but received ''"):
+ client.shields.with_raw_response.delete(
+ "",
+ )
+
@parametrize
def test_method_register(self, client: LlamaStackClient) -> None:
shield = client.shields.register(
@@ -190,6 +228,44 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert cast(Any, response.is_closed) is True
+ @parametrize
+ async def test_method_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ shield = await async_client.shields.delete(
+ "identifier",
+ )
+ assert shield is None
+
+ @parametrize
+ async def test_raw_response_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ response = await async_client.shields.with_raw_response.delete(
+ "identifier",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ shield = await response.parse()
+ assert shield is None
+
+ @parametrize
+ async def test_streaming_response_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ async with async_client.shields.with_streaming_response.delete(
+ "identifier",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ shield = await response.parse()
+ assert shield is None
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_delete(self, async_client: AsyncLlamaStackClient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `identifier` but received ''"):
+ await async_client.shields.with_raw_response.delete(
+ "",
+ )
+
@parametrize
async def test_method_register(self, async_client: AsyncLlamaStackClient) -> None:
shield = await async_client.shields.register(
diff --git a/tests/test_client.py b/tests/test_client.py
index 14889fae..a5bce12c 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -6,13 +6,10 @@
import os
import sys
import json
-import time
import asyncio
import inspect
-import subprocess
import tracemalloc
from typing import Any, Union, cast
-from textwrap import dedent
from unittest import mock
from typing_extensions import Literal
@@ -23,14 +20,17 @@
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient, APIResponseValidationError
from llama_stack_client._types import Omit
+from llama_stack_client._utils import asyncify
from llama_stack_client._models import BaseModel, FinalRequestOptions
from llama_stack_client._exceptions import APIStatusError, APITimeoutError, APIResponseValidationError
from llama_stack_client._base_client import (
DEFAULT_TIMEOUT,
HTTPX_DEFAULT_TIMEOUT,
BaseClient,
+ OtherPlatform,
DefaultHttpxClient,
DefaultAsyncHttpxClient,
+ get_platform,
make_request_options,
)
@@ -1638,50 +1638,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
assert response.http_request.headers.get("x-stainless-retry-count") == "42"
- def test_get_platform(self) -> None:
- # A previous implementation of asyncify could leave threads unterminated when
- # used with nest_asyncio.
- #
- # Since nest_asyncio.apply() is global and cannot be un-applied, this
- # test is run in a separate process to avoid affecting other tests.
- test_code = dedent("""
- import asyncio
- import nest_asyncio
- import threading
-
- from llama_stack_client._utils import asyncify
- from llama_stack_client._base_client import get_platform
-
- async def test_main() -> None:
- result = await asyncify(get_platform)()
- print(result)
- for thread in threading.enumerate():
- print(thread.name)
-
- nest_asyncio.apply()
- asyncio.run(test_main())
- """)
- with subprocess.Popen(
- [sys.executable, "-c", test_code],
- text=True,
- ) as process:
- timeout = 10 # seconds
-
- start_time = time.monotonic()
- while True:
- return_code = process.poll()
- if return_code is not None:
- if return_code != 0:
- raise AssertionError("calling get_platform using asyncify resulted in a non-zero exit code")
-
- # success
- break
-
- if time.monotonic() - start_time > timeout:
- process.kill()
- raise AssertionError("calling get_platform using asyncify resulted in a hung process")
-
- time.sleep(0.1)
+ async def test_get_platform(self) -> None:
+ platform = await asyncify(get_platform)()
+ assert isinstance(platform, (str, OtherPlatform))
async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Test that the proxy environment variables are set correctly
diff --git a/tests/test_models.py b/tests/test_models.py
index c5135234..396f2bf2 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -8,7 +8,7 @@
from pydantic import Field
from llama_stack_client._utils import PropertyInfo
-from llama_stack_client._compat import PYDANTIC_V2, parse_obj, model_dump, model_json
+from llama_stack_client._compat import PYDANTIC_V1, parse_obj, model_dump, model_json
from llama_stack_client._models import BaseModel, construct_type
@@ -294,12 +294,12 @@ class Model(BaseModel):
assert cast(bool, m.foo) is True
m = Model.construct(foo={"name": 3})
- if PYDANTIC_V2:
- assert isinstance(m.foo, Submodel1)
- assert m.foo.name == 3 # type: ignore
- else:
+ if PYDANTIC_V1:
assert isinstance(m.foo, Submodel2)
assert m.foo.name == "3"
+ else:
+ assert isinstance(m.foo, Submodel1)
+ assert m.foo.name == 3 # type: ignore
def test_list_of_unions() -> None:
@@ -426,10 +426,10 @@ class Model(BaseModel):
expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc)
- if PYDANTIC_V2:
- expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
- else:
+ if PYDANTIC_V1:
expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}'
+ else:
+ expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}'
model = Model.construct(created_at="2019-12-27T18:11:19.117Z")
assert model.created_at == expected
@@ -531,7 +531,7 @@ class Model2(BaseModel):
assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)}
assert m4.to_dict(mode="json") == {"created_at": time_str}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"):
m.to_dict(warnings=False)
@@ -556,7 +556,7 @@ class Model(BaseModel):
assert m3.model_dump() == {"foo": None}
assert m3.model_dump(exclude_none=True) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"):
m.model_dump(round_trip=True)
@@ -580,10 +580,10 @@ class Model(BaseModel):
assert json.loads(m.to_json()) == {"FOO": "hello"}
assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"}
- if PYDANTIC_V2:
- assert m.to_json(indent=None) == '{"FOO":"hello"}'
- else:
+ if PYDANTIC_V1:
assert m.to_json(indent=None) == '{"FOO": "hello"}'
+ else:
+ assert m.to_json(indent=None) == '{"FOO":"hello"}'
m2 = Model()
assert json.loads(m2.to_json()) == {}
@@ -595,7 +595,7 @@ class Model(BaseModel):
assert json.loads(m3.to_json()) == {"FOO": None}
assert json.loads(m3.to_json(exclude_none=True)) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"):
m.to_json(warnings=False)
@@ -622,7 +622,7 @@ class Model(BaseModel):
assert json.loads(m3.model_dump_json()) == {"foo": None}
assert json.loads(m3.model_dump_json(exclude_none=True)) == {}
- if not PYDANTIC_V2:
+ if PYDANTIC_V1:
with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"):
m.model_dump_json(round_trip=True)
@@ -679,12 +679,12 @@ class B(BaseModel):
)
assert isinstance(m, A)
assert m.type == "a"
- if PYDANTIC_V2:
- assert m.data == 100 # type: ignore[comparison-overlap]
- else:
+ if PYDANTIC_V1:
# pydantic v1 automatically converts inputs to strings
# if the expected type is a str
assert m.data == "100"
+ else:
+ assert m.data == 100 # type: ignore[comparison-overlap]
def test_discriminated_unions_unknown_variant() -> None:
@@ -768,12 +768,12 @@ class B(BaseModel):
)
assert isinstance(m, A)
assert m.foo_type == "a"
- if PYDANTIC_V2:
- assert m.data == 100 # type: ignore[comparison-overlap]
- else:
+ if PYDANTIC_V1:
# pydantic v1 automatically converts inputs to strings
# if the expected type is a str
assert m.data == "100"
+ else:
+ assert m.data == 100 # type: ignore[comparison-overlap]
def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None:
@@ -833,7 +833,7 @@ class B(BaseModel):
assert UnionType.__discriminator__ is discriminator
-@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1")
+@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1")
def test_type_alias_type() -> None:
Alias = TypeAliasType("Alias", str) # pyright: ignore
@@ -849,7 +849,7 @@ class Model(BaseModel):
assert m.union == "bar"
-@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1")
+@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1")
def test_field_named_cls() -> None:
class Model(BaseModel):
cls: str
@@ -936,7 +936,7 @@ class Type2(BaseModel):
assert isinstance(model.value, InnerType2)
-@pytest.mark.skipif(not PYDANTIC_V2, reason="this is only supported in pydantic v2 for now")
+@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now")
def test_extra_properties() -> None:
class Item(BaseModel):
prop: int
diff --git a/tests/test_transform.py b/tests/test_transform.py
index b6eb411d..b81e616f 100644
--- a/tests/test_transform.py
+++ b/tests/test_transform.py
@@ -8,14 +8,14 @@
import pytest
-from llama_stack_client._types import NOT_GIVEN, Base64FileInput
+from llama_stack_client._types import Base64FileInput, omit, not_given
from llama_stack_client._utils import (
PropertyInfo,
transform as _transform,
parse_datetime,
async_transform as _async_transform,
)
-from llama_stack_client._compat import PYDANTIC_V2
+from llama_stack_client._compat import PYDANTIC_V1
from llama_stack_client._models import BaseModel
_T = TypeVar("_T")
@@ -189,7 +189,7 @@ class DateModel(BaseModel):
@pytest.mark.asyncio
async def test_iso8601_format(use_async: bool) -> None:
dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00")
- tz = "Z" if PYDANTIC_V2 else "+00:00"
+ tz = "+00:00" if PYDANTIC_V1 else "Z"
assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap]
assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap]
@@ -297,11 +297,11 @@ async def test_pydantic_unknown_field(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_pydantic_mismatched_types(use_async: bool) -> None:
model = MyModel.construct(foo=True)
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ params = await transform(model, Any, use_async)
+ else:
with pytest.warns(UserWarning):
params = await transform(model, Any, use_async)
- else:
- params = await transform(model, Any, use_async)
assert cast(Any, params) == {"foo": True}
@@ -309,11 +309,11 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_pydantic_mismatched_object_type(use_async: bool) -> None:
model = MyModel.construct(foo=MyModel.construct(hello="world"))
- if PYDANTIC_V2:
+ if PYDANTIC_V1:
+ params = await transform(model, Any, use_async)
+ else:
with pytest.warns(UserWarning):
params = await transform(model, Any, use_async)
- else:
- params = await transform(model, Any, use_async)
assert cast(Any, params) == {"foo": {"hello": "world"}}
@@ -450,4 +450,11 @@ async def test_transform_skipping(use_async: bool) -> None:
@pytest.mark.asyncio
async def test_strips_notgiven(use_async: bool) -> None:
assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"}
- assert await transform({"foo_bar": NOT_GIVEN}, Foo1, use_async) == {}
+ assert await transform({"foo_bar": not_given}, Foo1, use_async) == {}
+
+
+@parametrize
+@pytest.mark.asyncio
+async def test_strips_omit(use_async: bool) -> None:
+ assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"}
+ assert await transform({"foo_bar": omit}, Foo1, use_async) == {}
diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py
new file mode 100644
index 00000000..a95a9e55
--- /dev/null
+++ b/tests/test_utils/test_datetime_parse.py
@@ -0,0 +1,110 @@
+"""
+Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py
+with modifications so it works without pydantic v1 imports.
+"""
+
+from typing import Type, Union
+from datetime import date, datetime, timezone, timedelta
+
+import pytest
+
+from llama_stack_client._utils import parse_date, parse_datetime
+
+
+def create_tz(minutes: int) -> timezone:
+ return timezone(timedelta(minutes=minutes))
+
+
+@pytest.mark.parametrize(
+ "value,result",
+ [
+ # Valid inputs
+ ("1494012444.883309", date(2017, 5, 5)),
+ (b"1494012444.883309", date(2017, 5, 5)),
+ (1_494_012_444.883_309, date(2017, 5, 5)),
+ ("1494012444", date(2017, 5, 5)),
+ (1_494_012_444, date(2017, 5, 5)),
+ (0, date(1970, 1, 1)),
+ ("2012-04-23", date(2012, 4, 23)),
+ (b"2012-04-23", date(2012, 4, 23)),
+ ("2012-4-9", date(2012, 4, 9)),
+ (date(2012, 4, 9), date(2012, 4, 9)),
+ (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)),
+ # Invalid inputs
+ ("x20120423", ValueError),
+ ("2012-04-56", ValueError),
+ (19_999_999_999, date(2603, 10, 11)), # just before watershed
+ (20_000_000_001, date(1970, 8, 20)), # just after watershed
+ (1_549_316_052, date(2019, 2, 4)), # nowish in s
+ (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms
+ (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs
+ (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns
+ ("infinity", date(9999, 12, 31)),
+ ("inf", date(9999, 12, 31)),
+ (float("inf"), date(9999, 12, 31)),
+ ("infinity ", date(9999, 12, 31)),
+ (int("1" + "0" * 100), date(9999, 12, 31)),
+ (1e1000, date(9999, 12, 31)),
+ ("-infinity", date(1, 1, 1)),
+ ("-inf", date(1, 1, 1)),
+ ("nan", ValueError),
+ ],
+)
+def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None:
+ if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance]
+ with pytest.raises(result):
+ parse_date(value)
+ else:
+ assert parse_date(value) == result
+
+
+@pytest.mark.parametrize(
+ "value,result",
+ [
+ # Valid inputs
+ # values in seconds
+ ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
+ (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)),
+ ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ # values in ms
+ ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)),
+ ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)),
+ (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)),
+ ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)),
+ ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)),
+ ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)),
+ ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))),
+ ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))),
+ ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))),
+ ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
+ (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))),
+ (datetime(2017, 5, 5), datetime(2017, 5, 5)),
+ (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)),
+ # Invalid inputs
+ ("x20120423091500", ValueError),
+ ("2012-04-56T09:15:90", ValueError),
+ ("2012-04-23T11:05:00-25:00", ValueError),
+ (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed
+ (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed
+ (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s
+ (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms
+ (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs
+ (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns
+ ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)),
+ ("-infinity", datetime(1, 1, 1, 0, 0)),
+ ("-inf", datetime(1, 1, 1, 0, 0)),
+ ("nan", ValueError),
+ ],
+)
+def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None:
+ if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance]
+ with pytest.raises(result):
+ parse_datetime(value)
+ else:
+ assert parse_datetime(value) == result
diff --git a/tests/utils.py b/tests/utils.py
index 9d0ce74f..ba5e26bf 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -4,7 +4,7 @@
import inspect
import traceback
import contextlib
-from typing import Any, TypeVar, Iterator, cast
+from typing import Any, TypeVar, Iterator, Sequence, cast
from datetime import date, datetime
from typing_extensions import Literal, get_args, get_origin, assert_type
@@ -15,10 +15,11 @@
is_list_type,
is_union_type,
extract_type_arg,
+ is_sequence_type,
is_annotated_type,
is_type_alias_type,
)
-from llama_stack_client._compat import PYDANTIC_V2, field_outer_type, get_model_fields
+from llama_stack_client._compat import PYDANTIC_V1, field_outer_type, get_model_fields
from llama_stack_client._models import BaseModel
BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
@@ -27,12 +28,12 @@
def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool:
for name, field in get_model_fields(model).items():
field_value = getattr(value, name)
- if PYDANTIC_V2:
- allow_none = False
- else:
+ if PYDANTIC_V1:
# in v1 nullability was structured differently
# https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields
allow_none = getattr(field, "allow_none", False)
+ else:
+ allow_none = False
assert_matches_type(
field_outer_type(field),
@@ -71,6 +72,13 @@ def assert_matches_type(
if is_list_type(type_):
return _assert_list_type(type_, value)
+ if is_sequence_type(type_):
+ assert isinstance(value, Sequence)
+ inner_type = get_args(type_)[0]
+ for entry in value: # type: ignore
+ assert_type(inner_type, entry) # type: ignore
+ return
+
if origin == str:
assert isinstance(value, str)
elif origin == int:
diff --git a/uv.lock b/uv.lock
index 3a43e2dd..13182b4f 100644
--- a/uv.lock
+++ b/uv.lock
@@ -17,7 +17,7 @@ wheels = [
[[package]]
name = "aiohttp"
-version = "3.12.15"
+version = "3.12.13"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohappyeyeballs" },
@@ -28,42 +28,42 @@ dependencies = [
{ name = "propcache" },
{ name = "yarl" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" },
- { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" },
- { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" },
- { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" },
- { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" },
- { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" },
- { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" },
- { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" },
- { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" },
- { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" },
- { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" },
- { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" },
- { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" },
- { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" },
- { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" },
- { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" },
- { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" },
- { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" },
- { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" },
- { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" },
- { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" },
- { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" },
- { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" },
- { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" },
- { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" },
- { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" },
- { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" },
- { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" },
- { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" },
- { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" },
- { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" },
- { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" },
- { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" },
- { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" },
+ { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" },
+ { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" },
+ { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, upload-time = "2025-06-14T15:14:16.48Z" },
+ { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" },
+ { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" },
+ { url = "https://files.pythonhosted.org/packages/11/0f/db19abdf2d86aa1deec3c1e0e5ea46a587b97c07a16516b6438428b3a3f8/aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", size = 694910, upload-time = "2025-06-14T15:14:30.604Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/81/0ab551e1b5d7f1339e2d6eb482456ccbe9025605b28eed2b1c0203aaaade/aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", size = 472566, upload-time = "2025-06-14T15:14:32.275Z" },
+ { url = "https://files.pythonhosted.org/packages/34/3f/6b7d336663337672d29b1f82d1f252ec1a040fe2d548f709d3f90fa2218a/aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", size = 464856, upload-time = "2025-06-14T15:14:34.132Z" },
+ { url = "https://files.pythonhosted.org/packages/26/7f/32ca0f170496aa2ab9b812630fac0c2372c531b797e1deb3deb4cea904bd/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", size = 1703683, upload-time = "2025-06-14T15:14:36.034Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/53/d5513624b33a811c0abea8461e30a732294112318276ce3dbf047dbd9d8b/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", size = 1684946, upload-time = "2025-06-14T15:14:38Z" },
+ { url = "https://files.pythonhosted.org/packages/37/72/4c237dd127827b0247dc138d3ebd49c2ded6114c6991bbe969058575f25f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", size = 1737017, upload-time = "2025-06-14T15:14:39.951Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/67/8a7eb3afa01e9d0acc26e1ef847c1a9111f8b42b82955fcd9faeb84edeb4/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", size = 1786390, upload-time = "2025-06-14T15:14:42.151Z" },
+ { url = "https://files.pythonhosted.org/packages/48/19/0377df97dd0176ad23cd8cad4fd4232cfeadcec6c1b7f036315305c98e3f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", size = 1708719, upload-time = "2025-06-14T15:14:44.039Z" },
+ { url = "https://files.pythonhosted.org/packages/61/97/ade1982a5c642b45f3622255173e40c3eed289c169f89d00eeac29a89906/aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", size = 1622424, upload-time = "2025-06-14T15:14:45.945Z" },
+ { url = "https://files.pythonhosted.org/packages/99/ab/00ad3eea004e1d07ccc406e44cfe2b8da5acb72f8c66aeeb11a096798868/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", size = 1675447, upload-time = "2025-06-14T15:14:47.911Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/fe/74e5ce8b2ccaba445fe0087abc201bfd7259431d92ae608f684fcac5d143/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", size = 1707110, upload-time = "2025-06-14T15:14:50.334Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/c4/39af17807f694f7a267bd8ab1fbacf16ad66740862192a6c8abac2bff813/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", size = 1649706, upload-time = "2025-06-14T15:14:52.378Z" },
+ { url = "https://files.pythonhosted.org/packages/38/e8/f5a0a5f44f19f171d8477059aa5f28a158d7d57fe1a46c553e231f698435/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", size = 1725839, upload-time = "2025-06-14T15:14:54.617Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/ac/81acc594c7f529ef4419d3866913f628cd4fa9cab17f7bf410a5c3c04c53/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", size = 1759311, upload-time = "2025-06-14T15:14:56.597Z" },
+ { url = "https://files.pythonhosted.org/packages/38/0d/aabe636bd25c6ab7b18825e5a97d40024da75152bec39aa6ac8b7a677630/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", size = 1708202, upload-time = "2025-06-14T15:14:58.598Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/ab/561ef2d8a223261683fb95a6283ad0d36cb66c87503f3a7dde7afe208bb2/aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", size = 420794, upload-time = "2025-06-14T15:15:00.939Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/47/b11d0089875a23bff0abd3edb5516bcd454db3fefab8604f5e4b07bd6210/aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", size = 446735, upload-time = "2025-06-14T15:15:02.858Z" },
]
[[package]]
@@ -90,16 +90,16 @@ wheels = [
[[package]]
name = "anyio"
-version = "4.10.0"
+version = "4.8.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13' or (extra == 'group-18-llama-stack-client-pydantic-v1' and extra == 'group-18-llama-stack-client-pydantic-v2')" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126, upload-time = "2025-01-05T13:13:11.095Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" },
+ { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041, upload-time = "2025-01-05T13:13:07.985Z" },
]
[[package]]
@@ -137,11 +137,11 @@ wheels = [
[[package]]
name = "certifi"
-version = "2025.8.3"
+version = "2024.12.14"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010, upload-time = "2024-12-14T13:52:38.02Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927, upload-time = "2024-12-14T13:52:36.114Z" },
]
[[package]]
@@ -385,11 +385,11 @@ wheels = [
[[package]]
name = "identify"
-version = "2.6.13"
+version = "2.6.14"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/82/ca/ffbabe3635bb839aa36b3a893c91a9b0d368cb4d8073e03a12896970af82/identify-2.6.13.tar.gz", hash = "sha256:da8d6c828e773620e13bfa86ea601c5a5310ba4bcd65edf378198b56a1f9fb32", size = 99243, upload-time = "2025-08-09T19:35:00.6Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e7/ce/461b60a3ee109518c055953729bf9ed089a04db895d47e95444071dcdef2/identify-2.6.13-py2.py3-none-any.whl", hash = "sha256:60381139b3ae39447482ecc406944190f690d4a2997f2584062089848361b33b", size = 99153, upload-time = "2025-08-09T19:34:59.1Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" },
]
[[package]]
@@ -403,23 +403,23 @@ wheels = [
[[package]]
name = "importlib-metadata"
-version = "8.7.0"
+version = "8.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "zipp" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767, upload-time = "2025-01-20T22:21:30.429Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" },
+ { url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971, upload-time = "2025-01-20T22:21:29.177Z" },
]
[[package]]
name = "iniconfig"
-version = "2.1.0"
+version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" },
]
[[package]]
@@ -435,8 +435,8 @@ dependencies = [
{ name = "pandas" },
{ name = "prompt-toolkit" },
{ name = "pyaml" },
- { name = "pydantic", version = "1.10.22", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'group-18-llama-stack-client-pydantic-v1'" },
- { name = "pydantic", version = "2.11.7", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'group-18-llama-stack-client-pydantic-v2' or extra != 'group-18-llama-stack-client-pydantic-v1'" },
+ { name = "pydantic", version = "1.10.23", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'group-18-llama-stack-client-pydantic-v1'" },
+ { name = "pydantic", version = "2.11.9", source = { registry = "https://pypi.org/simple" }, marker = "extra == 'group-18-llama-stack-client-pydantic-v2' or extra != 'group-18-llama-stack-client-pydantic-v1'" },
{ name = "requests" },
{ name = "rich" },
{ name = "sniffio" },
@@ -457,7 +457,6 @@ dev = [
{ name = "dirty-equals" },
{ name = "importlib-metadata" },
{ name = "mypy" },
- { name = "nest-asyncio" },
{ name = "pre-commit" },
{ name = "pyright" },
{ name = "pytest" },
@@ -469,10 +468,10 @@ dev = [
{ name = "time-machine" },
]
pydantic-v1 = [
- { name = "pydantic", version = "1.10.22", source = { registry = "https://pypi.org/simple" } },
+ { name = "pydantic", version = "1.10.23", source = { registry = "https://pypi.org/simple" } },
]
pydantic-v2 = [
- { name = "pydantic", version = "2.11.7", source = { registry = "https://pypi.org/simple" } },
+ { name = "pydantic", version = "2.11.9", source = { registry = "https://pypi.org/simple" } },
]
[package.metadata]
@@ -500,14 +499,11 @@ provides-extras = ["aiohttp"]
[package.metadata.requires-dev]
dev = [
{ name = "black" },
- { name = "dirty-equals" },
{ name = "dirty-equals", specifier = ">=0.6.0" },
{ name = "importlib-metadata", specifier = ">=6.7.0" },
{ name = "mypy" },
- { name = "nest-asyncio", specifier = "==1.6.0" },
{ name = "pre-commit" },
{ name = "pyright", specifier = "==1.1.399" },
- { name = "pytest" },
{ name = "pytest", specifier = ">=7.1.1" },
{ name = "pytest-asyncio" },
{ name = "pytest-xdist", specifier = ">=3.6.1" },
@@ -521,14 +517,14 @@ pydantic-v2 = [{ name = "pydantic", specifier = ">=2,<3" }]
[[package]]
name = "markdown-it-py"
-version = "4.0.0"
+version = "3.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mdurl" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
+ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
]
[[package]]
@@ -542,115 +538,99 @@ wheels = [
[[package]]
name = "multidict"
-version = "6.6.4"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" },
- { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" },
- { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" },
- { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" },
- { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" },
- { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" },
- { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" },
- { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" },
- { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" },
- { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" },
- { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" },
- { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" },
- { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" },
- { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" },
- { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" },
- { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" },
- { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" },
- { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" },
- { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" },
- { url = "https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" },
- { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" },
- { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" },
- { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" },
- { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" },
- { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" },
- { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" },
- { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" },
- { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" },
- { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" },
- { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" },
- { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" },
- { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" },
- { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" },
- { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" },
- { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" },
- { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" },
- { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" },
- { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" },
- { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" },
- { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" },
- { url = "https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" },
- { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" },
- { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" },
- { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" },
- { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" },
- { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" },
- { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" },
- { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" },
- { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" },
- { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" },
- { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" },
- { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = "2025-08-11T12:08:12.439Z" },
- { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" },
- { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" },
- { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" },
+version = "6.6.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" },
+ { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" },
+ { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" },
+ { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" },
+ { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" },
+ { url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" },
+ { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" },
+ { url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" },
+ { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = "2025-06-30T15:52:23.736Z" },
+ { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223, upload-time = "2025-06-30T15:52:25.185Z" },
+ { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" },
+ { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649, upload-time = "2025-06-30T15:52:32.996Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = "2025-06-30T15:52:34.521Z" },
+ { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" },
+ { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" },
+ { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" },
+ { url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" },
+ { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = "2025-06-30T15:52:50.903Z" },
+ { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" },
+ { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" },
+ { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" },
+ { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432, upload-time = "2025-06-30T15:53:01.602Z" },
+ { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" },
+ { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = "2025-06-30T15:53:09.263Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" },
]
[[package]]
name = "mypy"
-version = "1.17.1"
+version = "1.14.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mypy-extensions" },
- { name = "pathspec" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" },
- { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" },
- { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" },
- { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" },
- { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" },
- { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" },
- { url = "https://files.pythonhosted.org/packages/5b/82/aec2fc9b9b149f372850291827537a508d6c4d3664b1750a324b91f71355/mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", size = 11075338, upload-time = "2025-07-31T07:53:38.873Z" },
- { url = "https://files.pythonhosted.org/packages/07/ac/ee93fbde9d2242657128af8c86f5d917cd2887584cf948a8e3663d0cd737/mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", size = 10113066, upload-time = "2025-07-31T07:54:14.707Z" },
- { url = "https://files.pythonhosted.org/packages/5a/68/946a1e0be93f17f7caa56c45844ec691ca153ee8b62f21eddda336a2d203/mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", size = 11875473, upload-time = "2025-07-31T07:53:14.504Z" },
- { url = "https://files.pythonhosted.org/packages/9f/0f/478b4dce1cb4f43cf0f0d00fba3030b21ca04a01b74d1cd272a528cf446f/mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", size = 12744296, upload-time = "2025-07-31T07:53:03.896Z" },
- { url = "https://files.pythonhosted.org/packages/ca/70/afa5850176379d1b303f992a828de95fc14487429a7139a4e0bdd17a8279/mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", size = 12914657, upload-time = "2025-07-31T07:54:08.576Z" },
- { url = "https://files.pythonhosted.org/packages/53/f9/4a83e1c856a3d9c8f6edaa4749a4864ee98486e9b9dbfbc93842891029c2/mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", size = 9593320, upload-time = "2025-07-31T07:53:01.341Z" },
- { url = "https://files.pythonhosted.org/packages/38/56/79c2fac86da57c7d8c48622a05873eaab40b905096c33597462713f5af90/mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733", size = 11040037, upload-time = "2025-07-31T07:54:10.942Z" },
- { url = "https://files.pythonhosted.org/packages/4d/c3/adabe6ff53638e3cad19e3547268482408323b1e68bf082c9119000cd049/mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd", size = 10131550, upload-time = "2025-07-31T07:53:41.307Z" },
- { url = "https://files.pythonhosted.org/packages/b8/c5/2e234c22c3bdeb23a7817af57a58865a39753bde52c74e2c661ee0cfc640/mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0", size = 11872963, upload-time = "2025-07-31T07:53:16.878Z" },
- { url = "https://files.pythonhosted.org/packages/ab/26/c13c130f35ca8caa5f2ceab68a247775648fdcd6c9a18f158825f2bc2410/mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a", size = 12710189, upload-time = "2025-07-31T07:54:01.962Z" },
- { url = "https://files.pythonhosted.org/packages/82/df/c7d79d09f6de8383fe800521d066d877e54d30b4fb94281c262be2df84ef/mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91", size = 12900322, upload-time = "2025-07-31T07:53:10.551Z" },
- { url = "https://files.pythonhosted.org/packages/b8/98/3d5a48978b4f708c55ae832619addc66d677f6dc59f3ebad71bae8285ca6/mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed", size = 9751879, upload-time = "2025-07-31T07:52:56.683Z" },
- { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" },
-]
-
-[[package]]
-name = "mypy-extensions"
-version = "1.1.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051, upload-time = "2024-12-30T16:39:07.335Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
+ { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668, upload-time = "2024-12-30T16:38:02.211Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060, upload-time = "2024-12-30T16:37:46.131Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167, upload-time = "2024-12-30T16:37:43.534Z" },
+ { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341, upload-time = "2024-12-30T16:37:36.249Z" },
+ { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991, upload-time = "2024-12-30T16:37:06.743Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016, upload-time = "2024-12-30T16:37:15.02Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097, upload-time = "2024-12-30T16:37:25.144Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728, upload-time = "2024-12-30T16:38:08.634Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965, upload-time = "2024-12-30T16:38:12.132Z" },
+ { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660, upload-time = "2024-12-30T16:38:17.342Z" },
+ { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198, upload-time = "2024-12-30T16:38:32.839Z" },
+ { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276, upload-time = "2024-12-30T16:38:20.828Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905, upload-time = "2024-12-30T16:38:42.021Z" },
]

[[package]]
-name = "nest-asyncio"
-version = "1.6.0"
+name = "mypy-extensions"
+version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433, upload-time = "2023-02-04T12:11:27.157Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695, upload-time = "2023-02-04T12:11:25.002Z" },
]

[[package]]
@@ -664,74 +644,74 @@ wheels = [

[[package]]
name = "numpy"
-version = "2.3.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306, upload-time = "2025-07-24T21:32:07.553Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/00/6d/745dd1c1c5c284d17725e5c802ca4d45cfc6803519d777f087b71c9f4069/numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b", size = 20956420, upload-time = "2025-07-24T20:28:18.002Z" },
- { url = "https://files.pythonhosted.org/packages/bc/96/e7b533ea5740641dd62b07a790af5d9d8fec36000b8e2d0472bd7574105f/numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f", size = 14184660, upload-time = "2025-07-24T20:28:39.522Z" },
- { url = "https://files.pythonhosted.org/packages/2b/53/102c6122db45a62aa20d1b18c9986f67e6b97e0d6fbc1ae13e3e4c84430c/numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0", size = 5113382, upload-time = "2025-07-24T20:28:48.544Z" },
- { url = "https://files.pythonhosted.org/packages/2b/21/376257efcbf63e624250717e82b4fae93d60178f09eb03ed766dbb48ec9c/numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b", size = 6647258, upload-time = "2025-07-24T20:28:59.104Z" },
- { url = "https://files.pythonhosted.org/packages/91/ba/f4ebf257f08affa464fe6036e13f2bf9d4642a40228781dc1235da81be9f/numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370", size = 14281409, upload-time = "2025-07-24T20:40:30.298Z" },
- { url = "https://files.pythonhosted.org/packages/59/ef/f96536f1df42c668cbacb727a8c6da7afc9c05ece6d558927fb1722693e1/numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73", size = 16641317, upload-time = "2025-07-24T20:40:56.625Z" },
- { url = "https://files.pythonhosted.org/packages/f6/a7/af813a7b4f9a42f498dde8a4c6fcbff8100eed00182cc91dbaf095645f38/numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc", size = 16056262, upload-time = "2025-07-24T20:41:20.797Z" },
- { url = "https://files.pythonhosted.org/packages/8b/5d/41c4ef8404caaa7f05ed1cfb06afe16a25895260eacbd29b4d84dff2920b/numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be", size = 18579342, upload-time = "2025-07-24T20:41:50.753Z" },
- { url = "https://files.pythonhosted.org/packages/a1/4f/9950e44c5a11636f4a3af6e825ec23003475cc9a466edb7a759ed3ea63bd/numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036", size = 6320610, upload-time = "2025-07-24T20:42:01.551Z" },
- { url = "https://files.pythonhosted.org/packages/7c/2f/244643a5ce54a94f0a9a2ab578189c061e4a87c002e037b0829dd77293b6/numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f", size = 12786292, upload-time = "2025-07-24T20:42:20.738Z" },
- { url = "https://files.pythonhosted.org/packages/54/cd/7b5f49d5d78db7badab22d8323c1b6ae458fbf86c4fdfa194ab3cd4eb39b/numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07", size = 10194071, upload-time = "2025-07-24T20:42:36.657Z" },
- { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074, upload-time = "2025-07-24T20:43:07.813Z" },
- { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311, upload-time = "2025-07-24T20:43:29.335Z" },
- { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022, upload-time = "2025-07-24T20:43:37.999Z" },
- { url = "https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135, upload-time = "2025-07-24T20:43:49.28Z" },
- { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147, upload-time = "2025-07-24T20:44:10.328Z" },
- { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989, upload-time = "2025-07-24T20:44:34.88Z" },
- { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052, upload-time = "2025-07-24T20:44:58.872Z" },
- { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955, upload-time = "2025-07-24T20:45:26.714Z" },
- { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843, upload-time = "2025-07-24T20:49:24.444Z" },
- { url = "https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876, upload-time = "2025-07-24T20:49:43.227Z" },
- { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786, upload-time = "2025-07-24T20:49:59.443Z" },
- { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395, upload-time = "2025-07-24T20:45:58.821Z" },
- { url = "https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374, upload-time = "2025-07-24T20:46:20.207Z" },
- { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864, upload-time = "2025-07-24T20:46:30.58Z" },
- { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533, upload-time = "2025-07-24T20:46:46.111Z" },
- { url = "https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007, upload-time = "2025-07-24T20:47:07.1Z" },
- { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914, upload-time = "2025-07-24T20:47:32.459Z" },
- { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708, upload-time = "2025-07-24T20:47:58.129Z" },
- { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678, upload-time = "2025-07-24T20:48:25.402Z" },
- { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832, upload-time = "2025-07-24T20:48:37.181Z" },
- { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049, upload-time = "2025-07-24T20:48:56.24Z" },
- { url = "https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935, upload-time = "2025-07-24T20:49:13.136Z" },
- { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906, upload-time = "2025-07-24T20:50:30.346Z" },
- { url = "https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607, upload-time = "2025-07-24T20:50:51.923Z" },
- { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110, upload-time = "2025-07-24T20:51:01.041Z" },
- { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050, upload-time = "2025-07-24T20:51:11.64Z" },
- { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292, upload-time = "2025-07-24T20:51:33.488Z" },
- { url = "https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913, upload-time = "2025-07-24T20:51:58.517Z" },
- { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180, upload-time = "2025-07-24T20:52:22.827Z" },
- { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809, upload-time = "2025-07-24T20:52:51.015Z" },
- { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410, upload-time = "2025-07-24T20:56:44.949Z" },
- { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821, upload-time = "2025-07-24T20:57:06.479Z" },
- { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303, upload-time = "2025-07-24T20:57:22.879Z" },
- { url = "https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524, upload-time = "2025-07-24T20:53:22.086Z" },
- { url = "https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519, upload-time = "2025-07-24T20:53:44.053Z" },
- { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972, upload-time = "2025-07-24T20:53:53.81Z" },
- { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439, upload-time = "2025-07-24T20:54:04.742Z" },
- { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479, upload-time = "2025-07-24T20:54:25.819Z" },
- { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805, upload-time = "2025-07-24T20:54:50.814Z" },
- { url = "https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830, upload-time = "2025-07-24T20:55:17.306Z" },
- { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665, upload-time = "2025-07-24T20:55:46.665Z" },
- { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777, upload-time = "2025-07-24T20:55:57.66Z" },
- { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856, upload-time = "2025-07-24T20:56:17.318Z" },
- { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226, upload-time = "2025-07-24T20:56:34.509Z" },
+version = "2.3.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" },
+ { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" },
+ { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" },
+ { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = "2025-09-09T15:56:43.343Z" },
+ { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" },
+ { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" },
+ { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" },
+ { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" },
+ { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" },
+ { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" },
+ { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" },
+ { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" },
+ { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" },
+ { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" },
+ { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" },
+ { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" },
+ { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" },
+ { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" },
+ { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" },
+ { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" },
+ { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" },
+ { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" },
+ { url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" },
]

[[package]]
name = "packaging"
-version = "25.0"
+version = "24.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950, upload-time = "2024-11-08T09:47:47.202Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+ { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" },
]

[[package]]
@@ -779,20 +759,20 @@ wheels = [

[[package]]
name = "platformdirs"
-version = "4.3.8"
+version = "4.4.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" },
+ { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" },
]

[[package]]
name = "pluggy"
-version = "1.6.0"
+version = "1.5.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" },
]

[[package]]
@@ -813,14 +793,14 @@ wheels = [

[[package]]
name = "prompt-toolkit"
-version = "3.0.51"
+version = "3.0.52"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wcwidth" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
+ { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
]

[[package]]
@@ -894,33 +874,33 @@ wheels = [

[[package]]
name = "pydantic"
-version = "1.10.22"
+version = "1.10.23"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions", marker = "extra == 'group-18-llama-stack-client-pydantic-v1'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/9a/57/5996c63f0deec09e9e901a2b838247c97c6844999562eac4e435bcb83938/pydantic-1.10.22.tar.gz", hash = "sha256:ee1006cebd43a8e7158fb7190bb8f4e2da9649719bff65d0c287282ec38dec6d", size = 356771, upload-time = "2025-04-24T13:38:43.605Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/8f/3d/bd64466a91ec17b73f5c6c723373c352086dedd405c9f8dc1141aaddc59e/pydantic-1.10.23.tar.gz", hash = "sha256:81ee80fe4bd69236aeb65c8beeb5150655b8a49b946fce6664a720d6cf5ec717", size = 356864, upload-time = "2025-09-13T02:39:34.915Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f6/a3/ec66239ed7c9e90edfb85b23b6b18eb290ed7aa05f54837cdcb6a14faa98/pydantic-1.10.22-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:92d0f97828a075a71d9efc65cf75db5f149b4d79a38c89648a63d2932894d8c9", size = 2794865, upload-time = "2025-04-24T13:37:25.087Z" },
- { url = "https://files.pythonhosted.org/packages/49/6a/99cf3fee612d93210c85f45a161e98c1c5b45b6dcadb21c9f1f838fa9e28/pydantic-1.10.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af5a2811b6b95b58b829aeac5996d465a5f0c7ed84bd871d603cf8646edf6ff", size = 2534212, upload-time = "2025-04-24T13:37:26.848Z" },
- { url = "https://files.pythonhosted.org/packages/f1/e6/0f8882775cd9a60b221103ee7d6a89e10eb5a892d877c398df0da7140704/pydantic-1.10.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cf06d8d40993e79af0ab2102ef5da77b9ddba51248e4cb27f9f3f591fbb096e", size = 2994027, upload-time = "2025-04-24T13:37:28.683Z" },
- { url = "https://files.pythonhosted.org/packages/e7/a3/f20fdecbaa2a2721a6a8ee9e4f344d1f72bd7d56e679371c3f2be15eb8c8/pydantic-1.10.22-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:184b7865b171a6057ad97f4a17fbac81cec29bd103e996e7add3d16b0d95f609", size = 3036716, upload-time = "2025-04-24T13:37:30.547Z" },
- { url = "https://files.pythonhosted.org/packages/1f/83/dab34436d830c38706685acc77219fc2a209fea2a2301a1b05a2865b28bf/pydantic-1.10.22-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:923ad861677ab09d89be35d36111156063a7ebb44322cdb7b49266e1adaba4bb", size = 3171801, upload-time = "2025-04-24T13:37:32.474Z" },
- { url = "https://files.pythonhosted.org/packages/1e/6e/b64deccb8a7304d584088972437ea3091e9d99d27a8e7bf2bd08e29ae84e/pydantic-1.10.22-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:82d9a3da1686443fb854c8d2ab9a473251f8f4cdd11b125522efb4d7c646e7bc", size = 3123560, upload-time = "2025-04-24T13:37:34.855Z" },
- { url = "https://files.pythonhosted.org/packages/08/9a/90d1ab704329a7ae8666354be84b5327d655764003974364767c9d307d3a/pydantic-1.10.22-cp312-cp312-win_amd64.whl", hash = "sha256:1612604929af4c602694a7f3338b18039d402eb5ddfbf0db44f1ebfaf07f93e7", size = 2191378, upload-time = "2025-04-24T13:37:36.649Z" },
- { url = "https://files.pythonhosted.org/packages/47/8f/67befe3607b342dd6eb80237134ebcc6e8db42138609306eaf2b30e1f273/pydantic-1.10.22-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b259dc89c9abcd24bf42f31951fb46c62e904ccf4316393f317abeeecda39978", size = 2797042, upload-time = "2025-04-24T13:37:38.753Z" },
- { url = "https://files.pythonhosted.org/packages/aa/91/bfde7d301f8e1c4cff949b3f1eb2c9b27bdd4b2368da0fe88e7350bbe4bc/pydantic-1.10.22-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9238aa0964d80c0908d2f385e981add58faead4412ca80ef0fa352094c24e46d", size = 2538572, upload-time = "2025-04-24T13:37:41.653Z" },
- { url = "https://files.pythonhosted.org/packages/d7/ce/1b0097ece420354df77d2f01c72278fb43770c8ed732d6b7a303c0c70875/pydantic-1.10.22-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8029f05b04080e3f1a550575a1bca747c0ea4be48e2d551473d47fd768fc1b", size = 2986271, upload-time = "2025-04-24T13:37:43.551Z" },
- { url = "https://files.pythonhosted.org/packages/eb/4c/e257edfd5a0025a428aee7a2835e21b51c76a6b1c8994bcccb14d5721eea/pydantic-1.10.22-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c06918894f119e0431a36c9393bc7cceeb34d1feeb66670ef9b9ca48c073937", size = 3015617, upload-time = "2025-04-24T13:37:45.466Z" },
- { url = "https://files.pythonhosted.org/packages/00/17/ecf46ff31fd62d382424a07ed60540d4479094204bebeebb6dea597e88c3/pydantic-1.10.22-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e205311649622ee8fc1ec9089bd2076823797f5cd2c1e3182dc0e12aab835b35", size = 3164222, upload-time = "2025-04-24T13:37:47.35Z" },
- { url = "https://files.pythonhosted.org/packages/1a/47/2d55ec452c9a87347234bbbc70df268e1f081154b1851f0db89638558a1c/pydantic-1.10.22-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:815f0a73d5688d6dd0796a7edb9eca7071bfef961a7b33f91e618822ae7345b7", size = 3117572, upload-time = "2025-04-24T13:37:49.339Z" },
- { url = "https://files.pythonhosted.org/packages/03/2f/30359a36245b029bec7e442dd780fc242c66e66ad7dd5b50af2dcfd41ff3/pydantic-1.10.22-cp313-cp313-win_amd64.whl", hash = "sha256:9dfce71d42a5cde10e78a469e3d986f656afc245ab1b97c7106036f088dd91f8", size = 2174666, upload-time = "2025-04-24T13:37:51.114Z" },
- { url = "https://files.pythonhosted.org/packages/e9/e0/1ed151a56869be1588ad2d8cda9f8c1d95b16f74f09a7cea879ca9b63a8b/pydantic-1.10.22-py3-none-any.whl", hash = "sha256:343037d608bcbd34df937ac259708bfc83664dadf88afe8516c4f282d7d471a9", size = 166503, upload-time = "2025-04-24T13:38:41.374Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/db/91114a2c7ae0ee946ef54f51aeaba188eb728fa265eea16a2e83801e7174/pydantic-1.10.23-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:136b6428b644691d0ee2a3a7de395d0d1b01ce6388b747bd371bb9ca65b4cc32", size = 2532742, upload-time = "2025-09-13T02:38:33.273Z" },
+ { url = "https://files.pythonhosted.org/packages/12/95/5a5b2413eb68c996f9ebe36d3d40c72f79417710f72c3347fe2f7436d699/pydantic-1.10.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b47c210416606b7c8e236c7546647500856eaba0d01564d5cddacb30d1b5daec", size = 2321478, upload-time = "2025-09-13T02:38:35.285Z" },
+ { url = "https://files.pythonhosted.org/packages/23/4b/34996785baf88fb907fb51a608b81085167b0acfdee3e6920c67bb145738/pydantic-1.10.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9c436e12d81b7c3d3208f66a5d35b401c8ec25fafc9bff446c77f1072ed5f5", size = 2778508, upload-time = "2025-09-13T02:38:37.046Z" },
+ { url = "https://files.pythonhosted.org/packages/13/d7/5a748c85d4cd0e82016484642ab6a2ab86cdf4b3d94a3e4969a4f22a85a4/pydantic-1.10.23-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edba6ac9bd6040daf3e876c284257624171884011e0729d90b90f5fe11f7217d", size = 2828364, upload-time = "2025-09-13T02:38:38.829Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/8a/8a04eb44ebf5e64644a3c6c8dbd1b7c3c506774b88bd6bdc4fff9779cfc2/pydantic-1.10.23-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:101ae7dcd77c23003d4766bcf23e08cd3d012173379ad549bdb2c898855b2a57", size = 2910201, upload-time = "2025-09-13T02:38:40.603Z" },
+ { url = "https://files.pythonhosted.org/packages/99/a0/9f4c10a0673996f2980c131a857f6eb4d7711fc047df06f4346561af03a0/pydantic-1.10.23-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9b05416d8dcf36fd291348bba8f565d015fce4c594577d2ef5bc0c5ec8f54a29", size = 2858353, upload-time = "2025-09-13T02:38:42.12Z" },
+ { url = "https://files.pythonhosted.org/packages/67/48/5719e361f87245fe798b4ddcfa70ad69a8aa7cf143b4e15467e26d46d453/pydantic-1.10.23-cp312-cp312-win_amd64.whl", hash = "sha256:0f309b3dd80ed4d3487b1219c69dfd9036e51a863aeaa41a3b67497b81c4cb8f", size = 1968607, upload-time = "2025-09-13T02:38:43.788Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/d8/02fc15e5af12cf6ee3b58b47ec71bbf0236bc32f621c7d18fd09035c54e5/pydantic-1.10.23-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a6aa6881a18657fe66b9249e0572a9690ebc6f0b37c32872beb06bc51bff738f", size = 2588217, upload-time = "2025-09-13T02:38:45.588Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/0a/f56de9d891ef61cf64dcd6c9324a3168b46b268a7489c2f0f62b45bf29aa/pydantic-1.10.23-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c57bfade74111335062c8ac830d5a14004742ff5b037c52c410d9e9011258bf5", size = 2349602, upload-time = "2025-09-13T02:38:47.598Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9c/3e1854d854a11169e7865e55c9e1f081aed3039a9fa3f90b2ae6aa1d1bfc/pydantic-1.10.23-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:723cb7f6aca0b471bac9d7dceb42d151d26aabdca655bd4190be3a7de008a054", size = 2762249, upload-time = "2025-09-13T02:38:49.862Z" },
+ { url = "https://files.pythonhosted.org/packages/51/b9/910fb8ea0b347f9b02b7641d14a86e1c2d2306d0aeca785f0efc18e23500/pydantic-1.10.23-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b81db6c9108c3f6509c897fcdf842e5993d7233e9140a7f1c4dc1c2f85b0a8d", size = 2799069, upload-time = "2025-09-13T02:38:51.427Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/bf/b9eb1bf92f229a4420627f23f2be8d92decc4f9b33d43fcad220baee0a0a/pydantic-1.10.23-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9e9544d07d96df6c6405b64462fb3122e5d0af46a1c4ddf94c180b17ac29f047", size = 2888516, upload-time = "2025-09-13T02:38:53.882Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/35/58ef579a4ad591f54b20787d123fe7c356c553bdcbde40abb1745778e49d/pydantic-1.10.23-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4dff5e26a14424f4e6cf44cbd45861b458f31e37d316e92a78bf99ec6d1ef2ee", size = 2850293, upload-time = "2025-09-13T02:38:55.839Z" },
+ { url = "https://files.pythonhosted.org/packages/54/73/f9825643f110ac763edd1abef9559d17e0a440348f94f6e5803b23d5aaaf/pydantic-1.10.23-cp313-cp313-win_amd64.whl", hash = "sha256:633235236c9af4a16e7cf47be3ffab15445552613a122b44a8946868c2ace426", size = 1970494, upload-time = "2025-09-13T02:38:57.378Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/d6/43d8913ca252c52c5f5b8d84ae7bfa05059d4d7be3b428170f303d67fe3f/pydantic-1.10.23-py3-none-any.whl", hash = "sha256:6294bb84565c294a3a6408c52b26a42803f258d5ebfdb3ae896cd7cccfa07211", size = 166525, upload-time = "2025-09-13T02:39:33.055Z" },
]

[[package]]
name = "pydantic"
-version = "2.11.7"
+version = "2.11.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -928,9 +908,9 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" },
]
[[package]]
@@ -977,11 +957,11 @@ wheels = [
[[package]]
name = "pygments"
-version = "2.19.2"
+version = "2.19.1"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" },
]
[[package]]
@@ -1015,27 +995,27 @@ wheels = [
[[package]]
name = "pytest-asyncio"
-version = "1.1.0"
+version = "0.24.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/6d/c6cf50ce320cf8611df7a1254d86233b3df7cc07f9b5f5cbcb82e08aa534/pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276", size = 49855, upload-time = "2024-08-22T08:03:18.145Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" },
+ { url = "https://files.pythonhosted.org/packages/96/31/6607dab48616902f76885dfcf62c08d929796fc3b2d2318faf9fd54dbed9/pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b", size = 18024, upload-time = "2024-08-22T08:03:15.536Z" },
]
[[package]]
name = "pytest-xdist"
-version = "3.8.0"
+version = "3.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "execnet" },
{ name = "pytest" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/49/dc/865845cfe987b21658e871d16e0a24e871e00884c545f246dd8f6f69edda/pytest_xdist-3.7.0.tar.gz", hash = "sha256:f9248c99a7c15b7d2f90715df93610353a485827bc06eefb6566d23f6400f126", size = 87550, upload-time = "2025-05-26T21:18:20.251Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/b2/0e802fde6f1c5b2f7ae7e9ad42b83fd4ecebac18a8a8c2f2f14e39dce6e1/pytest_xdist-3.7.0-py3-none-any.whl", hash = "sha256:7d3fbd255998265052435eb9daa4e99b62e6fb9cfb6efd1f858d4d8c0c7f0ca0", size = 46142, upload-time = "2025-05-26T21:18:18.759Z" },
]
[[package]]
@@ -1052,11 +1032,11 @@ wheels = [
[[package]]
name = "pytz"
-version = "2025.2"
+version = "2024.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692, upload-time = "2024-09-11T02:24:47.91Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
+ { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002, upload-time = "2024-09-11T02:24:45.8Z" },
]
[[package]]
@@ -1114,41 +1094,40 @@ wheels = [
[[package]]
name = "rich"
-version = "14.1.0"
+version = "13.9.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markdown-it-py" },
{ name = "pygments" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149, upload-time = "2024-11-01T16:43:57.873Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" },
+ { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424, upload-time = "2024-11-01T16:43:55.817Z" },
]
[[package]]
name = "ruff"
-version = "0.12.10"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/3b/eb/8c073deb376e46ae767f4961390d17545e8535921d2f65101720ed8bd434/ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9", size = 5310076, upload-time = "2025-08-21T18:23:22.595Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/24/e7/560d049d15585d6c201f9eeacd2fd130def3741323e5ccf123786e0e3c95/ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b", size = 11935161, upload-time = "2025-08-21T18:22:26.965Z" },
- { url = "https://files.pythonhosted.org/packages/d1/b0/ad2464922a1113c365d12b8f80ed70fcfb39764288ac77c995156080488d/ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1", size = 12660884, upload-time = "2025-08-21T18:22:30.925Z" },
- { url = "https://files.pythonhosted.org/packages/d7/f1/97f509b4108d7bae16c48389f54f005b62ce86712120fd8b2d8e88a7cb49/ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839", size = 11872754, upload-time = "2025-08-21T18:22:34.035Z" },
- { url = "https://files.pythonhosted.org/packages/12/ad/44f606d243f744a75adc432275217296095101f83f966842063d78eee2d3/ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844", size = 12092276, upload-time = "2025-08-21T18:22:36.764Z" },
- { url = "https://files.pythonhosted.org/packages/06/1f/ed6c265e199568010197909b25c896d66e4ef2c5e1c3808caf461f6f3579/ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db", size = 11734700, upload-time = "2025-08-21T18:22:39.822Z" },
- { url = "https://files.pythonhosted.org/packages/63/c5/b21cde720f54a1d1db71538c0bc9b73dee4b563a7dd7d2e404914904d7f5/ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e", size = 13468783, upload-time = "2025-08-21T18:22:42.559Z" },
- { url = "https://files.pythonhosted.org/packages/02/9e/39369e6ac7f2a1848f22fb0b00b690492f20811a1ac5c1fd1d2798329263/ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559", size = 14436642, upload-time = "2025-08-21T18:22:45.612Z" },
- { url = "https://files.pythonhosted.org/packages/e3/03/5da8cad4b0d5242a936eb203b58318016db44f5c5d351b07e3f5e211bb89/ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf", size = 13859107, upload-time = "2025-08-21T18:22:48.886Z" },
- { url = "https://files.pythonhosted.org/packages/19/19/dd7273b69bf7f93a070c9cec9494a94048325ad18fdcf50114f07e6bf417/ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b", size = 12886521, upload-time = "2025-08-21T18:22:51.567Z" },
- { url = "https://files.pythonhosted.org/packages/c0/1d/b4207ec35e7babaee62c462769e77457e26eb853fbdc877af29417033333/ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9", size = 13097528, upload-time = "2025-08-21T18:22:54.609Z" },
- { url = "https://files.pythonhosted.org/packages/ff/00/58f7b873b21114456e880b75176af3490d7a2836033779ca42f50de3b47a/ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a", size = 13080443, upload-time = "2025-08-21T18:22:57.413Z" },
- { url = "https://files.pythonhosted.org/packages/12/8c/9e6660007fb10189ccb78a02b41691288038e51e4788bf49b0a60f740604/ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60", size = 11896759, upload-time = "2025-08-21T18:23:00.473Z" },
- { url = "https://files.pythonhosted.org/packages/67/4c/6d092bb99ea9ea6ebda817a0e7ad886f42a58b4501a7e27cd97371d0ba54/ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56", size = 11701463, upload-time = "2025-08-21T18:23:03.211Z" },
- { url = "https://files.pythonhosted.org/packages/59/80/d982c55e91df981f3ab62559371380616c57ffd0172d96850280c2b04fa8/ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9", size = 12691603, upload-time = "2025-08-21T18:23:06.935Z" },
- { url = "https://files.pythonhosted.org/packages/ad/37/63a9c788bbe0b0850611669ec6b8589838faf2f4f959647f2d3e320383ae/ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b", size = 13164356, upload-time = "2025-08-21T18:23:10.225Z" },
- { url = "https://files.pythonhosted.org/packages/47/d4/1aaa7fb201a74181989970ebccd12f88c0fc074777027e2a21de5a90657e/ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266", size = 11896089, upload-time = "2025-08-21T18:23:14.232Z" },
- { url = "https://files.pythonhosted.org/packages/ad/14/2ad38fd4037daab9e023456a4a40ed0154e9971f8d6aed41bdea390aabd9/ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e", size = 13004616, upload-time = "2025-08-21T18:23:17.422Z" },
- { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" },
+version = "0.9.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c0/17/529e78f49fc6f8076f50d985edd9a2cf011d1dbadb1cdeacc1d12afc1d26/ruff-0.9.4.tar.gz", hash = "sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7", size = 3599458, upload-time = "2025-01-30T18:09:51.03Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b6/f8/3fafb7804d82e0699a122101b5bee5f0d6e17c3a806dcbc527bb7d3f5b7a/ruff-0.9.4-py3-none-linux_armv6l.whl", hash = "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706", size = 11668400, upload-time = "2025-01-30T18:08:46.508Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/a6/2efa772d335da48a70ab2c6bb41a096c8517ca43c086ea672d51079e3d1f/ruff-0.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf", size = 11628395, upload-time = "2025-01-30T18:08:50.87Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/d7/cd822437561082f1c9d7225cc0d0fbb4bad117ad7ac3c41cd5d7f0fa948c/ruff-0.9.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b", size = 11090052, upload-time = "2025-01-30T18:08:54.498Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/67/3660d58e893d470abb9a13f679223368ff1684a4ef40f254a0157f51b448/ruff-0.9.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137", size = 11882221, upload-time = "2025-01-30T18:08:57.784Z" },
+ { url = "https://files.pythonhosted.org/packages/79/d1/757559995c8ba5f14dfec4459ef2dd3fcea82ac43bc4e7c7bf47484180c0/ruff-0.9.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e", size = 11424862, upload-time = "2025-01-30T18:09:01.167Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/96/7915a7c6877bb734caa6a2af424045baf6419f685632469643dbd8eb2958/ruff-0.9.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec", size = 12626735, upload-time = "2025-01-30T18:09:05.312Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/cc/dadb9b35473d7cb17c7ffe4737b4377aeec519a446ee8514123ff4a26091/ruff-0.9.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b", size = 13255976, upload-time = "2025-01-30T18:09:09.425Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/c3/ad2dd59d3cabbc12df308cced780f9c14367f0321e7800ca0fe52849da4c/ruff-0.9.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a", size = 12752262, upload-time = "2025-01-30T18:09:13.112Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/17/5f1971e54bd71604da6788efd84d66d789362b1105e17e5ccc53bba0289b/ruff-0.9.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214", size = 14401648, upload-time = "2025-01-30T18:09:17.086Z" },
+ { url = "https://files.pythonhosted.org/packages/30/24/6200b13ea611b83260501b6955b764bb320e23b2b75884c60ee7d3f0b68e/ruff-0.9.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231", size = 12414702, upload-time = "2025-01-30T18:09:21.672Z" },
+ { url = "https://files.pythonhosted.org/packages/34/cb/f5d50d0c4ecdcc7670e348bd0b11878154bc4617f3fdd1e8ad5297c0d0ba/ruff-0.9.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b", size = 11859608, upload-time = "2025-01-30T18:09:25.663Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/f4/9c8499ae8426da48363bbb78d081b817b0f64a9305f9b7f87eab2a8fb2c1/ruff-0.9.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6", size = 11485702, upload-time = "2025-01-30T18:09:28.903Z" },
+ { url = "https://files.pythonhosted.org/packages/18/59/30490e483e804ccaa8147dd78c52e44ff96e1c30b5a95d69a63163cdb15b/ruff-0.9.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c", size = 12067782, upload-time = "2025-01-30T18:09:32.371Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/8c/893fa9551760b2f8eb2a351b603e96f15af167ceaf27e27ad873570bc04c/ruff-0.9.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0", size = 12483087, upload-time = "2025-01-30T18:09:36.124Z" },
+ { url = "https://files.pythonhosted.org/packages/23/15/f6751c07c21ca10e3f4a51ea495ca975ad936d780c347d9808bcedbd7182/ruff-0.9.4-py3-none-win32.whl", hash = "sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402", size = 9852302, upload-time = "2025-01-30T18:09:40.013Z" },
+ { url = "https://files.pythonhosted.org/packages/12/41/2d2d2c6a72e62566f730e49254f602dfed23019c33b5b21ea8f8917315a1/ruff-0.9.4-py3-none-win_amd64.whl", hash = "sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e", size = 10850051, upload-time = "2025-01-30T18:09:43.42Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/e6/3d6ec3bc3d254e7f005c543a661a41c3e788976d0e52a1ada195bd664344/ruff-0.9.4-py3-none-win_arm64.whl", hash = "sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41", size = 10078251, upload-time = "2025-01-30T18:09:48.01Z" },
]
[[package]]
@@ -1180,68 +1159,35 @@ wheels = [
[[package]]
name = "time-machine"
-version = "2.19.0"
+version = "2.16.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/f8/a4/1b5fdd165f61b67f445fac2a7feb0c655118edef429cd09ff5a8067f7f1d/time_machine-2.19.0.tar.gz", hash = "sha256:7c5065a8b3f2bbb449422c66ef71d114d3f909c276a6469642ecfffb6a0fcd29", size = 14576, upload-time = "2025-08-19T17:22:08.402Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/9b/aa/7e00614d339e4d687f6e96e312a1566022528427d237ec639df66c4547bc/time_machine-2.19.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c85cf437dc3c07429456d8d6670ac90ecbd8241dcd0fbf03e8db2800576f91ff", size = 19308, upload-time = "2025-08-19T17:20:55.25Z" },
- { url = "https://files.pythonhosted.org/packages/ab/3c/bde3c757394f5bca2fbc1528d4117960a26c38f9b160bf471b38d2378d8f/time_machine-2.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d9238897e8ef54acdf59f5dff16f59ca0720e7c02d820c56b4397c11db5d3eb9", size = 15019, upload-time = "2025-08-19T17:20:56.204Z" },
- { url = "https://files.pythonhosted.org/packages/c8/e0/8ca916dd918018352d377f1f5226ee071cfbeb7dbbde2b03d14a411ac2b1/time_machine-2.19.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e312c7d5d6bfffb96c6a7b39ff29e3046de100d7efaa3c01552654cfbd08f14c", size = 33079, upload-time = "2025-08-19T17:20:57.166Z" },
- { url = "https://files.pythonhosted.org/packages/48/69/184a0209f02dd0cb5e01e8d13cd4c97a5f389c4e3d09b95160dd676ad1e7/time_machine-2.19.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:714c40b2c90d1c57cc403382d5a9cf16e504cb525bfe9650095317da3c3d62b5", size = 34925, upload-time = "2025-08-19T17:20:58.117Z" },
- { url = "https://files.pythonhosted.org/packages/43/42/4bbf4309e8e57cea1086eb99052d97ff6ddecc1ab6a3b07aa4512f8bf963/time_machine-2.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2eaa1c675d500dc3ccae19e9fb1feff84458a68c132bbea47a80cc3dd2df7072", size = 36384, upload-time = "2025-08-19T17:20:59.108Z" },
- { url = "https://files.pythonhosted.org/packages/b1/af/9f510dc1719157348c1a2e87423aed406589070b54b503cb237d9bf3a4fe/time_machine-2.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e77a414e9597988af53b2b2e67242c9d2f409769df0d264b6d06fda8ca3360d4", size = 34881, upload-time = "2025-08-19T17:21:00.116Z" },
- { url = "https://files.pythonhosted.org/packages/ca/28/61764a635c70cc76c76ba582dfdc1a84834cddaeb96789023af5214426b2/time_machine-2.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cd93996970e11c382b04d4937c3cd0b0167adeef14725ece35aae88d8a01733c", size = 32931, upload-time = "2025-08-19T17:21:01.095Z" },
- { url = "https://files.pythonhosted.org/packages/b6/e0/f028d93b266e6ade8aca5851f76ebbc605b2905cdc29981a2943b43e1a6c/time_machine-2.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8e20a6d8d6e23174bd7e931e134d9610b136db460b249d07e84ecdad029ec352", size = 34241, upload-time = "2025-08-19T17:21:02.052Z" },
- { url = "https://files.pythonhosted.org/packages/7d/a6/36d1950ed1d3f613158024cf1dcc73db1d9ef0b9117cf51ef2e37dc06499/time_machine-2.19.0-cp312-cp312-win32.whl", hash = "sha256:95afc9bc65228b27be80c2756799c20b8eb97c4ef382a9b762b6d7888bc84099", size = 17021, upload-time = "2025-08-19T17:21:03.374Z" },
- { url = "https://files.pythonhosted.org/packages/b1/0d/e2dce93355abda3cac69e77fe96566757e98b8fe7fdcbddce89c9ced3f5f/time_machine-2.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84909af950e2448f4e2562ea5759c946248c99ab380d2b47d79b62bd76fa236", size = 17857, upload-time = "2025-08-19T17:21:04.331Z" },
- { url = "https://files.pythonhosted.org/packages/eb/28/50ae6fb83b7feeeca7a461c0dc156cf7ef5e6ef594a600d06634fde6a2cb/time_machine-2.19.0-cp312-cp312-win_arm64.whl", hash = "sha256:0390a1ea9fa7e9d772a39b7c61b34fdcca80eb9ffac339cc0441c6c714c81470", size = 16677, upload-time = "2025-08-19T17:21:05.39Z" },
- { url = "https://files.pythonhosted.org/packages/a9/b8/24ebce67aa531bae2cbe164bb3f4abc6467dc31f3aead35e77f5a075ea3e/time_machine-2.19.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5e172866753e6041d3b29f3037dc47c20525176a494a71bbd0998dfdc4f11f2f", size = 19373, upload-time = "2025-08-19T17:21:06.701Z" },
- { url = "https://files.pythonhosted.org/packages/53/a5/c9a5240fd2f845d3ff9fa26f8c8eaa29f7239af9d65007e61d212250f15b/time_machine-2.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f70f68379bd6f542ae6775cce9a4fa3dcc20bf7959c42eaef871c14469e18097", size = 15056, upload-time = "2025-08-19T17:21:07.667Z" },
- { url = "https://files.pythonhosted.org/packages/b9/92/66cce5d2fb2a5e68459aca85fd18a7e2d216f725988940cd83f96630f2f1/time_machine-2.19.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e69e0b0f694728a00e72891ef8dd00c7542952cb1c87237db594b6b27d504a96", size = 33172, upload-time = "2025-08-19T17:21:08.619Z" },
- { url = "https://files.pythonhosted.org/packages/ae/20/b499e9ab4364cd466016c33dcdf4f56629ca4c20b865bd4196d229f31d92/time_machine-2.19.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3ae0a8b869574301ec5637e32c270c7384cca5cd6e230f07af9d29271a7fa293", size = 35042, upload-time = "2025-08-19T17:21:09.622Z" },
- { url = "https://files.pythonhosted.org/packages/41/32/b252d3d32791eb16c07d553c820dbc33d9c7fa771de3d1c602190bded2b7/time_machine-2.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:554e4317de90e2f7605ff80d153c8bb56b38c0d0c0279feb17e799521e987b8c", size = 36535, upload-time = "2025-08-19T17:21:10.571Z" },
- { url = "https://files.pythonhosted.org/packages/98/cf/4d0470062b9742e1b040ab81bad04d1a5d1de09806507bb6188989cfa1a7/time_machine-2.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6567a5ec5538ed550539ac29be11b3cb36af1f9894e2a72940cba0292cc7c3c9", size = 34945, upload-time = "2025-08-19T17:21:11.538Z" },
- { url = "https://files.pythonhosted.org/packages/24/71/2f741b29d98b1c18f6777a32236497c3d3264b6077e431cea4695684c8a1/time_machine-2.19.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82e9ffe8dfff07b0d810a2ad015a82cd78c6a237f6c7cf185fa7f747a3256f8a", size = 33014, upload-time = "2025-08-19T17:21:12.858Z" },
- { url = "https://files.pythonhosted.org/packages/e8/83/ca8dba6106562843fd99f672e5aaf95badbc10f4f13f7cfe8d8640a7019d/time_machine-2.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7e1c4e578cdd69b3531d8dd3fbcb92a0cd879dadb912ee37af99c3a9e3c0d285", size = 34350, upload-time = "2025-08-19T17:21:13.923Z" },
- { url = "https://files.pythonhosted.org/packages/21/7f/34fe540450e18d0a993240100e4b86e8d03d831b92af8bb6ddb2662dc6fc/time_machine-2.19.0-cp313-cp313-win32.whl", hash = "sha256:72dbd4cbc3d96dec9dd281ddfbb513982102776b63e4e039f83afb244802a9e5", size = 17047, upload-time = "2025-08-19T17:21:14.874Z" },
- { url = "https://files.pythonhosted.org/packages/bf/5d/c8be73df82c7ebe7cd133279670e89b8b110af3ce1412c551caa9d08e625/time_machine-2.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:e17e3e089ac95f9a145ce07ff615e3c85674f7de36f2d92aaf588493a23ffb4b", size = 17868, upload-time = "2025-08-19T17:21:15.819Z" },
- { url = "https://files.pythonhosted.org/packages/92/13/2dfd3b8fb285308f61cd7aa9bfa96f46ddf916e3549a0f0afd094c556599/time_machine-2.19.0-cp313-cp313-win_arm64.whl", hash = "sha256:149072aff8e3690e14f4916103d898ea0d5d9c95531b6aa0995251c299533f7b", size = 16710, upload-time = "2025-08-19T17:21:16.748Z" },
- { url = "https://files.pythonhosted.org/packages/05/c1/deebb361727d2c5790f9d4d874be1b19afd41f4375581df465e6718b46a2/time_machine-2.19.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f3589fee1ed0ab6ee424a55b0ea1ec694c4ba64cc26895bcd7d99f3d1bc6a28a", size = 20053, upload-time = "2025-08-19T17:21:17.704Z" },
- { url = "https://files.pythonhosted.org/packages/45/e8/fe3376951e6118d8ec1d1f94066a169b791424fe4a26c7dfc069b153ee08/time_machine-2.19.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7887e85275c4975fe54df03dcdd5f38bd36be973adc68a8c77e17441c3b443d6", size = 15423, upload-time = "2025-08-19T17:21:18.668Z" },
- { url = "https://files.pythonhosted.org/packages/9c/c7/f88d95cd1a87c650cf3749b4d64afdaf580297aa18ad7f4b44ec9d252dfc/time_machine-2.19.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ce0be294c209928563fcce1c587963e60ec803436cf1e181acd5bc1e425d554b", size = 39630, upload-time = "2025-08-19T17:21:19.645Z" },
- { url = "https://files.pythonhosted.org/packages/cc/5d/65a5c48a65357e56ec6f032972e4abd1c02d4fca4b0717a3aaefd19014d4/time_machine-2.19.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a62fd1ab380012c86f4c042010418ed45eb31604f4bf4453e17c9fa60bc56a29", size = 41242, upload-time = "2025-08-19T17:21:20.979Z" },
- { url = "https://files.pythonhosted.org/packages/f6/f9/fe5209e1615fde0a8cad6c4e857157b150333ed1fe31a7632b08cfe0ebdd/time_machine-2.19.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b25ec853a4530a5800731257f93206b12cbdee85ede964ebf8011b66086a7914", size = 44278, upload-time = "2025-08-19T17:21:21.984Z" },
- { url = "https://files.pythonhosted.org/packages/4a/3a/a5e5fe9c5d614cde0a9387ff35e8dfd12c5ef6384e4c1a21b04e6e0b905d/time_machine-2.19.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a430e4d0e0556f021a9c78e9b9f68e5e8910bdace4aa34ed4d1a73e239ed9384", size = 42321, upload-time = "2025-08-19T17:21:23.755Z" },
- { url = "https://files.pythonhosted.org/packages/a1/c5/56eca774e9162bc1ce59111d2bd69140dc8908c9478c92ec7bd15d547600/time_machine-2.19.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2415b7495ec4364c8067071e964fbadfe746dd4cdb43983f2f0bd6ebed13315c", size = 39270, upload-time = "2025-08-19T17:21:26.009Z" },
- { url = "https://files.pythonhosted.org/packages/9b/69/5dd0c420667578169a12acc8c8fd7452e8cfb181e41c9b4ac7e88fa36686/time_machine-2.19.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbfc6b90c10f288594e1bf89a728a98cc0030791fd73541bbdc6b090aff83143", size = 40193, upload-time = "2025-08-19T17:21:27.054Z" },
- { url = "https://files.pythonhosted.org/packages/75/a7/de974d421bd55c9355583427c2a38fb0237bb5fd6614af492ba89dacb2f9/time_machine-2.19.0-cp313-cp313t-win32.whl", hash = "sha256:16f5d81f650c0a4d117ab08036dc30b5f8b262e11a4a0becc458e7f1c011b228", size = 17542, upload-time = "2025-08-19T17:21:28.674Z" },
- { url = "https://files.pythonhosted.org/packages/76/0a/aa0d05becd5d06ae8d3f16d657dc8cc9400c8d79aef80299de196467ff12/time_machine-2.19.0-cp313-cp313t-win_amd64.whl", hash = "sha256:645699616ec14e147094f601e6ab9553ff6cea37fad9c42720a6d7ed04bcd5dc", size = 18703, upload-time = "2025-08-19T17:21:29.663Z" },
- { url = "https://files.pythonhosted.org/packages/1f/c0/f785a4c7c73aa176510f7c48b84b49c26be84af0d534deb222e0327f750e/time_machine-2.19.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b32daa965d13237536ea3afaa5ad61ade2b2d9314bc3a20196a0d2e1d7b57c6a", size = 17020, upload-time = "2025-08-19T17:21:30.653Z" },
- { url = "https://files.pythonhosted.org/packages/ed/97/c5fb51def06c0b2b6735332ad118ab35b4d9b85368792e5b638e99b1b686/time_machine-2.19.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:31cb43c8fd2d961f31bed0ff4e0026964d2b35e5de9e0fabbfecf756906d3612", size = 19360, upload-time = "2025-08-19T17:21:31.94Z" },
- { url = "https://files.pythonhosted.org/packages/2d/4e/2d795f7d6b7f5205ffe737a05bb1cf19d8038233b797062b2ef412b8512b/time_machine-2.19.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:bdf481a75afc6bff3e520db594501975b652f7def21cd1de6aa971d35ba644e6", size = 15033, upload-time = "2025-08-19T17:21:32.934Z" },
- { url = "https://files.pythonhosted.org/packages/dd/32/9bad501e360b4e758c58fae616ca5f8c7ad974b343f2463a15b2bf77a366/time_machine-2.19.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:00bee4bb950ac6a08d62af78e4da0cf2b4fc2abf0de2320d0431bf610db06e7c", size = 33379, upload-time = "2025-08-19T17:21:33.925Z" },
- { url = "https://files.pythonhosted.org/packages/a3/45/eda0ca4d793dfd162478d6163759b1c6ce7f6e61daa7fd7d62b31f21f87f/time_machine-2.19.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9f02199490906582302ce09edd32394fb393271674c75d7aa76c7a3245f16003", size = 35123, upload-time = "2025-08-19T17:21:34.945Z" },
- { url = "https://files.pythonhosted.org/packages/f0/5a/97e16325442ae5731fcaac794f0a1ef9980eff8a5491e58201d7eb814a34/time_machine-2.19.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e35726c7ba625f844c13b1fc0d4f81f394eefaee1d3a094a9093251521f2ef15", size = 36588, upload-time = "2025-08-19T17:21:35.975Z" },
- { url = "https://files.pythonhosted.org/packages/e8/9d/bf0b2ccc930cc4a316f26f1c78d3f313cd0fa13bb7480369b730a8f129db/time_machine-2.19.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:304315023999cd401ff02698870932b893369e1cfeb2248d09f6490507a92e97", size = 35013, upload-time = "2025-08-19T17:21:37.017Z" },
- { url = "https://files.pythonhosted.org/packages/f0/5a/39ac6a3078174f9715d88364871348b249631f12e76de1b862433b3f8862/time_machine-2.19.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9765d4f003f263ea8bfd90d2d15447ca4b3dfa181922cf6cf808923b02ac180a", size = 33303, upload-time = "2025-08-19T17:21:38.352Z" },
- { url = "https://files.pythonhosted.org/packages/b3/ac/d8646baf9f95f2e792a6d7a7b35e92fca253c4a992afff801beafae0e5c2/time_machine-2.19.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7837ef3fd5911eb9b480909bb93d922737b6bdecea99dfcedb0a03807de9b2d3", size = 34440, upload-time = "2025-08-19T17:21:39.382Z" },
- { url = "https://files.pythonhosted.org/packages/ce/8b/8b6568c5ae966d80ead03ab537be3c6acf2af06fb501c2d466a3162c6295/time_machine-2.19.0-cp314-cp314-win32.whl", hash = "sha256:4bb5bd43b1bdfac3007b920b51d8e761f024ed465cfeec63ac4296922a4ec428", size = 17162, upload-time = "2025-08-19T17:21:40.381Z" },
- { url = "https://files.pythonhosted.org/packages/46/a5/211c1ab4566eba5308b2dc001b6349e3a032e3f6afa67ca2f27ea6b27af5/time_machine-2.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:f583bbd0aa8ab4a7c45a684bf636d9e042d466e30bcbae1d13e7541e2cbe7207", size = 18040, upload-time = "2025-08-19T17:21:41.363Z" },
- { url = "https://files.pythonhosted.org/packages/b8/fc/4c2fb705f6371cb83824da45a8b967514a922fc092a0ef53979334d97a70/time_machine-2.19.0-cp314-cp314-win_arm64.whl", hash = "sha256:f379c6f8a6575a8284592179cf528ce89373f060301323edcc44f1fa1d37be12", size = 16752, upload-time = "2025-08-19T17:21:42.336Z" },
- { url = "https://files.pythonhosted.org/packages/79/ab/6437d18f31c666b5116c97572a282ac2590a82a0a9867746a6647eaf4613/time_machine-2.19.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a3b8981f9c663b0906b05ab4d0ca211fae4b63b47c6ec26de5374fe56c836162", size = 20057, upload-time = "2025-08-19T17:21:43.35Z" },
- { url = "https://files.pythonhosted.org/packages/6c/a2/e03639ec2ba7200328bbcad8a2b2b1d5fccca9cceb9481b164a1cabdcb33/time_machine-2.19.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8e9c6363893e7f52c226afbebb23e825259222d100e67dfd24c8a6d35f1a1907", size = 15430, upload-time = "2025-08-19T17:21:44.725Z" },
- { url = "https://files.pythonhosted.org/packages/5d/ff/39e63a48e840f3e36ce24846ee51dd99c6dba635659b1750a2993771e88e/time_machine-2.19.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:206fcd6c9a6f00cac83db446ad1effc530a8cec244d2780af62db3a2d0a9871b", size = 39622, upload-time = "2025-08-19T17:21:45.821Z" },
- { url = "https://files.pythonhosted.org/packages/9a/2e/ee5ac79c4954768705801e54817c7d58e07e25a0bb227e775f501f3e2122/time_machine-2.19.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf33016a1403c123373ffaeff25e26e69d63bf2c63b6163932efed94160db7ef", size = 41235, upload-time = "2025-08-19T17:21:46.783Z" },
- { url = "https://files.pythonhosted.org/packages/3a/3e/9af5f39525e779185c77285b8bbae15340eeeaa0afb33d458bc8b47d459b/time_machine-2.19.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9247c4bb9bbd3ff584ef4efbdec8efd9f37aa08bcfc4728bde1e489c2cb445bd", size = 44276, upload-time = "2025-08-19T17:21:47.759Z" },
- { url = "https://files.pythonhosted.org/packages/59/fe/572c7443cc27140bbeae3947279bbd4a120f9e8622253a20637f260b7813/time_machine-2.19.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:77f9bb0b86758d1f2d9352642c874946ad5815df53ef4ca22eb9d532179fe50d", size = 42330, upload-time = "2025-08-19T17:21:48.881Z" },
- { url = "https://files.pythonhosted.org/packages/cf/24/1a81c2e08ee7dae13ec8ceed27a29afa980c3d63852e42f1e023bf0faa03/time_machine-2.19.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0b529e262df3b9c449f427385f4d98250828c879168c2e00eec844439f40b370", size = 39281, upload-time = "2025-08-19T17:21:49.907Z" },
- { url = "https://files.pythonhosted.org/packages/d2/60/6f0d6e5108978ca1a2a4ffb4d1c7e176d9199bb109fd44efe2680c60b52a/time_machine-2.19.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9199246e31cdc810e5d89cb71d09144c4d745960fdb0824da4994d152aca3303", size = 40201, upload-time = "2025-08-19T17:21:50.953Z" },
- { url = "https://files.pythonhosted.org/packages/73/b9/3ea4951e8293b0643feb98c0b9a176fa822154f1810835db3f282968ab10/time_machine-2.19.0-cp314-cp314t-win32.whl", hash = "sha256:0fe81bae55b7aefc2c2a34eb552aa82e6c61a86b3353a3c70df79b9698cb02ca", size = 17743, upload-time = "2025-08-19T17:21:51.948Z" },
- { url = "https://files.pythonhosted.org/packages/e4/8b/cd802884ca8a98e2b6cdc2397d57dd12ff8a7d1481e06fc3fad3d4e7e5ff/time_machine-2.19.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7253791b8d7e7399fbeed7a8193cb01bc004242864306288797056badbdaf80b", size = 18956, upload-time = "2025-08-19T17:21:52.997Z" },
- { url = "https://files.pythonhosted.org/packages/c6/49/cabb1593896082fd55e34768029b8b0ca23c9be8b2dc127e0fc14796d33e/time_machine-2.19.0-cp314-cp314t-win_arm64.whl", hash = "sha256:536bd1ac31ab06a1522e7bf287602188f502dc19d122b1502c4f60b1e8efac79", size = 17068, upload-time = "2025-08-19T17:21:54.064Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/fb/dd/5022939b9cadefe3af04f4012186c29b8afbe858b1ec2cfa38baeec94dab/time_machine-2.16.0.tar.gz", hash = "sha256:4a99acc273d2f98add23a89b94d4dd9e14969c01214c8514bfa78e4e9364c7e2", size = 24626, upload-time = "2024-10-08T14:21:59.734Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4a/f4/603a84e7ae6427a53953db9f61b689dc6adf233e03c5f5ca907a901452fd/time_machine-2.16.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:84788f4d62a8b1bf5e499bb9b0e23ceceea21c415ad6030be6267ce3d639842f", size = 20155, upload-time = "2024-10-08T14:21:20.055Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/94/dbe69aecb4b84be52d34814e63176c5ca61f38ee9e6ecda11104653405b5/time_machine-2.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:15ec236b6571730236a193d9d6c11d472432fc6ab54e85eac1c16d98ddcd71bf", size = 16640, upload-time = "2024-10-08T14:21:22.005Z" },
+ { url = "https://files.pythonhosted.org/packages/da/13/27f11be25d7bd298e033b9da93217e5b68309bf724b6e494cdadb471d00d/time_machine-2.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cedc989717c8b44a3881ac3d68ab5a95820448796c550de6a2149ed1525157f0", size = 33721, upload-time = "2024-10-08T14:21:23.059Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9d/70e4640fed1fd8122204ae825c688d0ef8c04f515ec6bf3c5f3086d6510e/time_machine-2.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d26d79de1c63a8c6586c75967e09b0ff306aa7e944a1eaddb74595c9b1839ca", size = 31646, upload-time = "2024-10-08T14:21:24.037Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/cb/93bc0e51bea4e171a85151dbba3c3b3f612b50b953cd3076f5b4f0db9e14/time_machine-2.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:317b68b56a9c3731e0cf8886e0f94230727159e375988b36c60edce0ddbcb44a", size = 33403, upload-time = "2024-10-08T14:21:24.975Z" },
+ { url = "https://files.pythonhosted.org/packages/89/71/2c6a63ad4fbce3d62d46bbd9ac4433f30bade7f25978ce00815b905bcfcf/time_machine-2.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:43e1e18279759897be3293a255d53e6b1cb0364b69d9591d0b80c51e461c94b0", size = 33327, upload-time = "2024-10-08T14:21:25.958Z" },
+ { url = "https://files.pythonhosted.org/packages/68/4e/205c2b26763b8817cd6b8868242843800a1fbf275f2af35f5ba35ff2b01a/time_machine-2.16.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e43adb22def972a29d2b147999b56897116085777a0fea182fd93ee45730611e", size = 31454, upload-time = "2024-10-08T14:21:27.367Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/95/44c1aa3994919f93534244c40cfd2fb9416d7686dc0c8b9b262c751b5118/time_machine-2.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0c766bea27a0600e36806d628ebc4b47178b12fcdfb6c24dc0a566a9c06bfe7f", size = 32972, upload-time = "2024-10-08T14:21:28.351Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/ee/75243df9c7cf30f108758e887141a58e6544baaa46e2e647b9ccc56db819/time_machine-2.16.0-cp312-cp312-win32.whl", hash = "sha256:6dae82ab647d107817e013db82223e20a9853fa88543fec853ae326382d03c2e", size = 19078, upload-time = "2024-10-08T14:21:29.425Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/7c/d4e67cc031f9653c92167ccf87d241e3208653d191c96ac79281c273ab92/time_machine-2.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:265462c77dc9576267c3c7f20707780a171a9fdbac93ac22e608c309efd68c33", size = 19923, upload-time = "2024-10-08T14:21:30.759Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/b6/7047226fcb9afefe47fc80f605530535bf71ad99b6797f057abbfa4cd9a5/time_machine-2.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:ef768e14768eebe3bb1196c0dece8e14c1c6991605721214a0c3c68cf77eb216", size = 18003, upload-time = "2024-10-08T14:21:32.662Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/18/3087d0eb185cedbc82385f46bf16032ec7102a0e070205a2c88c4ecf9952/time_machine-2.16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7751bf745d54e9e8b358c0afa332815da9b8a6194b26d0fd62876ab6c4d5c9c0", size = 20209, upload-time = "2024-10-08T14:21:34.222Z" },
+ { url = "https://files.pythonhosted.org/packages/03/a3/fcc3eaf69390402ecf491d718e533b6d0e06d944d77fc8d87be3a2839102/time_machine-2.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1784edf173ca840ba154de6eed000b5727f65ab92972c2f88cec5c4d6349c5f2", size = 16681, upload-time = "2024-10-08T14:21:35.14Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/96/8b76d264014bf9dc21873218de50d67223c71736f87fe6c65e582f7c29ac/time_machine-2.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f5876a5682ce1f517e55d7ace2383432627889f6f7e338b961f99d684fd9e8d", size = 33768, upload-time = "2024-10-08T14:21:36.942Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/13/59ae8259be02b6c657ef6e3b6952bf274b43849f6f35cc61a576c68ce301/time_machine-2.16.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:806672529a2e255cd901f244c9033767dc1fa53466d0d3e3e49565a1572a64fe", size = 31685, upload-time = "2024-10-08T14:21:37.881Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/c1/9f142beb4d373a2a01ebb58d5117289315baa5131d880ec804db49e94bf7/time_machine-2.16.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:667b150fedb54acdca2a4bea5bf6da837b43e6dd12857301b48191f8803ba93f", size = 33447, upload-time = "2024-10-08T14:21:38.809Z" },
+ { url = "https://files.pythonhosted.org/packages/95/f7/ed9ecd93c2d38dca77d0a28e070020f3ce0fb23e0d4a6edb14bcfffa5526/time_machine-2.16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:da3ae1028af240c0c46c79adf9c1acffecc6ed1701f2863b8132f5ceae6ae4b5", size = 33408, upload-time = "2024-10-08T14:21:39.785Z" },
+ { url = "https://files.pythonhosted.org/packages/91/40/d0d274d70fa2c4cad531745deb8c81346365beb0a2736be05a3acde8b94a/time_machine-2.16.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:520a814ea1b2706c89ab260a54023033d3015abef25c77873b83e3d7c1fafbb2", size = 31526, upload-time = "2024-10-08T14:21:40.769Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/ba/a27cdbb324d9a6d779cde0d514d47b696b5a6a653705d4b511fd65ef1514/time_machine-2.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8243664438bb468408b29c6865958662d75e51f79c91842d2794fa22629eb697", size = 33042, upload-time = "2024-10-08T14:21:41.722Z" },
+ { url = "https://files.pythonhosted.org/packages/72/63/64e9156c9e38c18720d0cc41378168635241de44013ffe3dd5b099447eb0/time_machine-2.16.0-cp313-cp313-win32.whl", hash = "sha256:32d445ce20d25c60ab92153c073942b0bac9815bfbfd152ce3dcc225d15ce988", size = 19108, upload-time = "2024-10-08T14:21:43.596Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/40/27f5738fbd50b78dcc0682c14417eac5a49ccf430525dd0c5a058be125a2/time_machine-2.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:f6927dda86425f97ffda36131f297b1a601c64a6ee6838bfa0e6d3149c2f0d9f", size = 19935, upload-time = "2024-10-08T14:21:45.277Z" },
+ { url = "https://files.pythonhosted.org/packages/35/75/c4d8b2f0fe7dac22854d88a9c509d428e78ac4bf284bc54cfe83f75cc13b/time_machine-2.16.0-cp313-cp313-win_arm64.whl", hash = "sha256:4d3843143c46dddca6491a954bbd0abfd435681512ac343169560e9bab504129", size = 18047, upload-time = "2024-10-08T14:21:46.261Z" },
]
[[package]]
@@ -1258,11 +1204,11 @@ wheels = [
[[package]]
name = "typing-extensions"
-version = "4.15.0"
+version = "4.12.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload-time = "2024-06-07T18:52:15.995Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" },
]
[[package]]
@@ -1385,9 +1331,9 @@ wheels = [
[[package]]
name = "zipp"
-version = "3.23.0"
+version = "3.21.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545, upload-time = "2024-11-10T15:05:20.202Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630, upload-time = "2024-11-10T15:05:19.275Z" },
]