2 changes: 1 addition & 1 deletion .python-version
@@ -1 +1 @@
-3.9.18
+3.12
8 changes: 1 addition & 7 deletions pyproject.toml
@@ -12,7 +12,6 @@ dependencies = [
 "anyio>=3.5.0, <5",
 "distro>=1.7.0, <2",
 "sniffio",
-"cached-property; python_version < '3.8'",
 "tqdm",
 "rich",
 "click",
@@ -21,15 +20,10 @@ dependencies = [
 "pandas",
 "termcolor",
 ]
-requires-python = ">= 3.7"
+requires-python = ">= 3.12"
 classifiers = [
 "Typing :: Typed",
 "Intended Audience :: Developers",
-"Programming Language :: Python :: 3.7",
-"Programming Language :: Python :: 3.8",
-"Programming Language :: Python :: 3.9",
-"Programming Language :: Python :: 3.10",
-"Programming Language :: Python :: 3.11",
 "Programming Language :: Python :: 3.12",
 "Operating System :: OS Independent",
 "Operating System :: POSIX",
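Context for the dropped "cached-property; python_version < '3.8'" pin: functools.cached_property has been part of the standard library since Python 3.8, so with requires-python at ">= 3.12" the backport package is no longer needed. A minimal sketch of the stdlib equivalent (the Example class is hypothetical, not code from this repository):

    from functools import cached_property

    class Example:
        def __init__(self, values: list[int]) -> None:
            self.values = values

        @cached_property
        def total(self) -> int:
            # Computed on first access, then cached on the instance.
            return sum(self.values)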
67 changes: 31 additions & 36 deletions requirements.lock
@@ -1,52 +1,45 @@
-# generated by rye
-# use `rye lock` or `rye sync` to update this lockfile
-#
-# last locked with the following flags:
-# pre: false
-# features: []
-# all-features: true
-# with-sources: false
-# generate-hashes: false
-# universal: false
-
--e file:.
+# This file was autogenerated by uv via the following command:
+# uv pip compile -o requirements.lock pyproject.toml
 annotated-types==0.6.0
 # via pydantic
 anyio==4.4.0
-# via httpx
-# via llama-stack-client
+# via
+# llama-stack-client (pyproject.toml)
+# httpx
 certifi==2023.7.22
-# via httpcore
-# via httpx
+# via
+# httpcore
+# httpx
 click==8.1.7
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 distro==1.8.0
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 exceptiongroup==1.1.3
 # via anyio
 h11==0.14.0
 # via httpcore
 httpcore==1.0.2
 # via httpx
 httpx==0.25.2
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 idna==3.4
-# via anyio
-# via httpx
+# via
+# anyio
+# httpx
 markdown-it-py==3.0.0
 # via rich
 mdurl==0.1.2
 # via markdown-it-py
 numpy==2.0.2
 # via pandas
 pandas==2.2.3
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 prompt-toolkit==3.0.48
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 pyaml==24.12.1
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 pydantic==2.7.1
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 pydantic-core==2.18.2
 # via pydantic
 pygments==2.18.0
@@ -58,23 +51,25 @@ pytz==2024.2
 pyyaml==6.0.2
 # via pyaml
 rich==13.9.4
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 six==1.17.0
 # via python-dateutil
 sniffio==1.3.0
-# via anyio
-# via httpx
-# via llama-stack-client
+# via
+# llama-stack-client (pyproject.toml)
+# anyio
+# httpx
 termcolor==2.5.0
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 tqdm==4.67.1
-# via llama-stack-client
+# via llama-stack-client (pyproject.toml)
 typing-extensions==4.8.0
-# via anyio
-# via llama-stack-client
-# via pydantic
-# via pydantic-core
-# via rich
+# via
+# llama-stack-client (pyproject.toml)
+# anyio
+# pydantic
+# pydantic-core
+# rich
 tzdata==2024.2
 # via pandas
 wcwidth==0.2.13
100 changes: 17 additions & 83 deletions requirements.txt
@@ -1,23 +1,11 @@
 # This file was autogenerated by uv via the following command:
 # uv export --frozen --no-hashes --no-emit-project --no-default-groups --output-file=requirements.txt
-annotated-types==0.5.0 ; python_full_version < '3.8'
+annotated-types==0.7.0
 # via pydantic
-annotated-types==0.7.0 ; python_full_version >= '3.8'
-# via pydantic
-anyio==3.7.1 ; python_full_version < '3.8'
-# via
-# httpcore
-# llama-stack-client
-anyio==4.5.2 ; python_full_version == '3.8.*'
+anyio==4.8.0
 # via
 # httpx
 # llama-stack-client
-anyio==4.8.0 ; python_full_version >= '3.9'
-# via
-# httpx
-# llama-stack-client
-cached-property==1.5.2 ; python_full_version < '3.8'
-# via llama-stack-client
 certifi==2025.1.31
 # via
 # httpcore
@@ -30,79 +18,42 @@ colorama==0.4.6 ; sys_platform == 'win32'
 # tqdm
 distro==1.9.0
 # via llama-stack-client
-exceptiongroup==1.2.2 ; python_full_version < '3.11'
-# via anyio
 h11==0.14.0
 # via httpcore
-httpcore==0.17.3 ; python_full_version < '3.8'
-# via httpx
-httpcore==1.0.7 ; python_full_version >= '3.8'
+httpcore==1.0.7
 # via httpx
-httpx==0.24.1 ; python_full_version < '3.8'
-# via llama-stack-client
-httpx==0.28.1 ; python_full_version >= '3.8'
+httpx==0.28.1
 # via llama-stack-client
 idna==3.10
 # via
 # anyio
 # httpx
-importlib-metadata==6.7.0 ; python_full_version < '3.8'
-# via
-# click
-# pydantic
-markdown-it-py==2.2.0 ; python_full_version < '3.8'
-# via rich
-markdown-it-py==3.0.0 ; python_full_version >= '3.8'
+markdown-it-py==3.0.0
 # via rich
 mdurl==0.1.2
 # via markdown-it-py
-numpy==1.21.6 ; python_full_version < '3.8'
-# via pandas
-numpy==1.24.4 ; python_full_version == '3.8.*'
+numpy==2.2.3
 # via pandas
-numpy==2.0.2 ; python_full_version == '3.9.*'
-# via pandas
-numpy==2.2.3 ; python_full_version >= '3.10'
-# via pandas
-pandas==1.1.5 ; python_full_version < '3.7.1'
-# via llama-stack-client
-pandas==1.3.5 ; python_full_version >= '3.7.1' and python_full_version < '3.8'
-# via llama-stack-client
-pandas==2.0.3 ; python_full_version == '3.8.*'
+pandas==2.2.3
 # via llama-stack-client
-pandas==2.2.3 ; python_full_version >= '3.9'
+prompt-toolkit==3.0.50
 # via llama-stack-client
-prompt-toolkit==3.0.48 ; python_full_version < '3.8'
+pyaml==25.1.0
 # via llama-stack-client
-prompt-toolkit==3.0.50 ; python_full_version >= '3.8'
+pydantic==2.10.6
 # via llama-stack-client
-pyaml==23.5.8 ; python_full_version < '3.8'
-# via llama-stack-client
-pyaml==25.1.0 ; python_full_version >= '3.8'
-# via llama-stack-client
-pydantic==2.5.3 ; python_full_version < '3.8'
-# via llama-stack-client
-pydantic==2.10.6 ; python_full_version >= '3.8'
-# via llama-stack-client
-pydantic-core==2.14.6 ; python_full_version < '3.8'
-# via pydantic
-pydantic-core==2.27.2 ; python_full_version >= '3.8'
+pydantic-core==2.27.2
 # via pydantic
-pygments==2.17.2 ; python_full_version < '3.8'
-# via rich
-pygments==2.19.1 ; python_full_version >= '3.8'
+pygments==2.19.1
 # via rich
 python-dateutil==2.9.0.post0
 # via pandas
 pytz==2025.1
 # via pandas
-pyyaml==6.0.1 ; python_full_version < '3.8'
-# via pyaml
-pyyaml==6.0.2 ; python_full_version >= '3.8'
+pyyaml==6.0.2
 # via pyaml
-rich==13.8.1 ; python_full_version < '3.8'
-# via llama-stack-client
-rich==13.9.4 ; python_full_version >= '3.8'
+rich==13.9.4
 # via llama-stack-client
 six==1.17.0
 # via python-dateutil
@@ -112,36 +63,19 @@ sniffio==1.3.1
 # httpcore
 # httpx
 # llama-stack-client
-termcolor==2.3.0 ; python_full_version < '3.8'
-# via llama-stack-client
-termcolor==2.4.0 ; python_full_version == '3.8.*'
-# via llama-stack-client
-termcolor==2.5.0 ; python_full_version >= '3.9'
+termcolor==2.5.0
 # via llama-stack-client
 tqdm==4.67.1
 # via llama-stack-client
-typing-extensions==4.7.1 ; python_full_version < '3.8'
-# via
-# annotated-types
-# anyio
-# h11
-# importlib-metadata
-# llama-stack-client
-# markdown-it-py
-# pydantic
-# pydantic-core
-# rich
-typing-extensions==4.12.2 ; python_full_version >= '3.8'
+typing-extensions==4.12.2
 # via
 # annotated-types
 # anyio
 # llama-stack-client
 # pydantic
 # pydantic-core
 # rich
-tzdata==2025.1 ; python_full_version >= '3.8'
+tzdata==2025.1
 # via pandas
 wcwidth==0.2.13
 # via prompt-toolkit
-zipp==3.15.0 ; python_full_version < '3.8'
-# via importlib-metadata